diff --git a/README.md b/README.md index fbcde2cc..5efcb519 100644 --- a/README.md +++ b/README.md @@ -147,9 +147,10 @@ fastify.post('/', async function (req, reply) { fastify.post('/upload/raw/any', async function (req, reply) { const parts = req.parts() for await (const part of parts) { - if (part.file) { + if (part.type === 'file') { await pump(part.file, fs.createWriteStream(part.filename)) } else { + // part.type === 'field' console.log(part) } } @@ -177,6 +178,7 @@ This will store all files in the operating system default directory for temporar fastify.post('/upload/files', async function (req, reply) { // stores files to tmp dir and return files const files = await req.saveRequestFiles() + files[0].type // "file" files[0].filepath files[0].fieldname files[0].filename diff --git a/examples/example.js b/examples/example.js index dd8fa5d1..4de2ecef 100644 --- a/examples/example.js +++ b/examples/example.js @@ -41,9 +41,10 @@ fastify.post('/upload/stream/files', async function (req, reply) { fastify.post('/upload/raw/any', async function (req, reply) { const parts = req.parts() for await (const part of parts) { - if (part.file) { + if (part.type === 'file') { await pump(part.file, fs.createWriteStream(part.filename)) } else { + // part.type === 'field' console.log(part) } } diff --git a/index.d.ts b/index.d.ts index 2867dcb8..30309032 100644 --- a/index.d.ts +++ b/index.d.ts @@ -82,6 +82,7 @@ declare namespace fastifyMultipart { export type Multipart = MultipartFile | MultipartValue; export interface MultipartFile { + type: 'file'; toBuffer: () => Promise<Buffer>; file: BusboyFileStream; fieldname: string; @@ -92,6 +93,7 @@ declare namespace fastifyMultipart { } export interface MultipartValue<T = unknown> { + type: 'field'; value: T; fieldname: string; mimetype: string; diff --git a/index.js b/index.js index 0351220f..f3f76cad 100644 --- a/index.js +++ b/index.js @@ -410,6 +410,7 @@ function fastifyMultipart (fastify, options, done) { } const value = { + type: 'field', fieldname: 
name, mimetype: contentType, encoding, @@ -444,6 +445,7 @@ function fastifyMultipart (fastify, options, done) { : defaultThrowFileSizeLimit const value = { + type: 'file', fieldname: name, filename, encoding, diff --git a/test/big.test.js b/test/big.test.js index c104a60d..c96b161c 100644 --- a/test/big.test.js +++ b/test/big.test.js @@ -13,7 +13,7 @@ const sendToWormhole = require('stream-wormhole') // skipping on Github Actions because it takes too long test('should upload a big file in constant memory', { skip: process.env.CI }, function (t) { - t.plan(9) + t.plan(10) const fastify = Fastify() const hashInput = crypto.createHash('sha256') @@ -32,6 +32,7 @@ test('should upload a big file in constant memory', { skip: process.env.CI }, fu for await (const part of req.parts()) { if (part.file) { + t.equal(part.type, 'file') t.equal(part.fieldname, 'upload') t.equal(part.filename, 'random-data') t.equal(part.encoding, '7bit') diff --git a/test/fix-313.test.js b/test/fix-313.test.js index 65174008..31eb2624 100644 --- a/test/fix-313.test.js +++ b/test/fix-313.test.js @@ -14,7 +14,7 @@ const { once } = EventEmitter const filePath = path.join(__dirname, '../README.md') test('should store file on disk, remove on response when attach fields to body is true', async function (t) { - t.plan(22) + t.plan(25) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -29,18 +29,21 @@ test('should store file on disk, remove on response when attach fields to body i const files = await req.saveRequestFiles() t.ok(files[0].filepath) + t.equal(files[0].type, 'file') t.equal(files[0].fieldname, 'upload') t.equal(files[0].filename, 'README.md') t.equal(files[0].encoding, '7bit') t.equal(files[0].mimetype, 'text/markdown') t.ok(files[0].fields.upload) t.ok(files[1].filepath) + t.equal(files[1].type, 'file') t.equal(files[1].fieldname, 'upload') t.equal(files[1].filename, 'README.md') t.equal(files[1].encoding, '7bit') t.equal(files[1].mimetype, 'text/markdown') 
t.ok(files[1].fields.upload) t.ok(files[2].filepath) + t.equal(files[2].type, 'file') t.equal(files[2].fieldname, 'other') t.equal(files[2].filename, 'README.md') t.equal(files[2].encoding, '7bit') diff --git a/test/index.test-d.ts b/test/index.test-d.ts index a18a788d..72e66951 100644 --- a/test/index.test-d.ts +++ b/test/index.test-d.ts @@ -55,7 +55,8 @@ const runServer = async () => { app.post('/', async (req, reply) => { const data = await req.file() if (data == null) throw new Error('missing file') - + + expectType<'file'>(data.type) expectType(data.file) expectType(data.file.truncated) expectType(data.fields) @@ -69,7 +70,7 @@ const runServer = async () => { // field missing from the request } else if (Array.isArray(field)) { // multiple fields with the same name - } else if ('file' in field) { + } else if (field.type === 'file') { // field containing a file field.file.resume() } else { @@ -85,10 +86,11 @@ const runServer = async () => { // Multiple fields including scalar values app.post<{Body: {file: MultipartFile, foo: MultipartValue}}>('/upload/stringvalue', async (req, reply) => { expectError(req.body.foo.file); + expectType<'field'>(req.body.foo.type) expectType(req.body.foo.value); expectType(req.body.file.file) - expectError(req.body.file.value); + expectType<'file'>(req.body.file.type); reply.send(); }) @@ -123,7 +125,7 @@ const runServer = async () => { app.post('/upload/raw/any', async function (req, reply) { const parts = req.parts() for await (const part of parts) { - if ('file' in part) { + if (part.type === 'file') { await pump(part.file, fs.createWriteStream(part.filename)) } else { console.log(part.value) @@ -145,6 +147,7 @@ const runServer = async () => { app.post('/upload/files', async function (req, reply) { // stores files to tmp dir and return files const files = await req.saveRequestFiles() + files[0].type // "file" files[0].filepath files[0].fieldname files[0].filename diff --git a/test/multipart-body-schema.test.js 
b/test/multipart-body-schema.test.js index c5c0eccd..d4e74fdf 100644 --- a/test/multipart-body-schema.test.js +++ b/test/multipart-body-schema.test.js @@ -11,7 +11,7 @@ const fs = require('fs') const filePath = path.join(__dirname, '../README.md') test('should be able to use JSON schema to validate request', function (t) { - t.plan(6) + t.plan(7) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -39,6 +39,7 @@ test('should be able to use JSON schema to validate request', function (t) { const content = await req.body.upload.toBuffer() t.equal(content.toString(), original) + t.equal(req.body.hello.type, 'field') t.equal(req.body.hello.value, 'world') reply.code(200).send() diff --git a/test/multipart.test.js b/test/multipart.test.js index 5c8f077c..98eab535 100644 --- a/test/multipart.test.js +++ b/test/multipart.test.js @@ -17,7 +17,7 @@ const sendToWormhole = require('stream-wormhole') const filePath = path.join(__dirname, '../README.md') test('should parse forms', function (t) { - t.plan(8) + t.plan(9) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -27,6 +27,7 @@ test('should parse forms', function (t) { fastify.post('/', async function (req, reply) { for await (const part of req.parts()) { if (part.file) { + t.equal(part.type, 'file') t.equal(part.fieldname, 'upload') t.equal(part.filename, 'README.md') t.equal(part.encoding, '7bit') @@ -76,7 +77,7 @@ test('should parse forms', function (t) { }) test('should respond when all files are processed', function (t) { - t.plan(4) + t.plan(6) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -87,6 +88,7 @@ test('should respond when all files are processed', function (t) { const parts = req.files() for await (const part of parts) { t.ok(part.file) + t.equal(part.type, 'file') await sendToWormhole(part.file) } reply.code(200).send() @@ -327,6 +329,7 @@ test('should be able to configure limits globally with plugin register options', const parts = req.files() for 
await (const part of parts) { t.ok(part.file) + t.equal(part.type, 'file') await sendToWormhole(part.file) } reply.code(200).send() @@ -469,7 +472,7 @@ test('should throw error due to partsLimit (The max number of parts (fields + fi }) test('should throw error due to file size limit exceed (Default: true)', function (t) { - t.plan(4) + t.plan(6) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -481,6 +484,7 @@ test('should throw error due to file size limit exceed (Default: true)', functio const parts = req.files() for await (const part of parts) { t.ok(part.file) + t.equal(part.type, 'file') await sendToWormhole(part.file) } reply.code(200).send() @@ -516,7 +520,7 @@ test('should throw error due to file size limit exceed (Default: true)', functio }) test('should not throw error due to file size limit exceed - files setting (Default: true)', function (t) { - t.plan(3) + t.plan(5) const fastify = Fastify() t.teardown(fastify.close.bind(fastify)) @@ -527,6 +531,7 @@ test('should not throw error due to file size limit exceed - files setting (Defa const parts = req.files({ limits: { fileSize: 1 } }) for await (const part of parts) { t.ok(part.file) + t.equal(part.type, 'file') await sendToWormhole(part.file) } reply.code(200).send() @@ -557,7 +562,7 @@ test('should not throw error due to file size limit exceed - files setting (Defa }) test('should not miss fields if part handler takes much time than formdata parsing', async function (t) { - t.plan(11) + t.plan(12) const original = fs.readFileSync(filePath, 'utf8') const immediate = util.promisify(setImmediate) @@ -576,6 +581,7 @@ test('should not miss fields if part handler takes much time than formdata parsi for await (const part of req.parts()) { if (part.file) { + t.equal(part.type, 'file') t.equal(part.fieldname, 'upload') t.equal(part.filename, 'README.md') t.equal(part.encoding, '7bit')