Skip to content

Commit

Permalink
Merge pull request from GHSA-hpp2-2cr5-pf6g
Browse files Browse the repository at this point in the history
* lib: implement default values for parts and fileSize

* fixup! lib: implement default values for parts and fileSize
  • Loading branch information
RafaelGSS authored Feb 14, 2023
1 parent dc25a58 commit 85be81b
Show file tree
Hide file tree
Showing 5 changed files with 136 additions and 5 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,13 @@ fastify.register(require('@fastify/multipart'), {
fileSize: 1000000, // For multipart forms, the max file size in bytes
files: 1, // Max number of file fields
headerPairs: 2000, // Max number of header key=>value pairs
parts: 1000 // For multipart forms, the max number of parts (fields + files)
}
});
```

For security reasons, `@fastify/multipart` applies default limits of _1000_ for `parts` and _1048576_ bytes for `fileSize`.

**Note**: if the file stream that is provided by `data.file` is not consumed, like in the example below with the usage of pump, the promise will not be fulfilled at the end of the multipart processing.
This behavior is inherited from [`@fastify/busboy`](https://github.com/fastify/busboy).

Expand Down
18 changes: 15 additions & 3 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,12 @@ function busboy (options) {
}

function fastifyMultipart (fastify, options, done) {
options.limits = {
...options.limits,
parts: options.limits?.parts || 1000,
fileSize: options.limits?.fileSize || fastify.initialConfig.bodyLimit
}

const attachFieldsToBody = options.attachFieldsToBody
if (options.addToBody === true) {
if (typeof options.sharedSchemaId === 'string') {
Expand Down Expand Up @@ -360,15 +366,21 @@ function fastifyMultipart (fastify, options, done) {
.on('finish', onEnd)

bb.on('partsLimit', function () {
onError(new PartsLimitError())
const err = new PartsLimitError()
onError(err)
process.nextTick(() => onEnd(err))
})

bb.on('filesLimit', function () {
onError(new FilesLimitError())
const err = new FilesLimitError()
onError(err)
process.nextTick(() => onEnd(err))
})

bb.on('fieldsLimit', function () {
onError(new FieldsLimitError())
const err = new FieldsLimitError()
onError(err)
process.nextTick(() => onEnd(err))
})

request.pipe(bb)
Expand Down
7 changes: 6 additions & 1 deletion test/big.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,12 @@ test('should upload a big file in constant memory', { skip: process.env.CI }, fu

t.teardown(fastify.close.bind(fastify))

fastify.register(multipart)
fastify.register(multipart, {
limits: {
fileSize: Infinity,
parts: Infinity
}
})

fastify.post('/', async function (req, reply) {
t.ok(req.isMultipart())
Expand Down
7 changes: 6 additions & 1 deletion test/legacy/big.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,12 @@ test('should upload a big file in constant memory', { skip: process.env.CI }, fu

t.teardown(fastify.close.bind(fastify))

fastify.register(multipart)
fastify.register(multipart, {
limits: {
fileSize: Infinity,
parts: Infinity
}
})

fastify.post('/', function (req, reply) {
t.ok(req.isMultipart())
Expand Down
106 changes: 106 additions & 0 deletions test/multipart-security.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ const multipart = require('..')
const http = require('http')
const path = require('path')
const fs = require('fs')
const crypto = require('crypto')
const EventEmitter = require('events')
const { once } = EventEmitter

const filePath = path.join(__dirname, '../README.md')

Expand Down Expand Up @@ -100,3 +103,106 @@ test('should not allow __proto__ as field name', function (t) {
form.pipe(req)
})
})

test('should use default for fileSize', async function (t) {
t.plan(4)

const fastify = Fastify()
t.teardown(fastify.close.bind(fastify))

fastify.register(multipart, {
throwFileSizeLimit: true
})

fastify.post('/', async function (req, reply) {
t.ok(req.isMultipart())

const part = await req.file()
t.pass('the file is not consumed yet')

try {
await part.toBuffer()
t.fail('it should throw')
} catch (error) {
t.ok(error)
reply.send(error)
}
})

await fastify.listen({ port: 0 })

// request
const form = new FormData()
const opts = {
hostname: '127.0.0.1',
port: fastify.server.address().port,
path: '/',
headers: form.getHeaders(),
method: 'POST'
}

const randomFileBuffer = Buffer.alloc(15_000_000)
crypto.randomFillSync(randomFileBuffer)

const req = http.request(opts)
form.append('upload', randomFileBuffer)

form.pipe(req)

try {
const [res] = await once(req, 'response')
t.equal(res.statusCode, 413)
res.resume()
await once(res, 'end')
} catch (error) {
t.error(error, 'request')
}
})

test('should use default for parts - 1000', function (t) {
const fastify = Fastify()
t.teardown(fastify.close.bind(fastify))

fastify.register(multipart)

fastify.post('/', async function (req, reply) {
try {
// eslint-disable-next-lint no-empty
for await (const _ of req.parts()) { console.assert(_) }
t.fail('should throw on 1001')
reply.code(200).send()
} catch (error) {
t.ok(error instanceof fastify.multipartErrors.PartsLimitError)
reply.code(500).send()
}
})

fastify.listen({ port: 0 }, async function () {
// request
const form = new FormData()
const opts = {
protocol: 'http:',
hostname: 'localhost',
port: fastify.server.address().port,
path: '/',
headers: form.getHeaders(),
method: 'POST'
}

const req = http.request(opts, (res) => {
t.equal(res.statusCode, 500)
res.resume()
res.on('end', () => {
t.pass('res ended successfully')
t.end()
})
})
for (let i = 0; i < 1000; ++i) {
form.append('hello' + i, 'world')
}
// Exceeds the default limit (1000)
form.append('hello', 'world')

form.pipe(req)
})
})

0 comments on commit 85be81b

Please sign in to comment.