Skip to content

Commit ddfe17c

Browse files
authored
fix: file size limit (#198)
* fix: add error option for throwing file size limit reached * chore: update typings * docs: handle file size limit * docs: add missing options * feat: throwFileSizeLimit default to true * chore: update package command - use glob provided by tap to prevent different behavior based on environment - increase test coverage target to 90 * feat: remove req.multipartIterator * test: fix test command - single quote is not available in windows * refactor: remove duplicate code
1 parent e38dcfe commit ddfe17c

File tree

7 files changed

+105
-37
lines changed

7 files changed

+105
-37
lines changed

README.md

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -179,11 +179,14 @@ fastify.post('/upload/files', async function (req, reply) {
179179

180180
## Handle file size limitation
181181

182-
If you set a `fileSize` limit `req.saveRequestFiles()` is able to throw an `RequestFileTooLargeError` error.
182+
If you set a `fileSize` limit, it is able to throw a `RequestFileTooLargeError` error when the limit is reached.
183183

184184
```js
185185
fastify.post('/upload/files', async function (req, reply) {
186186
try {
187+
//const file = await req.file({ limits: { fileSize: 17000 } })
188+
//const files = await req.files({ limits: { fileSize: 17000 } })
189+
//const parts = await req.parts({ limits: { fileSize: 17000 } })
187190
const files = await req.saveRequestFiles({ limits: { fileSize: 17000 } })
188191
reply.send()
189192
} catch (error) {
@@ -192,6 +195,22 @@ fastify.post('/upload/files', async function (req, reply) {
192195
})
193196
```
194197

198+
If you want to fall back to the handling before `4.0.0`, you can disable the throwing behavior by passing `throwFileSizeLimit: false`.
199+
Note: It will not affect the behavior of `saveRequestFiles()`.
200+
201+
```js
202+
// globally disable
203+
fastify.register(fastifyMultipart, { throwFileSizeLimit: false })
204+
205+
fastify.post('/upload/file', async function (req, reply) {
206+
const file = await req.file({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
207+
//const files = await req.files({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
208+
//const parts = await req.parts({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
209+
//const files = await req.saveRequestFiles({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
210+
reply.send()
211+
})
212+
```
213+
195214
## Parse all fields and assign them to the body
196215

197216
This allows you to parse all fields automatically and assign them to the `request.body`. By default files are accumulated in memory (Be careful!) to buffer objects. Uncaught errors are [handled](https://github.com/fastify/fastify/blob/master/docs/Hooks.md#manage-errors-from-a-hook) by fastify.

index.d.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,11 @@ export interface FastifyMultipartOptions {
9292
*/
9393
sharedSchemaId?: string;
9494

95+
/**
96+
* Allow throwing error when file size limit reached.
97+
*/
98+
throwFileSizeLimit?: boolean
99+
95100
/**
96101
* Manage the file stream like you need
97102
*/

index.js

Lines changed: 18 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -149,6 +149,11 @@ function fastifyMultipart (fastify, options, done) {
149149
})
150150
}
151151

152+
let throwFileSizeLimit = true
153+
if (typeof options.throwFileSizeLimit === 'boolean') {
154+
throwFileSizeLimit = options.throwFileSizeLimit
155+
}
156+
152157
const PartsLimitError = createError('FST_PARTS_LIMIT', 'reach parts limit', 413)
153158
const FilesLimitError = createError('FST_FILES_LIMIT', 'reach files limit', 413)
154159
const FieldsLimitError = createError('FST_FIELDS_LIMIT', 'reach fields limit', 413)
@@ -169,9 +174,6 @@ function fastifyMultipart (fastify, options, done) {
169174
fastify.decorateRequest(kMultipartHandler, handleMultipart)
170175

171176
fastify.decorateRequest('parts', getMultipartIterator)
172-
// keeping multipartIterator to avoid bumping a major
173-
// TODO remove on 4.x
174-
fastify.decorateRequest('multipartIterator', getMultipartIterator)
175177

176178
fastify.decorateRequest('isMultipart', isMultipart)
177179
fastify.decorateRequest('tmpUploads', null)
@@ -400,6 +402,10 @@ function fastifyMultipart (fastify, options, done) {
400402
return
401403
}
402404

405+
if (typeof opts.throwFileSizeLimit === 'boolean') {
406+
throwFileSizeLimit = opts.throwFileSizeLimit
407+
}
408+
403409
const value = {
404410
fieldname: name,
405411
filename,
@@ -420,6 +426,15 @@ function fastifyMultipart (fastify, options, done) {
420426
return this._buf
421427
}
422428
}
429+
430+
if (throwFileSizeLimit) {
431+
file.on('limit', function () {
432+
const err = new RequestFileTooLargeError()
433+
err.part = value
434+
onError(err)
435+
})
436+
}
437+
423438
if (body[name] === undefined) {
424439
body[name] = value
425440
} else if (Array.isArray(body[name])) {
@@ -461,12 +476,6 @@ function fastifyMultipart (fastify, options, done) {
461476
await pump(file.file, target)
462477
requestFiles.push({ ...file, filepath })
463478
this.tmpUploads.push(filepath)
464-
// busboy set truncated to true when the configured file size limit was reached
465-
if (file.file.truncated) {
466-
const err = new RequestFileTooLargeError()
467-
err.part = file
468-
throw err
469-
}
470479
} catch (err) {
471480
this.log.error({ err }, 'save request file')
472481
throw err

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,10 +38,10 @@
3838
},
3939
"scripts": {
4040
"lint": "standard | snazzy",
41-
"unit": "tap test/**/*.test.js test/*.test.js -t 60",
41+
"unit": "tap \"test/**/*.test.js\" -t 90",
4242
"typescript": "tsd",
4343
"test": "npm run lint && npm run unit && npm run typescript",
44-
"coverage": "tap test/**/*.test.js test/*.test.js --coverage-report=html",
44+
"coverage": "tap \"test/**/*.test.js\" --coverage-report=html",
4545
"start": "CLIMEM=8999 node -r climem ./examples/example",
4646
"climem": "climem 8999 localhost"
4747
},

test/big.test.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ test('should upload a big file in constant memory', { skip: process.env.CI }, fu
2525
fastify.post('/', async function (req, reply) {
2626
t.ok(req.isMultipart())
2727

28-
for await (const part of req.multipartIterator()) {
28+
for await (const part of req.parts()) {
2929
if (part.file) {
3030
t.equal(part.fieldname, 'upload')
3131
t.equal(part.filename, 'random-data')

test/index.test-d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ const runServer = async () => {
1414
app.register(fastifyMultipart, {
1515
addToBody: true,
1616
sharedSchemaId: 'sharedId',
17+
throwFileSizeLimit: false,
1718
// stream should be of type streams.Readable
1819
// body should be of type fastifyMulipart.Record<string, BodyEntry>
1920
onFile: (fieldName: string, stream: any, filename: string, encoding: string, mimetype: string, body: Record<string, any>) => {

test/multipart.test.js

Lines changed: 58 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -477,33 +477,71 @@ test('should throw error due to partsLimit (The max number of parts (fields + fi
477477
})
478478
})
479479

480-
test('should also work with multipartIterator', function (t) {
481-
t.plan(8)
480+
test('should throw error due to file size limit exceed (Default: true)', function (t) {
481+
t.plan(4)
482482

483483
const fastify = Fastify()
484484
t.tearDown(fastify.close.bind(fastify))
485485

486-
fastify.register(multipart)
486+
fastify.register(multipart, { limits: { fileSize: 1 } })
487487

488488
fastify.post('/', async function (req, reply) {
489-
for await (const part of req.multipartIterator()) {
490-
if (part.file) {
491-
t.equal(part.fieldname, 'upload')
492-
t.equal(part.filename, 'README.md')
493-
t.equal(part.encoding, '7bit')
494-
t.equal(part.mimetype, 'text/markdown')
495-
t.ok(part.fields.upload)
496-
497-
const original = fs.readFileSync(filePath, 'utf8')
498-
await pump(
499-
part.file,
500-
concat(function (buf) {
501-
t.equal(buf.toString(), original)
502-
})
503-
)
489+
try {
490+
const parts = await req.files()
491+
for await (const part of parts) {
492+
t.ok(part.file)
493+
await sendToWormhole(part.file)
504494
}
495+
reply.code(200).send()
496+
} catch (error) {
497+
t.true(error instanceof fastify.multipartErrors.RequestFileTooLargeError)
498+
reply.code(500).send()
499+
}
500+
})
501+
502+
fastify.listen(0, async function () {
503+
// request
504+
const form = new FormData()
505+
const opts = {
506+
protocol: 'http:',
507+
hostname: 'localhost',
508+
port: fastify.server.address().port,
509+
path: '/',
510+
headers: form.getHeaders(),
511+
method: 'POST'
505512
}
506513

514+
const req = http.request(opts, (res) => {
515+
t.equal(res.statusCode, 500)
516+
res.on('end', () => {
517+
t.pass('res ended successfully')
518+
})
519+
})
520+
form.append('upload', fs.createReadStream(filePath))
521+
form.append('upload2', fs.createReadStream(filePath))
522+
523+
try {
524+
await pump(form, req)
525+
} catch (error) {
526+
t.error(error, 'formData request pump: no err')
527+
}
528+
})
529+
})
530+
531+
test('should not throw error due to file size limit exceed - files setting (Default: true)', function (t) {
532+
t.plan(3)
533+
534+
const fastify = Fastify()
535+
t.tearDown(fastify.close.bind(fastify))
536+
537+
fastify.register(multipart)
538+
539+
fastify.post('/', async function (req, reply) {
540+
const parts = await req.files({ throwFileSizeLimit: false, limits: { fileSize: 1 } })
541+
for await (const part of parts) {
542+
t.ok(part.file)
543+
await sendToWormhole(part.file)
544+
}
507545
reply.code(200).send()
508546
})
509547

@@ -521,16 +559,12 @@ test('should also work with multipartIterator', function (t) {
521559

522560
const req = http.request(opts, (res) => {
523561
t.equal(res.statusCode, 200)
524-
// consume all data without processing
525-
res.resume()
526562
res.on('end', () => {
527563
t.pass('res ended successfully')
528564
})
529565
})
530-
const rs = fs.createReadStream(filePath)
531-
form.append('upload', rs)
532-
form.append('hello', 'world')
533-
form.append('willbe', 'dropped')
566+
form.append('upload', fs.createReadStream(filePath))
567+
form.append('upload2', fs.createReadStream(filePath))
534568

535569
try {
536570
await pump(form, req)

0 commit comments

Comments
 (0)