
Commit

Merge branch 'main' into (fastify#4439)-Access-handler-name-add-properties-to-req-route-options
Uzlopak committed Aug 21, 2023
2 parents 5eb061b + d3c5532 commit c13caa1
Showing 17 changed files with 331 additions and 219 deletions.
15 changes: 15 additions & 0 deletions .github/dependabot.yml
@@ -17,3 +17,18 @@ updates:
schedule:
interval: "weekly"
open-pull-requests-limit: 10
groups:
# Production dependencies without breaking changes
dependencies:
dependency-type: "production"
update-types:
- "minor"
- "patch"
# Production dependencies with breaking changes
dependencies-major:
dependency-type: "production"
update-types:
- "major"
# Development dependencies
dev-dependencies:
dependency-type: "development"
41 changes: 10 additions & 31 deletions .github/workflows/benchmark-parser.yml
@@ -1,4 +1,4 @@
name: Benchmark-parser
name: Benchmark Parser

on:
pull_request_target:
@@ -11,15 +11,15 @@ jobs:
permissions:
contents: read
outputs:
PR-BENCH-14: ${{ steps.benchmark-pr.outputs.BENCH_RESULT14 }}
PR-BENCH-16: ${{ steps.benchmark-pr.outputs.BENCH_RESULT16 }}
PR-BENCH-18: ${{ steps.benchmark-pr.outputs.BENCH_RESULT18 }}
MAIN-BENCH-14: ${{ steps.benchmark-main.outputs.BENCH_RESULT14 }}
PR-BENCH-20: ${{ steps.benchmark-pr.outputs.BENCH_RESULT20 }}
MAIN-BENCH-16: ${{ steps.benchmark-main.outputs.BENCH_RESULT16 }}
MAIN-BENCH-18: ${{ steps.benchmark-main.outputs.BENCH_RESULT18 }}
MAIN-BENCH-20: ${{ steps.benchmark-main.outputs.BENCH_RESULT20 }}
strategy:
matrix:
node-version: [14, 16, 18]
node-version: [16, 18, 20]
steps:
- uses: actions/checkout@v3
with:
@@ -74,39 +74,18 @@ jobs:
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
message: |
**Node**: 14
**Type**: Parser
**PR**: ${{ needs.benchmark.outputs.PR-BENCH-14 }}
**MAIN**: ${{ needs.benchmark.outputs.MAIN-BENCH-14 }}
---
**Node**: 16
**Type**: Parser
**PR**: ${{ needs.benchmark.outputs.PR-BENCH-16 }}
**MAIN**: ${{ needs.benchmark.outputs.MAIN-BENCH-16 }}
---
**Node**: 18
**Type**: Parser
**PR**: ${{ needs.benchmark.outputs.PR-BENCH-18 }}
**MAIN**: ${{ needs.benchmark.outputs.MAIN-BENCH-18 }}
remove-label:
if: "always()"
needs:
- benchmark
- output-benchmark
runs-on: ubuntu-latest
steps:
- name: Remove benchmark label
uses: octokit/request-action@v2.x
id: remove-label
with:
route: DELETE /repos/{repo}/issues/{issue_number}/labels/{name}
repo: ${{ github.event.pull_request.head.repo.full_name }}
issue_number: ${{ github.event.pull_request.number }}
name: benchmark
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
---
**Node**: 20
**PR**: ${{ needs.benchmark.outputs.PR-BENCH-20 }}
**MAIN**: ${{ needs.benchmark.outputs.MAIN-BENCH-20 }}
2 changes: 1 addition & 1 deletion .github/workflows/benchmark.yml
@@ -91,7 +91,7 @@ jobs:
**MAIN**: ${{ needs.benchmark.outputs.MAIN-BENCH-20 }}
remove-label:
if: "always()"
if: ${{ github.event.label.name == 'benchmark' }}
needs:
- benchmark
- output-benchmark
25 changes: 0 additions & 25 deletions .github/workflows/sync-next.yml

This file was deleted.

3 changes: 3 additions & 0 deletions docs/Reference/Hooks.md
@@ -106,6 +106,9 @@ returned stream. This property is used to correctly match the request payload
with the `Content-Length` header value. Ideally, this property should be updated
on each received chunk.

**Notice:** The size of the returned stream is checked and must not exceed the
limit set by the [`bodyLimit`](./Server.md#bodylimit) option.

### preValidation

If you are using the `preValidation` hook, you can change the payload before it
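A minimal sketch of how the `preParsing` size check documented above plays out in practice, assuming a gzip-encoded request body (the route path and port are illustrative): the hook returns the decoded stream and exposes `receivedEncodedLength`, so Fastify can compare both the decoded and the encoded size against `bodyLimit`, mirroring the test added in `test/bodyLimit.test.js` further down.

```js
const Fastify = require('fastify')
const zlib = require('node:zlib')

const app = Fastify({ bodyLimit: 1048576 }) // 1 MiB, the default

app.addHook('preParsing', async (request, reply, payload) => {
  if (request.headers['content-encoding'] !== 'gzip') return payload

  const gunzip = zlib.createGunzip()
  // Report how many encoded bytes have been consumed so far.
  Object.defineProperty(gunzip, 'receivedEncodedLength', {
    get () { return gunzip.bytesWritten }
  })
  payload.pipe(gunzip)
  return gunzip
})

app.post('/echo', async (request) => request.body)

app.listen({ port: 3000 })
```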
4 changes: 4 additions & 0 deletions docs/Reference/Server.md
@@ -282,6 +282,10 @@ attacks](https://www.owasp.org/index.php/Regular_expression_Denial_of_Service_-_
+ Default: `1048576` (1MiB)

Defines the maximum payload, in bytes, the server is allowed to accept.
The default body reader sends an [`FST_ERR_CTP_BODY_TOO_LARGE`](./Errors.md#fst_err_ctp_body_too_large)
reply if the size of the body exceeds this limit.
If a [`preParsing` hook](./Hooks.md#preparsing) is provided, this limit is applied
to the size of the stream the hook returns (i.e. the size of the "decoded" body).

### `onProtoPoisoning`
<a id="factory-on-proto-poisoning"></a>
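A short sketch of the `bodyLimit` behaviour documented above, under stated assumptions (route path and handler are illustrative): oversized bodies are rejected with a 413 `FST_ERR_CTP_BODY_TOO_LARGE` error before the route handler runs, and the limit can also be tightened per route.

```js
const Fastify = require('fastify')

const app = Fastify({ bodyLimit: 1048576 }) // server-wide default: 1 MiB

// A stricter per-route limit, as exercised by the new test below.
app.post('/small-bodies-only', { bodyLimit: 20000 }, async () => {
  return { received: true }
})

// Optionally customise the reply for oversized payloads.
app.setErrorHandler((err, request, reply) => {
  if (err.code === 'FST_ERR_CTP_BODY_TOO_LARGE') {
    return reply.code(413).send({ error: 'Payload Too Large' })
  }
  reply.send(err)
})
```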
7 changes: 5 additions & 2 deletions lib/contentTypeParser.js
@@ -226,8 +226,11 @@ function rawBody (request, reply, options, parser, done) {

function onData (chunk) {
receivedLength += chunk.length

if ((payload.receivedEncodedLength || receivedLength) > limit) {
const { receivedEncodedLength = 0 } = payload
// The resulting body length must not exceed bodyLimit (see "zip bomb").
// The case where the encoded length is larger than the received length is rather theoretical,
// unless the stream returned by the preParsing hook is broken and reports a wrong value.
if (receivedLength > limit || receivedEncodedLength > limit) {
payload.removeListener('data', onData)
payload.removeListener('end', onEnd)
payload.removeListener('error', onEnd)
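A standalone illustration of the "zip bomb" asymmetry the new comment refers to, assuming gzip encoding: a few kilobytes on the wire can decode to many megabytes, which is why the decoded `receivedLength` is checked against the limit in addition to `receivedEncodedLength`.

```js
const zlib = require('node:zlib')

const decoded = Buffer.alloc(10 * 1024 * 1024, 'a') // 10 MiB of repeated data
const encoded = zlib.gzipSync(decoded)

console.log('encoded bytes:', encoded.length)                  // roughly 10 KB
console.log('decoded bytes:', zlib.gunzipSync(encoded).length) // 10485760
```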
15 changes: 9 additions & 6 deletions lib/pluginUtils.js
@@ -25,12 +25,15 @@ function getPluginName (func) {
// let's see if this is a file, and in that case use that
// this is common for plugins
const cache = require.cache
const keys = Object.keys(cache)

for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (cache[key].exports === func) {
return key
// cache is undefined inside SEA
if (cache) {
const keys = Object.keys(cache)

for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (cache[key].exports === func) {
return key
}
}
}

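A minimal standalone sketch of the guarded lookup introduced above (the helper name is illustrative): when `require.cache` is unavailable, as inside a Node.js single executable application (SEA), the cache walk is skipped and the plugin name falls back to the function name, which is what the new test in `test/internals/plugin.test.js` asserts.

```js
function resolvePluginName (func) {
  const cache = require.cache
  // cache is undefined inside a SEA, so guard before walking it
  if (cache) {
    for (const key of Object.keys(cache)) {
      if (cache[key].exports === func) return key
    }
  }
  // fall back to the function's own name
  return func.name
}
```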
5 changes: 4 additions & 1 deletion lib/wrapThenable.js
@@ -18,7 +18,10 @@ function wrapThenable (thenable, reply) {
// The request may be terminated during the reply. In this situation,
// it requires an extra check of request.aborted to see whether
// the request was killed by the client.
if (payload !== undefined || (reply.sent === false && reply.raw.headersSent === false && reply.request.raw.aborted === false)) {
// Most of the time aborted will be true when destroyed is true,
// however there is a race condition where the request is not
// aborted but only destroyed.
if (payload !== undefined || (reply.sent === false && reply.raw.headersSent === false && reply.request.raw.aborted === false && reply.request.raw.destroyed === false)) {
// we use a try-catch internally to avoid adding a catch to another
// promise, increase promise perf by 10%
try {
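A condensed sketch of the new guard (the helper name is illustrative): the resolved payload is only forwarded when the reply has not gone out yet and the underlying request is neither aborted nor destroyed, which covers the destroyed-but-not-aborted race described in the comment and exercised by the new test in `test/wrapThenable.test.js`.

```js
function canSendPayload (reply) {
  return reply.sent === false &&
    reply.raw.headersSent === false &&
    reply.request.raw.aborted === false &&
    reply.request.raw.destroyed === false
}
```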
9 changes: 5 additions & 4 deletions package.json
@@ -14,10 +14,11 @@
"coverage:ci": "c8 --reporter=lcov tap --coverage-report=html --no-browser --no-check-coverage",
"coverage:ci-check-coverage": "c8 check-coverage --branches 100 --functions 100 --lines 100 --statements 100",
"lint": "npm run lint:standard && npm run lint:typescript && npm run lint:markdown",
"lint:fix": "standard --fix",
"lint:fix": "standard --fix && npm run lint:typescript:fix",
"lint:markdown": "markdownlint-cli2",
"lint:standard": "standard | snazzy",
"lint:typescript": "eslint -c types/.eslintrc.json types/**/*.d.ts test/types/**/*.test-d.ts",
"lint:typescript:fix": "npm run lint:typescript -- --fix",
"prepublishOnly": "cross-env PREPUBLISH=true tap --no-check-coverage test/build/**.test.js && npm run test:validator:integrity",
"test": "npm run lint && npm run unit && npm run test:typescript",
"test:ci": "npm run unit -- --cov --coverage-report=lcovonly && npm run test:typescript",
@@ -133,11 +134,11 @@
"homepage": "https://www.fastify.io/",
"devDependencies": {
"@fastify/pre-commit": "^2.0.2",
"@sinclair/typebox": "^0.30.2",
"@sinclair/typebox": "^0.31.1",
"@sinonjs/fake-timers": "^11.0.0",
"@types/node": "^20.1.0",
"@typescript-eslint/eslint-plugin": "^5.59.2",
"@typescript-eslint/parser": "^5.59.2",
"@typescript-eslint/eslint-plugin": "^6.3.0",
"@typescript-eslint/parser": "^6.3.0",
"ajv": "^8.12.0",
"ajv-errors": "^3.0.0",
"ajv-formats": "^2.1.1",
69 changes: 69 additions & 0 deletions test/bodyLimit.test.js
@@ -2,6 +2,7 @@

const Fastify = require('..')
const sget = require('simple-get').concat
const zlib = require('zlib')
const t = require('tap')
const test = t.test

@@ -45,6 +46,74 @@ test('bodyLimit', t => {
})
})

test('bodyLimit is applied to decoded content', t => {
t.plan(9)

const body = { x: 'x'.repeat(30000) }
const json = JSON.stringify(body)
const encoded = zlib.gzipSync(json)

const fastify = Fastify()

fastify.addHook('preParsing', async (req, reply, payload) => {
t.equal(req.headers['content-length'], `${encoded.length}`)
const unzip = zlib.createGunzip()
Object.defineProperty(unzip, 'receivedEncodedLength', {
get () {
return unzip.bytesWritten
}
})
payload.pipe(unzip)
return unzip
})

fastify.post('/body-limit-40k', {
bodyLimit: 40000,
onError: async (req, res, err) => {
t.fail('should not be called')
}
}, (request, reply) => {
reply.send({ x: request.body.x })
})

fastify.post('/body-limit-20k', {
bodyLimit: 20000,
onError: async (req, res, err) => {
t.equal(err.code, 'FST_ERR_CTP_BODY_TOO_LARGE')
t.equal(err.statusCode, 413)
}
}, (request, reply) => {
reply.send({ x: 'handler should not be called' })
})

fastify.inject({
method: 'POST',
url: '/body-limit-40k',
headers: {
'content-encoding': 'gzip',
'content-type': 'application/json'
},
payload: encoded
}, (err, res) => {
t.error(err)
t.equal(res.statusCode, 200)
t.same(res.json(), body)
})

fastify.inject({
method: 'POST',
url: '/body-limit-20k',
headers: {
'content-encoding': 'gzip',
'content-type': 'application/json'
},
payload: encoded
}, (err, res) => {
t.error(err)
t.equal(res.statusCode, 413)
})
})

test('default request.routeOptions.bodyLimit should be 1048576', t => {
t.plan(4)
const fastify = Fastify()
15 changes: 15 additions & 0 deletions test/internals/plugin.test.js
@@ -29,6 +29,21 @@ test('getPluginName should return plugin name if the file is cached', t => {
t.equal(pluginName, expectedPluginName)
})

test('getPluginName should not throw when require.cache is undefined', t => {
t.plan(1)
function example () {
console.log('is just an example')
}
const cache = require.cache
require.cache = undefined
t.teardown(() => {
require.cache = cache
})
const pluginName = pluginUtilsPublic.getPluginName(example)

t.equal(pluginName, 'example')
})

test("getMeta should return the object stored with the 'plugin-meta' symbol", t => {
t.plan(1)

13 changes: 13 additions & 0 deletions test/types/hooks.test-d.ts
@@ -392,3 +392,16 @@ server.addHook('preClose', function (done) {
server.addHook('preClose', async function () {
expectType<FastifyInstance>(this)
})

expectError(server.addHook('onClose', async function (instance, done) {}))
expectError(server.addHook('onError', async function (request, reply, error, done) {}))
expectError(server.addHook('onReady', async function (done) {}))
expectError(server.addHook('onRequest', async function (request, reply, done) {}))
expectError(server.addHook('onRequestAbort', async function (request, done) {}))
expectError(server.addHook('onResponse', async function (request, reply, done) {}))
expectError(server.addHook('onSend', async function (request, reply, payload, done) {}))
expectError(server.addHook('onTimeout', async function (request, reply, done) {}))
expectError(server.addHook('preClose', async function (done) {}))
expectError(server.addHook('preHandler', async function (request, reply, done) {}))
expectError(server.addHook('preSerialization', async function (request, reply, payload, done) {}))
expectError(server.addHook('preValidation', async function (request, reply, done) {}))
22 changes: 22 additions & 0 deletions test/wrapThenable.test.js
@@ -27,3 +27,25 @@ test('should reject immediately when reply[kReplyHijacked] is true', t => {
const thenable = Promise.reject(new Error('Reply sent already'))
wrapThenable(thenable, reply)
})

test('should not send the payload if the raw socket was destroyed but not aborted', async t => {
const reply = {
sent: false,
raw: {
headersSent: false
},
request: {
raw: {
aborted: false,
destroyed: true
}
},
send () {
t.fail('should not send')
}
}
const thenable = Promise.resolve()
wrapThenable(thenable, reply)

await thenable
})
