diff --git a/.c8rc b/.c8rc new file mode 100644 index 0000000..8738d13 --- /dev/null +++ b/.c8rc @@ -0,0 +1,14 @@ +{ + "all": true, + "exclude": [ + "test/", + "coverage/", + "types/" + ], + "clean": true, + "check-coverage": true, + "branches": 100, + "lines": 100, + "functions": 100, + "statements": 100 +} \ No newline at end of file diff --git a/.taprc b/.taprc deleted file mode 100644 index 7b26380..0000000 --- a/.taprc +++ /dev/null @@ -1,3 +0,0 @@ -disable-coverage: true -files: - - test/**/*.test.js diff --git a/index.js b/index.js index a6025e0..609f57b 100644 --- a/index.js +++ b/index.js @@ -118,7 +118,7 @@ const recommendedDefaultBrotliOptions = { } function processCompressParams (opts) { - /* istanbul ignore next */ + /* c8 ignore next 3 */ if (!opts) { return } @@ -148,7 +148,7 @@ function processCompressParams (opts) { params.uncompressStream = { // Currently params.uncompressStream.br() is never called as we do not have any way to autodetect brotli compression in `fastify-compress` // Brotli documentation reference: [RFC 7932](https://www.rfc-editor.org/rfc/rfc7932) - br: /* istanbul ignore next */ () => ((opts.zlib || zlib).createBrotliDecompress || zlib.createBrotliDecompress)(params.brotliOptions), + br: /* c8 ignore next */ () => ((opts.zlib || zlib).createBrotliDecompress || zlib.createBrotliDecompress)(params.brotliOptions), gzip: () => ((opts.zlib || zlib).createGunzip || zlib.createGunzip)(params.zlibOptions), deflate: () => ((opts.zlib || zlib).createInflate || zlib.createInflate)(params.zlibOptions) } @@ -165,7 +165,7 @@ function processCompressParams (opts) { } function processDecompressParams (opts) { - /* istanbul ignore next */ + /* c8 ignore next 3 */ if (!opts) { return } @@ -525,8 +525,8 @@ function zipStream (deflate, encoding) { function unzipStream (inflate, maxRecursion) { if (!(maxRecursion >= 0)) maxRecursion = 3 return peek({ newline: false, maxBuffer: 10 }, function (data, swap) { - /* istanbul ignore if */ // This path is 
never taken, when `maxRecursion` < 0 it is automatically set back to 3 + /* c8 ignore next */ if (maxRecursion < 0) return swap(new Error('Maximum recursion reached')) switch (isCompressed(data)) { case 1: return swap(null, pumpify(inflate.gzip(), unzipStream(inflate, maxRecursion - 1))) @@ -555,7 +555,7 @@ function createError (code, message, statusCode) { FastifyCompressError.prototype[Symbol.toStringTag] = 'Error' - /* istanbul ignore next */ + /* c8 ignore next 3 */ FastifyCompressError.prototype.toString = function () { return `${this.name} [${this.code}]: ${this.message}` } diff --git a/package.json b/package.json index 4927ccf..aee942b 100644 --- a/package.json +++ b/package.json @@ -17,14 +17,14 @@ }, "devDependencies": { "@fastify/pre-commit": "^2.1.0", - "@types/node": "^20.12.7", + "@types/node": "^22.0.0", "@typescript-eslint/eslint-plugin": "^7.6.0", "@typescript-eslint/parser": "^7.6.0", "adm-zip": "^0.5.12", + "c8": "^10.1.2", "fastify": "^5.0.0-alpha.3", "jsonstream": "^1.0.3", "standard": "^17.1.0", - "tap": "^18.7.2", "tsd": "^0.31.0", "typescript": "^5.4.5" }, @@ -34,8 +34,8 @@ "lint:fix:typescript": "npm run lint:fix -- --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin \"**/*.d.ts\"", "test": "npm run test:unit && npm run test:typescript", "test:typescript": "tsd", - "test:unit": "tap", - "test:coverage": "npm run test:unit -- --coverage-report=html", + "test:unit": "node --test", + "test:coverage": "c8 node --test && c8 report --reporter=html", - "test:unit:verbose": "npm run test:unit -- -Rspec" + "test:unit:verbose": "npm run test:unit -- --test-reporter=spec" }, "keywords": [ diff --git a/test/global-compress.test.js b/test/global-compress.test.js index 92be91f..ecc7f92 100644 --- a/test/global-compress.test.js +++ b/test/global-compress.test.js @@ -1,6 +1,6 @@ 'use strict' -const { test } = require('tap') +const { describe, test } = require('node:test') const { createReadStream, readFile, readFileSync } = require('node:fs') const { Readable, Writable, PassThrough } =
require('node:stream') const zlib = require('node:zlib') @@ -10,8 +10,8 @@ const Fastify = require('fastify') const compressPlugin = require('../index') const { once } = require('node:events') -test('When `global` is not set, it is `true` by default :', async (t) => { - t.test('it should compress Buffer data using brotli when `Accept-Encoding` request header is `br`', async (t) => { +describe('When `global` is not set, it is `true` by default :', async () => { + test('it should compress Buffer data using brotli when `Accept-Encoding` request header is `br`', async (t) => { t.plan(1) const fastify = Fastify() @@ -30,10 +30,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('it should compress Buffer data using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { + test('it should compress Buffer data using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { t.plan(1) const fastify = Fastify() @@ -52,10 +52,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('it should compress Buffer data using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { + test('it should compress Buffer data using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { t.plan(1) const fastify = Fastify() @@ -74,10 +74,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('it 
should compress JSON data using brotli when `Accept-Encoding` request header is `br`', async (t) => { + test('it should compress JSON data using brotli when `Accept-Encoding` request header is `br`', async (t) => { t.plan(1) const fastify = Fastify() @@ -97,10 +97,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('it should compress JSON data using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { + test('it should compress JSON data using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { t.plan(1) const fastify = Fastify() @@ -120,10 +120,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('it should compress JSON data using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { + test('it should compress JSON data using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { t.plan(1) const fastify = Fastify() @@ -142,10 +142,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('it should compress string data using brotli when `Accept-Encoding` request header is `br', async (t) => { + test('it should compress string data using brotli when `Accept-Encoding` request header is `br', async (t) => { t.plan(1) const fastify = Fastify() @@ -165,10 +165,10 @@ test('When `global` is not set, it is `true` by default :', 
async (t) => { } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) - t.test('it should compress string data using deflate when `Accept-Encoding` request header is `deflate', async (t) => { + test('it should compress string data using deflate when `Accept-Encoding` request header is `deflate', async (t) => { t.plan(1) const fastify = Fastify() @@ -188,10 +188,10 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) - t.test('it should compress string data using gzip when `Accept-Encoding` request header is `gzip', async (t) => { + test('it should compress string data using gzip when `Accept-Encoding` request header is `gzip', async (t) => { t.plan(1) const fastify = Fastify() @@ -211,12 +211,12 @@ test('When `global` is not set, it is `true` by default :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) }) -test('It should send compressed Stream data when `global` is `true` :', async (t) => { - t.test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { +describe('It should send compressed Stream data when `global` is `true` :', async () => { + test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { t.plan(3) const fastify = Fastify() @@ -237,12 +237,12 @@ test('It should send compressed Stream data when `global` is `true` :', async (t }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'br') - t.equal(payload.toString('utf-8'), 
file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { + test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { t.plan(4) const fastify = Fastify() @@ -263,13 +263,13 @@ test('It should send compressed Stream data when `global` is `true` :', async (t }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { + test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { t.plan(3) const fastify = Fastify() @@ -290,14 +290,14 @@ test('It should send compressed Stream data when `global` is `true` :', async (t }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) }) -test('It should send compressed Buffer data when `global` is `true` :', async (t) => { - t.test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { 
+describe('It should send compressed Buffer data when `global` is `true` :', async () => { + test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { t.plan(1) const fastify = Fastify() @@ -316,10 +316,10 @@ test('It should send compressed Buffer data when `global` is `true` :', async (t } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { + test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { t.plan(1) const fastify = Fastify() @@ -338,10 +338,10 @@ test('It should send compressed Buffer data when `global` is `true` :', async (t } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { + test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { t.plan(1) const fastify = Fastify() @@ -360,12 +360,12 @@ test('It should send compressed Buffer data when `global` is `true` :', async (t } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) }) -test('It should send compressed JSON data when `global` is `true` :', async (t) => { - t.test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { +describe('It should send compressed JSON data when `global` is `true` :', async () => { + test('using brotli when `Accept-Encoding` request header is `br`', async (t) => { t.plan(1) const fastify = Fastify() @@ -384,10 +384,10 @@ test('It should send compressed JSON data when `global` is `true` :', async (t) } }) const payload = 
zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { + test('using deflate when `Accept-Encoding` request header is `deflate`', async (t) => { t.plan(1) const fastify = Fastify() @@ -406,10 +406,10 @@ test('It should send compressed JSON data when `global` is `true` :', async (t) } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { + test('using gzip when `Accept-Encoding` request header is `gzip`', async (t) => { t.plan(1) const fastify = Fastify() @@ -428,12 +428,12 @@ test('It should send compressed JSON data when `global` is `true` :', async (t) } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) }) -test('It should fallback to the default `gzip` encoding compression :', async (t) => { - t.test('when `Accept-Encoding` request header value is set to `*`', async (t) => { +describe('It should fallback to the default `gzip` encoding compression :', async () => { + test('when `Accept-Encoding` request header value is set to `*`', async (t) => { t.plan(3) const fastify = Fastify() @@ -454,12 +454,12 @@ test('It should fallback to the default `gzip` encoding compression :', async (t }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + 
t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('when `Accept-Encoding` request header value is set to multiple `*` directives', async (t) => { + test('when `Accept-Encoding` request header value is set to multiple `*` directives', async (t) => { t.plan(3) const fastify = Fastify() @@ -480,14 +480,14 @@ test('It should fallback to the default `gzip` encoding compression :', async (t }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) }) -test('When a custom `zlib` option is provided, it should compress data :`', async (t) => { - t.test('using the custom `createBrotliCompress()` method', async (t) => { +describe('When a custom `zlib` option is provided, it should compress data :`', async () => { + test('using the custom `createBrotliCompress()` method', async (t) => { t.plan(5) let usedCustom = false @@ -509,17 +509,17 @@ test('When a custom `zlib` option is provided, it should compress data :`', asyn 'accept-encoding': 'br' } }) - t.equal(usedCustom, true) + t.assert.equal(usedCustom, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'br') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.ok(!response.headers['content-length'], 'no content length') + 
t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using the custom `createDeflate()` method', async (t) => { + test('using the custom `createDeflate()` method', async (t) => { t.plan(5) let usedCustom = false @@ -541,17 +541,17 @@ test('When a custom `zlib` option is provided, it should compress data :`', asyn 'accept-encoding': 'deflate' } }) - t.equal(usedCustom, true) + t.assert.equal(usedCustom, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using the custom `createGzip()` method', async (t) => { + test('using the custom `createGzip()` method', async (t) => { t.plan(4) let usedCustom = false @@ -573,18 +573,18 @@ test('When a custom `zlib` option is provided, it should compress data :`', asyn 'accept-encoding': 'gzip' } }) - t.equal(usedCustom, true) + t.assert.equal(usedCustom, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) }) -test('When a malformed custom `zlib` option is provided, it should compress data :', async (t) => { - t.test('using the fallback default Node.js core `zlib.createBrotliCompress()` method', async (t) => { 
+describe('When a malformed custom `zlib` option is provided, it should compress data :', async () => { + test('using the fallback default Node.js core `zlib.createBrotliCompress()` method', async (t) => { t.plan(1) const fastify = Fastify() @@ -608,10 +608,10 @@ test('When a malformed custom `zlib` option is provided, it should compress data } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) - t.test('using the fallback default Node.js core `zlib.createDeflate()` method', async (t) => { + test('using the fallback default Node.js core `zlib.createDeflate()` method', async (t) => { t.plan(1) const fastify = Fastify() @@ -635,10 +635,10 @@ test('When a malformed custom `zlib` option is provided, it should compress data } }) const payload = zlib.inflateSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) - t.test('using the fallback default Node.js core `zlib.createGzip()` method', async (t) => { + test('using the fallback default Node.js core `zlib.createGzip()` method', async (t) => { t.plan(1) const fastify = Fastify() @@ -662,12 +662,12 @@ test('When a malformed custom `zlib` option is provided, it should compress data } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), 'hello') + t.assert.equal(payload.toString('utf-8'), 'hello') }) }) -test('When `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true` :', async (t) => { - t.test('it should uncompress payloads using the deflate algorithm', async (t) => { +describe('When `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true` :', async () => { + test('it should uncompress payloads using the deflate algorithm', async (t) => { t.plan(4) const fastify = Fastify() @@ -685,13 +685,13 @@ test('When `inflateIfDeflated` is `true` and 
`X-No-Compression` request header i 'x-no-compression': true } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) - t.test('it should uncompress payloads using the gzip algorithm', async (t) => { + test('it should uncompress payloads using the gzip algorithm', async (t) => { t.plan(4) const fastify = Fastify() @@ -709,10 +709,10 @@ test('When `inflateIfDeflated` is `true` and `X-No-Compression` request header i 'x-no-compression': true } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) }) @@ -738,16 +738,16 @@ test('it should not uncompress payloads using the zip algorithm', async (t) => { 'x-no-compression': true } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(response.rawPayload, fileBuffer) - t.equal(response.payload, fileBuffer.toString('utf-8')) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(response.rawPayload, fileBuffer) + t.assert.equal(response.payload, fileBuffer.toString('utf-8')) }) -test('It should not compress :', async (t) => { - t.test('Using `reply.compress()` :', async (t) => { - t.test('when payload length is smaller than the `threshold` defined value', async (t) => { +describe('It should not compress :', async () => { + 
describe('Using `reply.compress()` :', async () => { + test('when payload length is smaller than the `threshold` defined value', async (t) => { t.plan(4) const fastify = Fastify() @@ -766,13 +766,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'deflate' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'a message') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'a message') }) - t.test('when `customTypes` is set and does not match `Content-Type` reply header or `mime-db`', async (t) => { + test('when `customTypes` is set and does not match `Content-Type` reply header or `mime-db`', async (t) => { t.plan(3) const fastify = Fastify() @@ -791,12 +791,12 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip' } }) - t.notOk(response.headers.vary, 'accept-encoding') - t.notOk(response.headers['content-encoding']) - t.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary, 'accept-encoding') + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) }) - t.test('when `customTypes` is a function and returns false on the provided `Content-Type` reply header`', async (t) => { + test('when `customTypes` is a function and returns false on the provided `Content-Type` reply header`', async (t) => { t.plan(3) const fastify = Fastify() @@ -815,12 +815,12 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip' } }) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) }) - t.test('when `X-No-Compression` request header is `true`', async (t) => { + 
test('when `X-No-Compression` request header is `true`', async (t) => { t.plan(4) const fastify = Fastify() @@ -838,13 +838,13 @@ test('It should not compress :', async (t) => { 'x-no-compression': true } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse(response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse(response.payload), json) }) - t.test('when `Content-Type` reply header is not set and the content is not detected as a compressible type', async (t) => { + test('when `Content-Type` reply header is not set and the content is not detected as a compressible type', async (t) => { t.plan(3) const fastify = Fastify() @@ -868,12 +868,12 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'identity' } }) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, JSON.stringify(json)) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, JSON.stringify(json)) }) - t.test('when `Content-Type` reply header is a mime type with undefined compressible values', async (t) => { + test('when `Content-Type` reply header is a mime type with undefined compressible values', async (t) => { t.plan(4) const fastify = Fastify() @@ -892,13 +892,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip, deflate, br' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'hello') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'hello') }) - t.test('when `Content-Type` reply header value is 
`text/event-stream`', async (t) => { + test('when `Content-Type` reply header value is `text/event-stream`', async (t) => { t.plan(4) const fastify = Fastify() @@ -920,13 +920,13 @@ test('It should not compress :', async (t) => { url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(response.payload, 'event: open\n\nevent: change\ndata: schema\n\n') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(response.payload, 'event: open\n\nevent: change\ndata: schema\n\n') }) - t.test('when `Content-Type` reply header value is an invalid type', async (t) => { + test('when `Content-Type` reply header value is an invalid type', async (t) => { t.plan(4) const fastify = Fastify() @@ -945,13 +945,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'deflate' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'a message') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'a message') }) - t.test('when `Accept-Encoding` request header is missing', async (t) => { + test('when `Accept-Encoding` request header is missing', async (t) => { t.plan(3) const fastify = Fastify() @@ -967,12 +967,12 @@ test('It should not compress :', async (t) => { url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) }) - t.test('when `Accept-Encoding` request header is set to `identity`', async (t) => { + test('when `Accept-Encoding` request header is 
set to `identity`', async (t) => { t.plan(3) const fastify = Fastify() @@ -990,12 +990,12 @@ test('It should not compress :', async (t) => { } }) const payload = JSON.parse(response.payload) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same({ hello: 'world' }, payload) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual({ hello: 'world' }, payload) }) - t.test('when `Accept-Encoding` request header value is not supported', async (t) => { + test('when `Accept-Encoding` request header value is not supported', async (t) => { t.plan(3) const fastify = Fastify() @@ -1015,12 +1015,12 @@ test('It should not compress :', async (t) => { } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.payload, file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.payload, file) }) - t.test('when `Accept-Encoding` request header value is not supported (with quality value)', async (t) => { + test('when `Accept-Encoding` request header value is not supported (with quality value)', async (t) => { t.plan(3) const fastify = Fastify() @@ -1040,12 +1040,12 @@ test('It should not compress :', async (t) => { } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.payload, file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.payload, file) }) - t.test('when `Accept-Encoding` request header is set to `identity and `inflateIfDeflated` is `true``', async (t) => { + test('when `Accept-Encoding` request header is set to `identity and `inflateIfDeflated` is `true``', async (t) => { t.plan(3) const fastify = Fastify() @@ -1063,14 +1063,14 @@ test('It should not compress :', async (t) => { } }) const payload = 
JSON.parse(response.payload) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same({ hello: 'world' }, payload) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual({ hello: 'world' }, payload) }) }) - t.test('Using `onSend` hook :', async (t) => { - t.test('when there is no payload', async (t) => { + describe('Using `onSend` hook :', async () => { + test('when there is no payload', async (t) => { t.plan(4) const fastify = Fastify() @@ -1087,13 +1087,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, '') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, '') }) - t.test('when payload length is smaller than the `threshold` defined value', async (t) => { + test('when payload length is smaller than the `threshold` defined value', async (t) => { t.plan(4) const fastify = Fastify() @@ -1112,13 +1112,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'deflate' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'a message') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'a message') }) - t.test('when `customTypes` is set and does not match `Content-Type` reply header or `mime-db`', async (t) => { + test('when `customTypes` is set and does not match `Content-Type` reply header or `mime-db`', async (t) => { t.plan(3) const fastify = Fastify() @@ -1137,12 +1137,12 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip' } }) - 
t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) }) - t.test('when `X-No-Compression` request header is `true`', async (t) => { + test('when `X-No-Compression` request header is `true`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1160,13 +1160,13 @@ test('It should not compress :', async (t) => { 'x-no-compression': true } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse(response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse(response.payload), json) }) - t.test('when `Content-Type` reply header is not set and the content is not detected as a compressible type', async (t) => { + test('when `Content-Type` reply header is not set and the content is not detected as a compressible type', async (t) => { t.plan(3) const fastify = Fastify() @@ -1190,12 +1190,12 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'identity' } }) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, JSON.stringify(json)) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, JSON.stringify(json)) }) - t.test('when `Content-Type` reply header is a mime type with undefined compressible values', async (t) => { + test('when `Content-Type` reply header is a mime type with undefined compressible values', async (t) => { t.plan(4) const fastify = Fastify() @@ -1214,13 +1214,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'gzip, deflate, br' } }) - t.equal(response.statusCode, 200) - 
t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'hello') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'hello') }) - t.test('when `Content-Type` reply header value is `text/event-stream`', async (t) => { + test('when `Content-Type` reply header value is `text/event-stream`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1242,13 +1242,13 @@ test('It should not compress :', async (t) => { url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(response.payload, 'event: open\n\nevent: change\ndata: schema\n\n') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(response.payload, 'event: open\n\nevent: change\ndata: schema\n\n') }) - t.test('when `Content-Type` reply header value is an invalid type', async (t) => { + test('when `Content-Type` reply header value is an invalid type', async (t) => { t.plan(4) const fastify = Fastify() @@ -1267,13 +1267,13 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'deflate' } }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.payload, 'a message') + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.payload, 'a message') }) - t.test('when `Accept-Encoding` request header is missing', async (t) => { + test('when `Accept-Encoding` request header is missing', async (t) => { t.plan(3) const fastify = Fastify() @@ -1289,12 +1289,12 @@ test('It should not compress :', async (t) => { url: '/', method: 'GET' }) - t.equal(response.statusCode, 
200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) }) - t.test('when `Accept-Encoding` request header is set to `identity`', async (t) => { + test('when `Accept-Encoding` request header is set to `identity`', async (t) => { t.plan(3) const fastify = Fastify() @@ -1312,12 +1312,12 @@ test('It should not compress :', async (t) => { } }) const payload = JSON.parse(response.payload) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same({ hello: 'world' }, payload) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual({ hello: 'world' }, payload) }) - t.test('when `Accept-Encoding` request header value is not supported', async (t) => { + test('when `Accept-Encoding` request header value is not supported', async (t) => { t.plan(2) const fastify = Fastify() @@ -1336,11 +1336,11 @@ test('It should not compress :', async (t) => { 'accept-encoding': 'invalid' } }) - t.equal(response.statusCode, 200) - t.equal(response.payload, 'something') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.payload, 'something') }) - t.test('when `Accept-Encoding` request header value is not supported (with quality value)', async (t) => { + test('when `Accept-Encoding` request header value is not supported (with quality value)', async (t) => { t.plan(3) const fastify = Fastify() @@ -1360,12 +1360,12 @@ test('It should not compress :', async (t) => { } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.payload, file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.payload, file) }) - t.test('when `Accept-Encoding` request header is set to `identity and 
`inflateIfDeflated` is `true``', async (t) => { + test('when `Accept-Encoding` request header is set to `identity and `inflateIfDeflated` is `true``', async (t) => { t.plan(3) const fastify = Fastify() @@ -1383,15 +1383,15 @@ test('It should not compress :', async (t) => { } }) const payload = JSON.parse(response.payload) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same({ hello: 'world' }, payload) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual({ hello: 'world' }, payload) }) }) }) -test('It should not double-compress :', async (t) => { - t.test('when using `reply.compress()` to send an already deflated Stream', async (t) => { +describe('It should not double-compress :', async () => { + test('when using `reply.compress()` to send an already deflated Stream', async (t) => { t.plan(3) const fastify = Fastify() @@ -1414,12 +1414,12 @@ test('It should not double-compress :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('when using `reply.compress()` to send an already gzipped Stream', async (t) => { + test('when using `reply.compress()` to send an already gzipped Stream', async (t) => { t.plan(3) const fastify = Fastify() @@ -1442,12 +1442,12 @@ test('It should not double-compress :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) 
+ t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('when using `onSend` hook to send an already brotli compressed Stream', async (t) => { + test('when using `onSend` hook to send an already brotli compressed Stream', async (t) => { t.plan(4) const fastify = Fastify() @@ -1472,13 +1472,13 @@ test('It should not double-compress :', async (t) => { } }) const payload = zlib.brotliDecompressSync(response.rawPayload) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'br') - t.equal(response.headers['content-length'], response.rawPayload.length.toString()) - t.equal(payload.toString('utf-8'), file) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.equal(response.headers['content-length'], response.rawPayload.length.toString()) + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('when using `onSend` hook to send an already deflated Stream', async (t) => { + test('when using `onSend` hook to send an already deflated Stream', async (t) => { t.plan(4) const fastify = Fastify() @@ -1503,13 +1503,13 @@ test('It should not double-compress :', async (t) => { } }) const payload = zlib.inflateSync(response.rawPayload) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.headers['content-length'], response.rawPayload.length.toString()) - t.equal(payload.toString('utf-8'), file) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(response.headers['content-length'], response.rawPayload.length.toString()) + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('when using `onSend` hook to send an already gzipped Stream', async (t) => { + test('when using `onSend` hook to send an already gzipped Stream', async (t) => { 
t.plan(4) const fastify = Fastify() @@ -1534,16 +1534,16 @@ test('It should not double-compress :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(response.headers['content-length'], response.rawPayload.length.toString()) - t.equal(payload.toString('utf-8'), file) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(response.headers['content-length'], response.rawPayload.length.toString()) + t.assert.equal(payload.toString('utf-8'), file) }) }) -test('It should not compress Stream data and add a `Content-Encoding` reply header :', async (t) => { - t.test('Using `onSend` hook if `Accept-Encoding` request header value is `identity`', async (t) => { - t.test('when `inflateIfDeflated` is `true` and `encodings` is not set', async (t) => { +describe('It should not compress Stream data and add a `Content-Encoding` reply header :', async () => { + describe('Using `onSend` hook if `Accept-Encoding` request header value is `identity`', async () => { + test('when `inflateIfDeflated` is `true` and `encodings` is not set', async (t) => { t.plan(4) const fastify = Fastify() @@ -1563,13 +1563,13 @@ test('It should not compress Stream data and add a `Content-Encoding` reply head } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'identity') - t.equal(file, response.payload) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'identity') + t.assert.equal(file, response.payload) }) - t.test('when `inflateIfDeflated` is `true` and `encodings` is set', async (t) => { + test('when `inflateIfDeflated` is `true` and `encodings` is set', async (t) => { t.plan(4) const fastify = Fastify() @@ -1592,15 
+1592,15 @@ test('It should not compress Stream data and add a `Content-Encoding` reply head } }) const file = readFileSync('./package.json', 'utf-8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'identity') - t.same(response.payload, file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'identity') + t.assert.deepEqual(response.payload, file) }) }) - t.test('Using `reply.compress()` if `Accept-Encoding` request header value is `identity`', async (t) => { - t.test('when `inflateIfDeflated` is `true` and `encodings` is not set', async (t) => { + describe('Using `reply.compress()` if `Accept-Encoding` request header value is `identity`', async () => { + test('when `inflateIfDeflated` is `true` and `encodings` is not set', async (t) => { t.plan(4) const fastify = Fastify() @@ -1620,13 +1620,13 @@ test('It should not compress Stream data and add a `Content-Encoding` reply head } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'identity') - t.equal(file, response.payload) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'identity') + t.assert.equal(file, response.payload) }) - t.test('when `inflateIfDeflated` is `true` and `encodings` is set', async (t) => { + test('when `inflateIfDeflated` is `true` and `encodings` is set', async (t) => { t.plan(4) const fastify = Fastify() @@ -1649,10 +1649,10 @@ test('It should not compress Stream data and add a `Content-Encoding` reply head } }) const file = readFileSync('./package.json', 'utf-8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.equal(response.headers['content-encoding'], 'identity') - t.same(response.payload, file) + 
t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.equal(response.headers['content-encoding'], 'identity') + t.assert.deepEqual(response.payload, file) }) }) }) @@ -1685,10 +1685,10 @@ test('It should return a serialized payload when `inflateIfDeflated` is `true` a 'x-no-compression': true } }) - t.equal(one.statusCode, 200) - t.notOk(one.headers.vary) - t.notOk(one.headers['content-encoding']) - t.same(JSON.parse(one.payload), json) + t.assert.equal(one.statusCode, 200) + t.assert.ok(!one.headers.vary) + t.assert.ok(!one.headers['content-encoding']) + t.assert.deepEqual(JSON.parse(one.payload), json) const two = await fastify.inject({ url: '/two', @@ -1697,10 +1697,10 @@ test('It should return a serialized payload when `inflateIfDeflated` is `true` a 'x-no-compression': true } }) - t.equal(two.statusCode, 200) - t.notOk(two.headers.vary) - t.notOk(two.headers['content-encoding']) - t.equal(two.payload, compressedBufferPayload.toString()) + t.assert.equal(two.statusCode, 200) + t.assert.ok(!two.headers.vary) + t.assert.ok(!two.headers['content-encoding']) + t.assert.equal(two.payload, compressedBufferPayload.toString()) }) test('It should close the stream', async (t) => { @@ -1713,7 +1713,7 @@ test('It should close the stream', async (t) => { const closed = once(stream, 'close') fastify.get('/', (request, reply) => { - stream.on('close', () => t.ok('stream closed')) + stream.on('close', () => t.assert.ok('stream closed')) reply .type('text/plain') @@ -1726,8 +1726,8 @@ test('It should close the stream', async (t) => { }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.equal(file, response.payload) + t.assert.equal(response.statusCode, 200) + t.assert.equal(file, response.payload) await closed }) @@ -1772,11 +1772,11 @@ test('It should log an existing error with stream onEnd handler', async (t) => { 'accept-encoding': 'gzip' } }) - t.equal(actual.msg, expect.message) + 
t.assert.equal(actual.msg, expect.message) }) -test('It should support stream1 :', async (t) => { - t.test('when using `reply.compress()`', async (t) => { +describe('It should support stream1 :', async () => { + test('when using `reply.compress()`', async (t) => { t.plan(3) const fastify = Fastify() @@ -1801,12 +1801,12 @@ test('It should support stream1 :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.same(JSON.parse(payload.toString()), [{ hello: 'world' }, { a: 42 }]) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.deepEqual(JSON.parse(payload.toString()), [{ hello: 'world' }, { a: 42 }]) }) - t.test('when using `onSend` hook', async (t) => { + test('when using `onSend` hook', async (t) => { t.plan(3) const fastify = Fastify() @@ -1831,14 +1831,14 @@ test('It should support stream1 :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.same(JSON.parse(payload.toString()), [{ hello: 'world' }, { a: 42 }]) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.deepEqual(JSON.parse(payload.toString()), [{ hello: 'world' }, { a: 42 }]) }) }) -test('It should remove `Content-Length` header :', async (t) => { - t.test('using `reply.compress()`', async (t) => { +describe('It should remove `Content-Length` header :', async () => { + test('using `reply.compress()`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1866,13 +1866,13 @@ test('It should remove `Content-Length` header :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - 
t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using `reply.compress()` on a Stream when `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true`', async (t) => { + test('using `reply.compress()` on a Stream when `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1892,13 +1892,13 @@ test('It should remove `Content-Length` header :', async (t) => { } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(file, response.payload) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(file, response.payload) }) - t.test('using `onSend` hook', async (t) => { + test('using `onSend` hook', async (t) => { t.plan(4) const fastify = Fastify() @@ -1926,13 +1926,13 @@ test('It should remove `Content-Length` header :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + 
t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using `onSend` hook on a Stream When `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true`', async (t) => { + test('using `onSend` hook on a Stream When `inflateIfDeflated` is `true` and `X-No-Compression` request header is `true`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1952,15 +1952,15 @@ test('It should remove `Content-Length` header :', async (t) => { } }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(file, response.payload) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(file, response.payload) }) }) -test('When `removeContentLengthHeader` is `false`, it should not remove `Content-Length` header :', async (t) => { - t.test('using `reply.compress()`', async (t) => { +describe('When `removeContentLengthHeader` is `false`, it should not remove `Content-Length` header :', async () => { + test('using `reply.compress()`', async (t) => { t.plan(4) const fastify = Fastify() @@ -1988,13 +1988,13 @@ test('When `removeContentLengthHeader` is `false`, it should not remove `Content }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.headers['content-length'], payload.length.toString()) - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(response.headers['content-length'], payload.length.toString()) + 
t.assert.equal(payload.toString('utf-8'), file) }) - t.test('using `onSend` hook', async (t) => { + test('using `onSend` hook', async (t) => { t.plan(4) const fastify = Fastify() @@ -2022,15 +2022,15 @@ test('When `removeContentLengthHeader` is `false`, it should not remove `Content }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.headers['content-length'], payload.length.toString()) - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(response.headers['content-length'], payload.length.toString()) + t.assert.equal(payload.toString('utf-8'), file) }) }) -test('It should add hooks correctly: ', async (t) => { - t.test('`onRequest` hooks', async (t) => { +describe('It should add hooks correctly: ', async () => { + test('`onRequest` hooks', async (t) => { t.plan(14) const fastify = Fastify() @@ -2074,11 +2074,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(response.headers['x-fastify-test-one'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(response.headers['x-fastify-test-one'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2091,11 +2091,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = 
zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(response.headers['x-fastify-test-two'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(response.headers['x-fastify-test-two'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2108,16 +2108,16 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) }) - t.test('`onSend` hooks', async (t) => { + test('`onSend` hooks', async (t) => { t.plan(14) const fastify = Fastify() @@ -2161,11 +2161,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(response.headers['x-fastify-test-one'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + 
t.assert.equal(response.headers['x-fastify-test-one'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2178,11 +2178,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(response.headers['x-fastify-test-two'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(response.headers['x-fastify-test-two'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2195,16 +2195,16 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) }) - t.test('`preParsing` hooks', async (t) => { + test('`preParsing` hooks', async (t) => { t.plan(14) const fastify = Fastify() @@ -2255,11 +2255,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - 
t.equal(response.headers['x-fastify-test-one'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(response.headers['x-fastify-test-one'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2272,11 +2272,11 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(response.headers['x-fastify-test-two'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(response.headers['x-fastify-test-two'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) @@ -2289,18 +2289,18 @@ test('It should add hooks correctly: ', async (t) => { } }).then((response) => { const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.statusCode, 200) - t.notOk(response.headers['content-length'], 'no content length') - t.equal(response.headers['x-fastify-global-test'], 'ok') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers['content-length'], 'no content length') + t.assert.equal(response.headers['x-fastify-global-test'], 'ok') + t.assert.equal(payload.toString('utf-8'), file) }).catch((err) => { t.error(err) }) }) }) -test('When `Accept-Encoding` request header values are not supported and `onUnsupportedEncoding` is defined :', async (t) => { - t.test('it should call the defined 
`onUnsupportedEncoding()` method', async (t) => { +describe('When `Accept-Encoding` request header values are not supported and `onUnsupportedEncoding` is defined :', async () => { + test('it should call the defined `onUnsupportedEncoding()` method', async (t) => { t.plan(2) const fastify = Fastify() @@ -2325,11 +2325,11 @@ test('When `Accept-Encoding` request header values are not supported and `onUnsu 'accept-encoding': 'hello' } }) - t.equal(response.statusCode, 406) - t.same(JSON.parse(response.payload), { hello: 'hello' }) + t.assert.equal(response.statusCode, 406) + t.assert.deepEqual(JSON.parse(response.payload), { hello: 'hello' }) }) - t.test('it should call the defined `onUnsupportedEncoding()` method and throw an error', async (t) => { + test('it should call the defined `onUnsupportedEncoding()` method and throw an error', async (t) => { t.plan(2) const fastify = Fastify() @@ -2354,8 +2354,8 @@ test('When `Accept-Encoding` request header values are not supported and `onUnsu 'accept-encoding': 'hello' } }) - t.equal(response.statusCode, 406) - t.same(JSON.parse(response.payload), { + t.assert.equal(response.statusCode, 406) + t.assert.deepEqual(JSON.parse(response.payload), { error: 'Not Acceptable', message: 'testing error', statusCode: 406 @@ -2363,8 +2363,8 @@ test('When `Accept-Encoding` request header values are not supported and `onUnsu }) }) -test('`Accept-Encoding` request header values :', async (t) => { - t.test('can contain white space', async (t) => { +describe('`Accept-Encoding` request header values :', async () => { + test('can contain white space', async (t) => { t.plan(3) const fastify = Fastify() @@ -2384,12 +2384,12 @@ test('`Accept-Encoding` request header values :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), JSON.stringify(json)) + 
t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), JSON.stringify(json)) }) - t.test('can contain mixed uppercase and lowercase characters (e.g.: compressing a Stream)', async (t) => { + test('can contain mixed uppercase and lowercase characters (e.g.: compressing a Stream)', async (t) => { t.plan(3) const fastify = Fastify() @@ -2410,12 +2410,12 @@ test('`Accept-Encoding` request header values :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('can contain mixed uppercase and lowercase characters (e.g.: compressing a Buffer)', async (t) => { + test('can contain mixed uppercase and lowercase characters (e.g.: compressing a Buffer)', async (t) => { t.plan(1) const fastify = Fastify() @@ -2434,10 +2434,10 @@ test('`Accept-Encoding` request header values :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(payload.toString('utf-8'), buf.toString()) + t.assert.equal(payload.toString('utf-8'), buf.toString()) }) - t.test('should support `gzip` alias value `x-gzip`', async (t) => { + test('should support `gzip` alias value `x-gzip`', async (t) => { t.plan(3) const fastify = Fastify() @@ -2460,12 +2460,12 @@ test('`Accept-Encoding` request header values :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), 
file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) - t.test('should support quality syntax', async (t) => { + test('should support quality syntax', async (t) => { t.plan(3) const fastify = Fastify() @@ -2488,9 +2488,9 @@ test('`Accept-Encoding` request header values :', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(payload.toString('utf-8'), file) }) }) @@ -2514,9 +2514,9 @@ test('It should compress data if `customTypes` is set and matches `Content-Type` }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) test('It should compress data if `customTypes` is a function and returns true on the provided `Content-Type` reply header value', async (t) => { @@ -2539,9 +2539,9 @@ test('It should compress data if `customTypes` is a function and returns true on }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + 
t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) test('It should not apply `customTypes` option if the passed value is not a RegExp or Function', async (t) => { @@ -2563,9 +2563,9 @@ test('It should not apply `customTypes` option if the passed value is not a RegE 'accept-encoding': 'gzip' } }) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.statusCode, 200) }) test('When `encodings` option is set, it should only use the registered value', async (t) => { @@ -2585,13 +2585,13 @@ test('When `encodings` option is set, it should only use the registered value', 'accept-encoding': 'br,gzip,deflate' } }) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.statusCode, 200) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.equal(response.statusCode, 200) }) -test('It should send data compressed according to `brotliOptions` :', async (t) => { - t.test('when using br encoding', async (t) => { +describe('It should send data compressed according to `brotliOptions` :', async () => { + test('when using br encoding', async (t) => { t.plan(4) const brotliOptions = { params: { @@ -2619,15 +2619,15 @@ test('It should send data compressed according to `brotliOptions` :', async (t) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload, brotliOptions) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'br') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + 
t.assert.equal(payload.toString('utf-8'), file) const compressedPayload = zlib.brotliCompressSync(file, brotliOptions) - t.same(response.rawPayload, compressedPayload) + t.assert.deepEqual(response.rawPayload, compressedPayload) }) - t.test('default BROTLI_PARAM_QUALITY to be 4', async (t) => { + test('default BROTLI_PARAM_QUALITY to be 4', async (t) => { t.plan(1) const fastify = Fastify() @@ -2652,12 +2652,12 @@ test('It should send data compressed according to `brotliOptions` :', async (t) params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 4 } } const compressedPayload = zlib.brotliCompressSync(file, defaultBrotliOptions) - t.same(response.rawPayload, compressedPayload) + t.assert.deepEqual(response.rawPayload, compressedPayload) }) }) -test('It should send data compressed according to `zlibOptions` :', async (t) => { - t.test('when using deflate encoding', async (t) => { +describe('It should send data compressed according to `zlibOptions` :', async () => { + test('when using deflate encoding', async (t) => { t.plan(3) const zlibOptions = { @@ -2685,12 +2685,12 @@ test('It should send data compressed according to `zlibOptions` :', async (t) => } }) const file = readFileSync('./package.json') - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.same(response.rawPayload, zlib.deflateSync(file, zlibOptions)) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'deflate') + t.assert.deepEqual(response.rawPayload, zlib.deflateSync(file, zlibOptions)) }) - t.test('when using gzip encoding', async (t) => { + test('when using gzip encoding', async (t) => { t.plan(3) const zlibOptions = { level: 1 } @@ -2715,9 +2715,9 @@ test('It should send data compressed according to `zlibOptions` :', async (t) => } }) const file = readFileSync('./package.json') - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 
'gzip') - t.same(response.rawPayload, zlib.gzipSync(file, zlibOptions)) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.deepEqual(response.rawPayload, zlib.gzipSync(file, zlibOptions)) }) }) @@ -2748,7 +2748,7 @@ test('It should concat `accept-encoding` to `Vary` reply header if present', asy 'accept-encoding': 'deflate' } }).then((response) => { - t.same(response.headers.vary, 'different-header, accept-encoding') + t.assert.deepEqual(response.headers.vary, 'different-header, accept-encoding') }).catch((err) => { t.error(err) }) @@ -2760,7 +2760,7 @@ test('It should concat `accept-encoding` to `Vary` reply header if present', asy 'accept-encoding': 'deflate' } }).then((response) => { - t.same(response.headers.vary, 'different-header, my-header, accept-encoding') + t.assert.deepEqual(response.headers.vary, 'different-header, my-header, accept-encoding') }).catch((err) => { t.error(err) }) @@ -2793,7 +2793,7 @@ test('It should not add `accept-encoding` to `Vary` reply header if already pres 'accept-encoding': 'deflate' } }).then((response) => { - t.same(response.headers.vary, 'accept-encoding,different-header') + t.assert.deepEqual(response.headers.vary, 'accept-encoding,different-header') }).catch((err) => { t.error(err) }) @@ -2805,7 +2805,7 @@ test('It should not add `accept-encoding` to `Vary` reply header if already pres 'accept-encoding': 'deflate' } }).then((response) => { - t.same(response.headers.vary, 'accept-encoding, different-header, my-header') + t.assert.deepEqual(response.headers.vary, 'accept-encoding, different-header, my-header') }).catch((err) => { t.error(err) }) @@ -2832,9 +2832,9 @@ test('It should follow the `Accept-Encoding` request header encoding order', asy }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - 
t.equal(response.headers['content-encoding'], 'br') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.equal(payload.toString('utf-8'), file) }) test('It should sort and follow custom `encodings` options', async (t) => { @@ -2861,9 +2861,9 @@ test('It should sort and follow custom `encodings` options', async (t) => { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'br') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.equal(payload.toString('utf-8'), file) }) test('It should sort and prefer the order of custom `encodings` options', async (t) => { @@ -2891,9 +2891,9 @@ test('It should sort and prefer the order of custom `encodings` options', async const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) test('It should sort and follow custom `requestEncodings` options', async (t) => { @@ -2920,13 +2920,13 @@ test('It should sort and follow custom `requestEncodings` options', async (t) => }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.brotliDecompressSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'br') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 
'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'br') + t.assert.equal(payload.toString('utf-8'), file) }) -test('It should uncompress data when `Accept-Encoding` request header is missing :', async (t) => { - t.test('using the fallback Node.js `zlib.createInflate()` method', async (t) => { +describe('It should uncompress data when `Accept-Encoding` request header is missing :', async () => { + test('using the fallback Node.js `zlib.createInflate()` method', async (t) => { t.plan(4) const fastify = Fastify() @@ -2946,13 +2946,13 @@ test('It should uncompress data when `Accept-Encoding` request header is missing url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) - t.test('using the fallback Node.js `zlib.createGunzip()` method', async (t) => { + test('using the fallback Node.js `zlib.createGunzip()` method', async (t) => { t.plan(4) const fastify = Fastify() @@ -2972,13 +2972,13 @@ test('It should uncompress data when `Accept-Encoding` request header is missing url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) - t.test('when the data is a deflated Buffer', async (t) => { + test('when the data is a deflated Buffer', async (t) => { t.plan(4) const fastify = Fastify() @@ -2997,13 +2997,13 @@ test('It should uncompress data when `Accept-Encoding` request header is missing 
url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) - t.test('when the data is a gzipped Buffer', async (t) => { + test('when the data is a gzipped Buffer', async (t) => { t.plan(4) const fastify = Fastify() @@ -3022,13 +3022,13 @@ test('It should uncompress data when `Accept-Encoding` request header is missing url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) - t.test('when the data is a deflated Stream', async (t) => { + test('when the data is a deflated Stream', async (t) => { t.plan(4) const fastify = Fastify() @@ -3050,13 +3050,13 @@ test('It should uncompress data when `Accept-Encoding` request header is missing method: 'GET' }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.rawPayload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.rawPayload.toString('utf-8'), file) }) - t.test('when the data is a gzipped Stream', async (t) => { + test('when the data is a gzipped Stream', async (t) => { t.plan(4) const fastify = Fastify() @@ -3078,13 +3078,13 @@ test('It should uncompress data when `Accept-Encoding` request header 
is missing method: 'GET' }) const file = readFileSync('./package.json', 'utf8') - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.rawPayload.toString('utf-8'), file) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.rawPayload.toString('utf-8'), file) }) - t.test('when the data has been compressed multiple times', async (t) => { + test('when the data has been compressed multiple times', async (t) => { t.plan(4) const fastify = Fastify() @@ -3107,15 +3107,15 @@ test('It should uncompress data when `Accept-Encoding` request header is missing url: '/', method: 'GET' }) - t.equal(response.statusCode, 200) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.same(JSON.parse('' + response.payload), json) + t.assert.equal(response.statusCode, 200) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.deepEqual(JSON.parse('' + response.payload), json) }) }) -test('When `onUnsupportedEncoding` is set and the `Accept-Encoding` request header value is an unsupported encoding', async (t) => { - t.test('it should call the defined `onUnsupportedEncoding()` method', async (t) => { +describe('When `onUnsupportedEncoding` is set and the `Accept-Encoding` request header value is an unsupported encoding', async () => { + test('it should call the defined `onUnsupportedEncoding()` method', async (t) => { t.plan(3) const fastify = Fastify() @@ -3140,12 +3140,12 @@ test('When `onUnsupportedEncoding` is set and the `Accept-Encoding` request head 'accept-encoding': 'hello' } }) - t.equal(response.statusCode, 406) - t.notOk(response.headers.vary) - t.same(JSON.parse(response.payload), { hello: 'hello' }) + t.assert.equal(response.statusCode, 406) + t.assert.ok(!response.headers.vary) + 
t.assert.deepEqual(JSON.parse(response.payload), { hello: 'hello' }) }) - t.test('it should call the defined `onUnsupportedEncoding()` method and throw an error', async (t) => { + test('it should call the defined `onUnsupportedEncoding()` method and throw an error', async (t) => { t.plan(2) const fastify = Fastify() @@ -3170,8 +3170,8 @@ test('When `onUnsupportedEncoding` is set and the `Accept-Encoding` request head 'accept-encoding': 'hello' } }) - t.equal(response.statusCode, 406) - t.same(JSON.parse(response.payload), { + t.assert.equal(response.statusCode, 406) + t.assert.deepEqual(JSON.parse(response.payload), { error: 'Not Acceptable', message: 'testing error', statusCode: 406 @@ -3179,19 +3179,19 @@ test('When `onUnsupportedEncoding` is set and the `Accept-Encoding` request head }) }) -test('It should error :', async (t) => { - t.test('when `encodings` array is empty', (t) => { +describe('It should error :', async () => { + test('when `encodings` array is empty', async (t) => { t.plan(1) const fastify = Fastify() fastify.register(compressPlugin, { encodings: [] }) - - fastify.ready(err => { - t.ok(err instanceof Error) + await t.assert.rejects(async () => fastify.ready(), { + name: 'Error', + message: 'The `encodings` option array must have at least 1 item.' }) }) - t.test('when no entries in `encodings` are supported', (t) => { + test('when no entries in `encodings` are supported', async (t) => { t.plan(1) const fastify = Fastify() @@ -3199,8 +3199,9 @@ test('It should error :', async (t) => { encodings: ['(not-a-real-encoding)'] }) - fastify.ready(err => { - t.ok(err instanceof Error) + await t.assert.rejects(async () => fastify.ready(), { + name: 'Error', + message: 'None of the passed `encodings` were supported — compression not possible.'
}) }) }) @@ -3220,8 +3221,8 @@ test('It should return an error when using `reply.compress()` with a missing pay method: 'GET' }) const payload = JSON.parse(response.payload) - t.equal(response.statusCode, 500) - t.same({ + t.assert.equal(response.statusCode, 500) + t.assert.deepEqual({ error: 'Internal Server Error', message: 'Internal server error', statusCode: 500 @@ -3261,9 +3262,9 @@ for (const contentType of defaultSupportedContentTypes) { }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + t.assert.equal(response.headers.vary, 'accept-encoding') + t.assert.equal(response.headers['content-encoding'], 'gzip') + t.assert.equal(payload.toString('utf-8'), file) }) } @@ -3292,8 +3293,8 @@ for (const contentType of notByDefaultSupportedContentTypes) { } }) const file = readFileSync('./package.json', 'utf8') - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.rawPayload.toString('utf-8'), file) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.rawPayload.toString('utf-8'), file) }) } diff --git a/test/global-decompress.test.js b/test/global-decompress.test.js index 94aa443..c2cb619 100644 --- a/test/global-decompress.test.js +++ b/test/global-decompress.test.js @@ -1,6 +1,6 @@ 'use strict' -const { test } = require('tap') +const { test, describe } = require('node:test') const { createReadStream } = require('node:fs') const path = require('node:path') const zlib = require('node:zlib') @@ -18,8 +18,8 @@ function createPayload (compressor) { return payload } -test('It should decompress the request payload :', async (t) => { - t.test('using brotli algorithm when `Content-Encoding` request header value is set to `br`', async (t) => { +describe('It should 
decompress the request payload :', async () => { + test('using brotli algorithm when `Content-Encoding` request header value is set to `br`', async (t) => { t.plan(2) const fastify = Fastify() @@ -38,11 +38,11 @@ test('It should decompress the request payload :', async (t) => { }, payload: createPayload(zlib.createBrotliCompress) }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) - t.test('using deflate algorithm when `Content-Encoding` request header value is set to `deflate`', async (t) => { + test('using deflate algorithm when `Content-Encoding` request header value is set to `deflate`', async (t) => { t.plan(2) const fastify = Fastify() @@ -61,11 +61,11 @@ test('It should decompress the request payload :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) - t.test('using gzip algorithm when `Content-Encoding` request header value is set to `gzip`', async (t) => { + test('using gzip algorithm when `Content-Encoding` request header value is set to `gzip`', async (t) => { t.plan(2) const fastify = Fastify() @@ -84,11 +84,11 @@ test('It should decompress the request payload :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) - t.test('using the `forceRequestEncoding` provided algorithm over the `Content-Encoding` request header value', async (t) => { + test('using the `forceRequestEncoding` provided algorithm over the `Content-Encoding` request header value', async (t) => { t.plan(2) const fastify = Fastify() @@ -107,13 +107,13 @@ test('It should 
decompress the request payload :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) }) -test('It should not decompress :', async (t) => { - t.test('when `Content-Encoding` request header is missing', async (t) => { +describe('It should not decompress :', async () => { + test('when `Content-Encoding` request header is missing', async (t) => { t.plan(2) const fastify = Fastify() @@ -131,11 +131,11 @@ test('It should not decompress :', async (t) => { }, payload: createPayload() }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) - t.test('when `Content-Encoding` request header value is set to `identity`', async (t) => { + test('when `Content-Encoding` request header value is set to `identity`', async (t) => { t.plan(2) const fastify = Fastify() @@ -154,13 +154,13 @@ test('It should not decompress :', async (t) => { }, payload: createPayload() }) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + t.assert.equal(response.statusCode, 200) + t.assert.equal(response.body, '@fastify/compress') }) }) -test('It should return an error :', async (t) => { - t.test('when `Content-Encoding` request header value is not supported', async (t) => { +describe('It should return an error :', async () => { + test('when `Content-Encoding` request header value is not supported', async (t) => { t.plan(2) const fastify = Fastify() @@ -179,8 +179,8 @@ test('It should return an error :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - t.equal(response.statusCode, 415) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 415) + t.assert.deepEqual(response.json(), { statusCode: 415, code: 
'FST_CP_ERR_INVALID_CONTENT_ENCODING', error: 'Unsupported Media Type', @@ -188,7 +188,7 @@ test('It should return an error :', async (t) => { }) }) - t.test('when `Content-Encoding` request header value is disabled by the `requestEncodings` option', async (t) => { + test('when `Content-Encoding` request header value is disabled by the `requestEncodings` option', async (t) => { t.plan(2) const fastify = Fastify() @@ -207,8 +207,8 @@ test('It should return an error :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - t.equal(response.statusCode, 415) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 415) + t.assert.deepEqual(response.json(), { statusCode: 415, code: 'FST_CP_ERR_INVALID_CONTENT_ENCODING', error: 'Unsupported Media Type', @@ -216,7 +216,7 @@ test('It should return an error :', async (t) => { }) }) - t.test('when the compressed payload is invalid according to `Content-Encoding` request header value', async (t) => { + test('when the compressed payload is invalid according to `Content-Encoding` request header value', async (t) => { t.plan(2) const fastify = Fastify() @@ -235,8 +235,8 @@ test('It should return an error :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(response.statusCode, 400) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 400) + t.assert.deepEqual(response.json(), { statusCode: 400, code: 'FST_CP_ERR_INVALID_CONTENT', error: 'Bad Request', @@ -245,8 +245,8 @@ test('It should return an error :', async (t) => { }) }) -test('It should return the error returned by :', async (t) => { - t.test('`onUnsupportedRequestEncoding`', async (t) => { +describe('It should return the error returned by :', async () => { + test('`onUnsupportedRequestEncoding`', async (t) => { t.plan(2) const fastify = Fastify() @@ -274,8 +274,8 @@ test('It should return the error returned by :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - 
t.equal(response.statusCode, 400) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 400) + t.assert.deepEqual(response.json(), { statusCode: 400, code: 'INVALID', error: 'Bad Request', @@ -283,7 +283,7 @@ test('It should return the error returned by :', async (t) => { }) }) - t.test('`onInvalidRequestPayload`', async (t) => { + test('`onInvalidRequestPayload`', async (t) => { t.plan(2) const fastify = Fastify() @@ -311,8 +311,8 @@ test('It should return the error returned by :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(response.statusCode, 400) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 400) + t.assert.deepEqual(response.json(), { statusCode: 400, code: 'INVALID', error: 'Bad Request', @@ -321,8 +321,8 @@ test('It should return the error returned by :', async (t) => { }) }) -test('It should return the default error :', async (t) => { - t.test('when `onUnsupportedRequestEncoding` throws', async (t) => { +describe('It should return the default error :', async () => { + test('when `onUnsupportedRequestEncoding` throws', async (t) => { t.plan(2) const fastify = Fastify() @@ -345,8 +345,8 @@ test('It should return the default error :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - t.equal(response.statusCode, 415) - t.strictSame(response.json(), { + t.assert.equal(response.statusCode, 415) + t.assert.deepEqual(response.json(), { statusCode: 415, code: 'FST_CP_ERR_INVALID_CONTENT_ENCODING', error: 'Unsupported Media Type', @@ -354,7 +354,7 @@ test('It should return the default error :', async (t) => { }) }) - t.test('when `onInvalidRequestPayload` throws', async (t) => { + test('when `onInvalidRequestPayload` throws', async (t) => { t.plan(2) const fastify = Fastify() @@ -377,8 +377,8 @@ test('It should return the default error :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(response.statusCode, 400) - t.strictSame(response.json(), { + 
t.assert.equal(response.statusCode, 400) + t.assert.deepEqual(response.json(), { statusCode: 400, code: 'FST_CP_ERR_INVALID_CONTENT', error: 'Bad Request', @@ -387,37 +387,47 @@ }) }) -test('It should validate `requestEncodings` option', (t) => { +test('It should validate `requestEncodings` option', async (t) => { t.plan(1) const fastify = Fastify() fastify.register(compressPlugin, { requestEncodings: [] }) - - fastify.ready((err) => { - t.equal(err.message, 'The `requestEncodings` option array must have at least 1 item.') - }) + await t.assert.rejects( + async () => fastify.ready(), + { + name: 'Error', + message: 'The `requestEncodings` option array must have at least 1 item.' + } + ) }) -test('It should make sure that at least one encoding value is supported :', async (t) => { - t.test('when setting `requestEncodings`', (t) => { +describe('It should make sure that at least one encoding value is supported :', async () => { + test('when setting `requestEncodings`', async (t) => { t.plan(1) const fastify = Fastify() fastify.register(compressPlugin, { requestEncodings: ['whatever'] }) - fastify.ready((err) => { - t.equal(err.message, 'None of the passed `requestEncodings` were supported — request decompression not possible.') - }) + await t.assert.rejects( + async () => fastify.ready(), + { + name: 'Error', + message: 'None of the passed `requestEncodings` were supported — request decompression not possible.' + } + ) }) - t.test('when setting `forceRequestEncodings`', (t) => { + test('when setting `forceRequestEncodings`', async (t) => { t.plan(1) const fastify = Fastify() fastify.register(compressPlugin, { forceRequestEncoding: ['whatever'] }) - - fastify.ready((err) => { - t.equal(err.message, 'Unsupported decompression encoding whatever.') - }) + await t.assert.rejects( + async () => fastify.ready(), + { + name: 'Error', + message: 'Unsupported decompression encoding whatever.'
+ } + ) }) }) diff --git a/test/regression/issue-288.test.js b/test/regression/issue-288.test.js index beee851..c6669d9 100644 --- a/test/regression/issue-288.test.js +++ b/test/regression/issue-288.test.js @@ -1,6 +1,6 @@ 'use strict' -const { test } = require('tap') +const { test } = require('node:test') const Fastify = require('fastify') const fastifyCompress = require('../..') const { request } = require('node:http') @@ -35,7 +35,7 @@ test('should not corrupt the file content', async (t) => { }) .join('\n') const fastify = new Fastify() - t.teardown(() => fastify.close()) + t.after(() => fastify.close()) fastify.register(async (instance, opts) => { await fastify.register(fastifyCompress) @@ -57,6 +57,6 @@ test('should not corrupt the file content', async (t) => { fetch(`${address}/no-compress`) ]) - t.equal(body1, body2) - t.equal(body1, twoByteUnicodeContent) + t.assert.equal(body1, body2) + t.assert.equal(body1, twoByteUnicodeContent) }) diff --git a/test/routes-compress.test.js b/test/routes-compress.test.js index 74adc9b..5956dec 100644 --- a/test/routes-compress.test.js +++ b/test/routes-compress.test.js @@ -1,14 +1,15 @@ 'use strict' -const { test } = require('tap') +const { test, describe } = require('node:test') const { createReadStream, readFile, readFileSync } = require('node:fs') const zlib = require('node:zlib') const Fastify = require('fastify') const compressPlugin = require('../index') -test('When using routes `compress` settings :', async (t) => { - t.test('it should compress data using the route custom provided `createDeflate` method', async (t) => { +describe('When using routes `compress` settings :', async (t) => { + test('it should compress data using the route custom provided `createDeflate` method', async (t) => { t.plan(12) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -39,15 +40,15 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'deflate' } }).then((response) => 
{ - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'deflate') + t.assert.ok(!response.headers['content-length'], 'no content length') + equal(payload.toString('utf-8'), file) usedCustom = false usedCustomGlobal = false @@ -60,19 +61,20 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'deflate' } }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.notOk(response.headers['content-length'], 'no content length') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'deflate') + t.assert.ok(!response.headers['content-length'], 'no content length') + equal(payload.toString('utf-8'), file) }) - t.test('it should compress data using the route custom provided `createGzip` method', async (t) => { + test('it should compress data using the route custom provided `createGzip` method', async (t) => { t.plan(10) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -101,14 +103,14 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'gzip' } }).then((response) => { - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, 
true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), file) usedCustom = false usedCustomGlobal = false @@ -121,18 +123,19 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'gzip' } }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), file) }) - t.test('it should not compress data when `global` is `false` unless `compress` routes settings have been set up', async (t) => { + test('it should not compress data when `global` is `false` unless `compress` routes settings have been set up', async (t) => { t.plan(12) + const equal = t.assert.equal let usedCustom = false const customZlib = { createGzip: () => (usedCustom = true) && zlib.createGzip() } @@ -142,7 +145,7 @@ test('When using routes `compress` settings :', async (t) => { fastify.get('/', (request, reply) => { // compress function should still be available - t.type(reply.compress, 'function') + t.assert.ok(typeof reply.compress === 'function') reply.send({ foo: 1 }) }) @@ -168,10 +171,10 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'gzip' } }).then((response) => { - 
t.equal(usedCustom, false) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.rawPayload.toString('utf-8'), JSON.stringify({ foo: 1 })) + t.assert.equal(usedCustom, false) + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + t.assert.equal(response.rawPayload.toString('utf-8'), JSON.stringify({ foo: 1 })) usedCustom = false }) @@ -183,13 +186,13 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'gzip' } }).then((response) => { - t.equal(usedCustom, true) + equal(usedCustom, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), file) }) const response = await fastify.inject({ @@ -200,14 +203,14 @@ test('When using routes `compress` settings :', async (t) => { } }) const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), JSON.stringify({ foo: 1 })) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), JSON.stringify({ foo: 1 })) }) - t.test('it should not compress data when route `compress` option is set to `false`', async (t) => { + test('it should not compress data when route `compress` option is set to `false`', async (t) => { t.plan(3) - + const equal = t.assert.equal const content = { message: 'Hello World!' 
} const fastify = Fastify() @@ -226,13 +229,15 @@ test('When using routes `compress` settings :', async (t) => { 'accept-encoding': 'gzip' } }) - t.notOk(response.headers.vary) - t.notOk(response.headers['content-encoding']) - t.equal(response.rawPayload.toString('utf-8'), JSON.stringify(content)) + + t.assert.ok(!response.headers.vary) + t.assert.ok(!response.headers['content-encoding']) + equal(response.rawPayload.toString('utf-8'), JSON.stringify(content)) }) - t.test('it should throw an error on invalid route `compress` settings', async (t) => { + test('it should throw an error on invalid route `compress` settings', async (t) => { t.plan(1) + const equal = t.assert.equal const fastify = Fastify() await fastify.register(compressPlugin, { global: false }) @@ -244,14 +249,15 @@ test('When using routes `compress` settings :', async (t) => { reply.send('') }) } catch (err) { - t.equal(err.message, 'Unknown value for route compress configuration') + equal(err.message, 'Unknown value for route compress configuration') } }) }) -test('When `compress.removeContentLengthHeader` is `false`, it should not remove `Content-Length` header :', async (t) => { - t.test('using `reply.compress()`', async (t) => { +describe('When `compress.removeContentLengthHeader` is `false`, it should not remove `Content-Length` header :', async () => { + test('using `reply.compress()`', async (t) => { t.plan(4) + const equal = t.assert.equal const fastify = Fastify() await fastify.register(compressPlugin, { global: true }) @@ -280,14 +286,15 @@ test('When `compress.removeContentLengthHeader` is `false`, it should not remove }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.headers['content-length'], payload.length.toString()) - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 
'accept-encoding') + equal(response.headers['content-encoding'], 'deflate') + equal(response.headers['content-length'], payload.length.toString()) + equal(payload.toString('utf-8'), file) }) - t.test('using `onSend` hook', async (t) => { + test('using `onSend` hook', async (t) => { t.plan(4) + const equal = t.assert.equal const fastify = Fastify() await fastify.register(compressPlugin, { global: true }) @@ -316,16 +323,17 @@ test('When `compress.removeContentLengthHeader` is `false`, it should not remove }) const file = readFileSync('./package.json', 'utf8') const payload = zlib.inflateSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'deflate') - t.equal(response.headers['content-length'], payload.length.toString()) - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'deflate') + equal(response.headers['content-length'], payload.length.toString()) + equal(payload.toString('utf-8'), file) }) }) -test('When using the old routes `{ config: compress }` option :', async (t) => { - t.test('it should compress data using the route custom provided `createGzip` method', async (t) => { +describe('When using the old routes `{ config: compress }` option :', async () => { + test('it should compress data using the route custom provided `createGzip` method', async (t) => { t.plan(10) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -358,14 +366,14 @@ test('When using the old routes `{ config: compress }` option :', async (t) => { 'accept-encoding': 'gzip' } }).then((response) => { - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - 
t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), file) usedCustom = false usedCustomGlobal = false @@ -378,17 +386,17 @@ test('When using the old routes `{ config: compress }` option :', async (t) => { 'accept-encoding': 'gzip' } }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) const file = readFileSync('./package.json', 'utf8') const payload = zlib.gunzipSync(response.rawPayload) - t.equal(response.headers.vary, 'accept-encoding') - t.equal(response.headers['content-encoding'], 'gzip') - t.equal(payload.toString('utf-8'), file) + equal(response.headers.vary, 'accept-encoding') + equal(response.headers['content-encoding'], 'gzip') + equal(payload.toString('utf-8'), file) }) - t.test('it should use the old routes `{ config: compress }` options over routes `compress` options', async (t) => { + test('it should use the old routes `{ config: compress }` options over routes `compress` options', async (t) => { t.plan(1) const fastify = Fastify() @@ -406,7 +414,7 @@ test('When using the old routes `{ config: compress }` option :', async (t) => { reply.send('') }) } catch (err) { - t.equal(err.message, 'Unknown value for route compress configuration') + t.assert.equal(err.message, 'Unknown value for route compress configuration') } }) }) @@ -430,5 +438,5 @@ test('It should avoid to trigger `onSend` hook twice', async (t) => { 'accept-encoding': 'br' } }) - t.same(JSON.parse(zlib.brotliDecompressSync(response.rawPayload)), { hi: true }) + t.assert.deepEqual(JSON.parse(zlib.brotliDecompressSync(response.rawPayload)), { hi: true }) }) diff --git a/test/routes-decompress.test.js b/test/routes-decompress.test.js index e921695..fc998ea 100644 --- a/test/routes-decompress.test.js +++ b/test/routes-decompress.test.js @@ 
-1,6 +1,6 @@ 'use strict' -const { test } = require('tap') +const { test, describe } = require('node:test') const { createReadStream } = require('node:fs') const path = require('node:path') const zlib = require('node:zlib') @@ -18,9 +18,10 @@ function createPayload (compressor) { return payload } -test('When using routes `decompress` settings :', async (t) => { - t.test('it should decompress data using the route custom provided `createInflate` method', async (t) => { +describe('When using routes `decompress` settings :', async (suite) => { + test('it should decompress data using the route custom provided `createInflate` method', async (t) => { t.plan(8) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -51,11 +52,11 @@ test('When using routes `decompress` settings :', async (t) => { }, payload: createPayload(zlib.createDeflate) }).then((response) => { - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') usedCustom = false usedCustomGlobal = false @@ -69,15 +70,16 @@ test('When using routes `decompress` settings :', async (t) => { }, payload: createPayload(zlib.createDeflate) }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') }) - t.test('it should decompress data using the route custom provided `createGunzip` method', async (t) => { + test('it should decompress data using the route custom provided `createGunzip` method', async (t) => { t.plan(8) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -108,11 +110,11 @@ test('When using routes 
`decompress` settings :', async (t) => { }, payload: createPayload(zlib.createGzip) }).then((response) => { - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') usedCustom = false usedCustomGlobal = false @@ -127,15 +129,16 @@ test('When using routes `decompress` settings :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') }) - t.test('it should not decompress data when route `decompress` option is set to `false`', async (t) => { + test('it should not decompress data when route `decompress` option is set to `false`', async (t) => { t.plan(6) + const equal = t.assert.equal let usedCustomGlobal = false const customZlibGlobal = { createGunzip: () => (usedCustomGlobal = true) && zlib.createGunzip() } @@ -160,10 +163,10 @@ test('When using routes `decompress` settings :', async (t) => { }, payload: createPayload(zlib.createGzip) }).then((response) => { - t.equal(usedCustomGlobal, true) + equal(usedCustomGlobal, true) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') usedCustomGlobal = false }) @@ -177,10 +180,10 @@ test('When using routes `decompress` settings :', async (t) => { }, payload: createPayload(zlib.createGzip) }) - t.equal(usedCustomGlobal, false) + equal(usedCustomGlobal, false) - t.equal(response.statusCode, 400) - t.strictSame(response.json(), { + equal(response.statusCode, 400) + t.assert.deepEqual(response.json(), { statusCode: 400, 
code: 'FST_ERR_CTP_INVALID_CONTENT_LENGTH', error: 'Bad Request', @@ -188,7 +191,7 @@ test('When using routes `decompress` settings :', async (t) => { }) }) - t.test('it should throw an error on invalid route `decompress` settings', async (t) => { + test('it should throw an error on invalid route `decompress` settings', async (t) => { t.plan(1) const fastify = Fastify() @@ -199,14 +202,15 @@ test('When using routes `decompress` settings :', async (t) => { reply.send(request.body.name) }) } catch (err) { - t.equal(err.message, 'Unknown value for route decompress configuration') + t.assert.equal(err.message, 'Unknown value for route decompress configuration') } }) }) -test('When using the old routes `{ config: decompress }` option :', async (t) => { - t.test('it should decompress data using the route custom provided `createGunzip` method', async (t) => { +describe('When using the old routes `{ config: decompress }` option :', async () => { + test('it should decompress data using the route custom provided `createGunzip` method', async (t) => { t.plan(8) + const equal = t.assert.equal let usedCustomGlobal = false let usedCustom = false @@ -239,11 +243,11 @@ test('When using the old routes `{ config: decompress }` option :', async (t) => }, payload: createPayload(zlib.createGzip) }).then((response) => { - t.equal(usedCustom, false) - t.equal(usedCustomGlobal, true) + equal(usedCustom, false) + equal(usedCustomGlobal, true) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') + equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') usedCustom = false usedCustomGlobal = false @@ -258,14 +262,14 @@ test('When using the old routes `{ config: decompress }` option :', async (t) => }, payload: createPayload(zlib.createGzip) }) - t.equal(usedCustom, true) - t.equal(usedCustomGlobal, false) + equal(usedCustom, true) + equal(usedCustomGlobal, false) - t.equal(response.statusCode, 200) - t.equal(response.body, '@fastify/compress') 
+ equal(response.statusCode, 200) + equal(response.body, '@fastify/compress') }) - t.test('it should use the old routes `{ config: decompress }` options over routes `decompress` options', async (t) => { + test('it should use the old routes `{ config: decompress }` options over routes `decompress` options', async (t) => { t.plan(1) const fastify = Fastify() @@ -283,7 +287,7 @@ test('When using the old routes `{ config: decompress }` option :', async (t) => reply.send(request.body.name) }) } catch (err) { - t.equal(err.message, 'Unknown value for route decompress configuration') + t.assert.equal(err.message, 'Unknown value for route decompress configuration') } }) }) diff --git a/test/utils.test.js b/test/utils.test.js index 6c37465..f62e9d9 100644 --- a/test/utils.test.js +++ b/test/utils.test.js @@ -3,67 +3,71 @@ const { createReadStream } = require('node:fs') const { Socket } = require('node:net') const { Duplex, PassThrough, Readable, Stream, Transform, Writable } = require('node:stream') -const { test } = require('tap') +const { test } = require('node:test') const { isStream, isDeflate, isGzip, intoAsyncIterator } = require('../lib/utils') test('isStream() utility should be able to detect Streams', async (t) => { t.plan(12) + const equal = t.assert.equal - t.equal(isStream(new Stream()), true) - t.equal(isStream(new Readable()), true) - t.equal(isStream(new Writable()), true) - t.equal(isStream(new Duplex()), true) - t.equal(isStream(new Transform()), true) - t.equal(isStream(new PassThrough()), true) + equal(isStream(new Stream()), true) + equal(isStream(new Readable()), true) + equal(isStream(new Writable()), true) + equal(isStream(new Duplex()), true) + equal(isStream(new Transform()), true) + equal(isStream(new PassThrough()), true) - t.equal(isStream(createReadStream('package.json')), true) + equal(isStream(createReadStream('package.json')), true) - t.equal(isStream(new Socket()), true) + equal(isStream(new Socket()), true) - t.equal(isStream({}), false) - 
t.equal(isStream(null), false) - t.equal(isStream(undefined), false) - t.equal(isStream(''), false) + equal(isStream({}), false) + equal(isStream(null), false) + equal(isStream(undefined), false) + equal(isStream(''), false) }) test('isDeflate() utility should be able to detect deflate compressed Buffer', async (t) => { t.plan(14) - - t.equal(isDeflate(Buffer.alloc(0)), false) - t.equal(isDeflate(Buffer.alloc(0)), false) - t.equal(isDeflate(Buffer.from([0x78])), false) - t.equal(isDeflate(Buffer.from([0x78, 0x00])), false) - t.equal(isDeflate(Buffer.from([0x7a, 0x01])), false) - t.equal(isDeflate(Buffer.from([0x88, 0x01])), false) - t.equal(isDeflate(Buffer.from([0x78, 0x11])), false) - t.equal(isDeflate(Buffer.from([0x78, 0x01])), true) - t.equal(isDeflate(Buffer.from([0x78, 0x9c])), true) - t.equal(isDeflate(Buffer.from([0x78, 0xda])), true) - - t.equal(isDeflate({}), false) - t.equal(isDeflate(null), false) - t.equal(isDeflate(undefined), false) - t.equal(isDeflate(''), false) + const equal = t.assert.equal + + equal(isDeflate(Buffer.alloc(0)), false) + equal(isDeflate(Buffer.alloc(0)), false) + equal(isDeflate(Buffer.from([0x78])), false) + equal(isDeflate(Buffer.from([0x78, 0x00])), false) + equal(isDeflate(Buffer.from([0x7a, 0x01])), false) + equal(isDeflate(Buffer.from([0x88, 0x01])), false) + equal(isDeflate(Buffer.from([0x78, 0x11])), false) + equal(isDeflate(Buffer.from([0x78, 0x01])), true) + equal(isDeflate(Buffer.from([0x78, 0x9c])), true) + equal(isDeflate(Buffer.from([0x78, 0xda])), true) + + equal(isDeflate({}), false) + equal(isDeflate(null), false) + equal(isDeflate(undefined), false) + equal(isDeflate(''), false) }) test('isGzip() utility should be able to detect gzip compressed Buffer', async (t) => { t.plan(10) - - t.equal(isGzip(Buffer.alloc(0)), false) - t.equal(isGzip(Buffer.alloc(1)), false) - t.equal(isGzip(Buffer.alloc(2)), false) - t.equal(isGzip(Buffer.from([0x1f, 0x8b])), false) - t.equal(isGzip(Buffer.from([0x1f, 0x8b, 0x00])), false) 
- t.equal(isGzip(Buffer.from([0x1f, 0x8b, 0x08])), true) - - t.equal(isGzip({}), false) - t.equal(isGzip(null), false) - t.equal(isGzip(undefined), false) - t.equal(isGzip(''), false) + const equal = t.assert.equal + + equal(isGzip(Buffer.alloc(0)), false) + equal(isGzip(Buffer.alloc(1)), false) + equal(isGzip(Buffer.alloc(2)), false) + equal(isGzip(Buffer.from([0x1f, 0x8b])), false) + equal(isGzip(Buffer.from([0x1f, 0x8b, 0x00])), false) + equal(isGzip(Buffer.from([0x1f, 0x8b, 0x08])), true) + + equal(isGzip({}), false) + equal(isGzip(null), false) + equal(isGzip(undefined), false) + equal(isGzip(''), false) }) test('intoAsyncIterator() utility should handle different data', async (t) => { t.plan(8) + const equal = t.assert.equal const buf = Buffer.from('foo') const str = 'foo' @@ -76,30 +80,30 @@ test('intoAsyncIterator() utility should handle different data', async (t) => { const obj = {} for await (const buffer of intoAsyncIterator(buf)) { - t.equal(buffer, buf) + equal(buffer, buf) } for await (const string of intoAsyncIterator(str)) { - t.equal(string, str) + equal(string, str) } for await (const chunk of intoAsyncIterator(arr)) { - t.equal(chunk, str) + equal(chunk, str) } for await (const chunk of intoAsyncIterator(arrayBuffer)) { - t.equal(chunk.toString(), Buffer.from(arrayBuffer).toString()) + equal(chunk.toString(), Buffer.from(arrayBuffer).toString()) } for await (const chunk of intoAsyncIterator(typedArray)) { - t.equal(chunk.toString(), Buffer.from(typedArray).toString()) + equal(chunk.toString(), Buffer.from(typedArray).toString()) } for await (const chunk of intoAsyncIterator(asyncIterator)) { - t.equal(chunk, str) + equal(chunk, str) } for await (const chunk of intoAsyncIterator(obj)) { - t.equal(chunk, obj) + equal(chunk, obj) } })