Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,10 @@ yarn.lock
# editor files
.vscode
.idea
.zed

# tap files
.tap/

# Claude AI
.claude/
42 changes: 41 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,14 @@ To return an error for unsupported encoding, use the `onUnsupportedEncoding` opt

The plugin compresses payloads based on `content-type`. If absent, it assumes `application/json`.

### Supported payload types

The plugin supports compressing the following payload types:
- Strings and Buffers
- Node.js streams
- Response objects (from the Fetch API)
- ReadableStream objects (from the Web Streams API)

### Global hook
The global compression hook is enabled by default. To disable it, pass `{ global: false }`:
```js
Expand Down Expand Up @@ -87,6 +95,8 @@ fastify.get('/custom-route', {
### `reply.compress`
This plugin adds a `compress` method to `reply` that compresses a stream or string based on the `accept-encoding` header. If a JS object is passed, it will be stringified to JSON.

> ℹ️ Note: When compressing a Response object, the compress middleware will copy headers and status from the Response object, unless they have already been explicitly set on the reply. The middleware will then compress the body stream and handle compression-related headers (like `Content-Encoding` and `Vary`).

The `compress` method uses per-route parameters if configured, otherwise it uses global parameters.

```js
Expand All @@ -96,19 +106,39 @@ import fastify from 'fastify'
const app = fastify()
await app.register(import('@fastify/compress'), { global: false })

app.get('/', (req, reply) => {
// Compress a file stream
app.get('/file', (req, reply) => {
reply
.type('text/plain')
.compress(fs.createReadStream('./package.json'))
})

// Compress a Response object from fetch
app.get('/fetch', async (req, reply) => {
const response = await fetch('https://api.example.com/data')
reply
.type('application/json')
.compress(response)
})

// Compress a ReadableStream
app.get('/stream', (req, reply) => {
const response = new Response('Hello World')
reply
.type('text/plain')
.compress(response.body)
})

await app.listen({ port: 3000 })
```

## Compress Options

### threshold
The minimum byte size for response compression. Defaults to `1024`.

> ℹ️ Note: The threshold setting only applies to string and Buffer payloads. Streams (including Node.js streams, Response objects, and ReadableStream objects) are always compressed regardless of the threshold, as their size cannot be determined in advance.

```js
await fastify.register(
import('@fastify/compress'),
Expand Down Expand Up @@ -318,6 +348,16 @@ await fastify.register(
)
```

## Gotchas

### Handling Unsupported Payload Types

When `@fastify/compress` receives a payload type that it doesn't natively support for compression (any type other than those listed in [Supported payload types](#supported-payload-types)), the behavior depends on the compression method:

- **Using `reply.compress()`**: The plugin will attempt to serialize the payload using Fastify's `serialize` function and then compress the result. This provides a best-effort approach to handle custom objects.

- **Using global compression hook**: To prevent breaking applications, the plugin will pass through unsupported payload types without compression. This fail-safe approach ensures that servers continue to function even when encountering unexpected payload types.

## Acknowledgments

Past sponsors:
Expand Down
93 changes: 86 additions & 7 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

const zlib = require('node:zlib')
const { inherits, format } = require('node:util')
const { Readable: NodeReadable } = require('node:stream')

const fp = require('fastify-plugin')
const encodingNegotiator = require('@fastify/accept-negotiator')
Expand Down Expand Up @@ -161,6 +162,10 @@
.sort((a, b) => opts.encodings.indexOf(a) - opts.encodings.indexOf(b))
: supportedEncodings

params.isCompressiblePayload = typeof opts.isCompressiblePayload === 'function'
? opts.isCompressiblePayload
: isCompressiblePayload

return params
}

Expand Down Expand Up @@ -273,10 +278,41 @@
}

if (typeof payload.pipe !== 'function') {
if (Buffer.byteLength(payload) < params.threshold) {
return next()
// Payload is not a stream, ensure we don't try to compress something we cannot get the length of.
if (!params.isCompressiblePayload(payload)) {
return next(null, payload)
}

// Handle Response objects
if (payload instanceof Response) {
// Copy headers from Response object unless already set
for (const [key, value] of payload.headers.entries()) {
if (!reply.hasHeader(key)) {
reply.header(key, value)
}
}

Check failure on line 294 in index.js

View workflow job for this annotation

GitHub Actions / test / Lint Code

Trailing spaces not allowed
// Set status code if it's still the default 200 and Response has a different status
if (reply.statusCode === 200 && payload.status && payload.status !== 200) {
reply.code(payload.status)
}

Check failure on line 299 in index.js

View workflow job for this annotation

GitHub Actions / test / Lint Code

Trailing spaces not allowed
const responseStream = convertResponseToStream(payload)
if (responseStream) {
payload = responseStream
} else {
// Response has no body or body is null
return next()
}
} else if (payload instanceof ReadableStream) {
// Handle raw ReadableStream objects
payload = NodeReadable.fromWeb(payload)
} else {
if (Buffer.byteLength(payload) < params.threshold) {
return next()
}
payload = Readable.from(intoAsyncIterator(payload))
}
payload = Readable.from(intoAsyncIterator(payload))
}

setVaryHeader(reply)
Expand Down Expand Up @@ -391,16 +427,42 @@
}

if (typeof payload.pipe !== 'function') {
if (!Buffer.isBuffer(payload) && typeof payload !== 'string') {
if (!params.isCompressiblePayload(payload)) {
payload = this.serialize(payload)
}
}

if (typeof payload.pipe !== 'function') {
if (Buffer.byteLength(payload) < params.threshold) {
return this.send(payload)
// Handle Response objects
if (payload instanceof Response) {
// Copy headers from Response object unless already set
for (const [key, value] of payload.headers.entries()) {
if (!this.hasHeader(key)) {
this.header(key, value)
}
}

Check failure on line 444 in index.js

View workflow job for this annotation

GitHub Actions / test / Lint Code

Trailing spaces not allowed
// Set status code if it's still the default 200 and Response has a different status
if (this.statusCode === 200 && payload.status && payload.status !== 200) {
this.code(payload.status)
}

Check failure on line 449 in index.js

View workflow job for this annotation

GitHub Actions / test / Lint Code

Trailing spaces not allowed
const responseStream = convertResponseToStream(payload)
if (responseStream) {
payload = responseStream
} else {
// Response has no body or body is null
return this.send(payload)
}
} else if (payload instanceof ReadableStream) {
// Handle raw ReadableStream objects
payload = NodeReadable.fromWeb(payload)
} else {
if (Buffer.byteLength(payload) < params.threshold) {
return this.send(payload)
}
payload = Readable.from(intoAsyncIterator(payload))
}
payload = Readable.from(intoAsyncIterator(payload))
}

setVaryHeader(this)
Expand Down Expand Up @@ -477,6 +539,14 @@
}
}

function isCompressiblePayload (payload) {
// By the time payloads reach this point, Fastify has already serialized
// objects/arrays/etc to strings, so we only need to check for the actual
// types that make it through: Buffer and string
// Also support Response objects from fetch API and ReadableStream
return Buffer.isBuffer(payload) || typeof payload === 'string' || payload instanceof Response || payload instanceof ReadableStream
}

function shouldCompress (type, compressibleTypes) {
if (compressibleTypes(type)) return true
const data = mimedb[type.split(';', 1)[0].trim().toLowerCase()]
Expand Down Expand Up @@ -512,6 +582,15 @@
return Readable.from(intoAsyncIterator(result))
}

function convertResponseToStream (payload) {
// Handle Response objects from fetch API
if (payload instanceof Response && payload.body) {
// Convert Web ReadableStream to Node.js stream
return NodeReadable.fromWeb(payload.body)
}
return null
}

function zipStream (deflate, encoding) {
return peek({ newline: false, maxBuffer: 10 }, function (data, swap) {
switch (isCompressed(data)) {
Expand Down
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,11 @@
"@fastify/pre-commit": "^2.1.0",
"@types/node": "^22.0.0",
"adm-zip": "^0.5.12",
"axios": "^1.10.0",
"c8": "^10.1.2",
"eslint": "^9.17.0",
"fastify": "^5.0.0",
"got": "^11.8.6",
"jsonstream": "^1.0.3",
"neostandard": "^0.12.0",
"tsd": "^0.32.0",
Expand Down
Loading
Loading