bun-serve-compress 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Rhuan Barreto
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,303 @@
1
+ # bun-serve-compress
2
+
3
+ Transparent HTTP response compression for `Bun.serve()` — gzip, brotli, and zstd.
4
+
5
+ A drop-in replacement for `Bun.serve()` that automatically compresses responses based on the client's `Accept-Encoding` header. No middleware, no configuration required — just swap the import.
6
+
7
+ ## Why?
8
+
9
+ Bun.serve() has no built-in response compression ([oven-sh/bun#2726](https://github.com/oven-sh/bun/issues/2726)). This library fills that gap with:
10
+
11
+ - **Smart algorithm negotiation** — prefers zstd > brotli > gzip by default, respecting client `Accept-Encoding` quality weights
12
+ - **Automatic skip logic** — images, fonts, video, already-compressed responses, small bodies, SSE, and `Cache-Control: no-transform` are never compressed
13
+ - **Sane defaults** — brotli quality 5 (not 11, which is [~30x slower](https://cran.r-project.org/web/packages/brotli/vignettes/benchmarks.html)), gzip level 6, zstd level 3
14
+ - **Zero config** — works out of the box, but fully customizable
15
+ - **Bun-native** — uses `Bun.gzipSync()`, `Bun.zstdCompressSync()`, and `CompressionStream` for maximum performance; `node:zlib` brotliCompressSync for brotli (Bun has no native `Bun.brotliCompressSync()` yet)
16
+ - **HTTP spec compliant** — correct `Vary`, `Content-Encoding`, `Content-Length`, ETag, and `Cache-Control: no-transform` handling
17
+
18
+ ## Install
19
+
20
+ ```bash
21
+ bun add bun-serve-compress
22
+ ```
23
+
24
+ ## Quick Start
25
+
26
+ ```typescript
27
+ import { serve } from "bun-serve-compress";
28
+
29
+ serve({
30
+ port: 3000,
31
+ fetch(req) {
32
+ return new Response("Hello, World!");
33
+ },
34
+ });
35
+ ```
36
+
37
+ That's it. Responses are now compressed automatically.
38
+
39
+ ## Usage with Routes
40
+
41
+ Works with Bun's route handlers, including HTML imports:
42
+
43
+ ```typescript
44
+ import { serve } from "bun-serve-compress";
45
+ import homepage from "./index.html";
46
+
47
+ serve({
48
+ port: 3000,
49
+ routes: {
50
+ "/": homepage, // Bun's HTML bundling works transparently
51
+ "/api/data": () => Response.json({ message: "compressed automatically" }),
52
+ "/health": {
53
+ GET: () => new Response("ok"),
54
+ },
55
+ },
56
+ fetch(req) {
57
+ return new Response("Not Found", { status: 404 });
58
+ },
59
+ });
60
+ ```
61
+
62
+ ## Configuration
63
+
64
+ Pass a `compression` option to customize behavior:
65
+
66
+ ```typescript
67
+ import { serve } from "bun-serve-compress";
68
+
69
+ serve({
70
+ port: 3000,
71
+ compression: {
72
+ // Algorithm preference order (default: ['zstd', 'br', 'gzip'])
73
+ algorithms: ["br", "gzip"],
74
+
75
+ // Minimum body size in bytes to compress (default: 1024)
76
+ minSize: 512,
77
+
78
+ // Per-algorithm settings
79
+ gzip: { level: 6 }, // 1-9 (default: 6)
80
+ brotli: { level: 5 }, // 0-11 (default: 5)
81
+ zstd: { level: 3 }, // 1-22 (default: 3)
82
+
83
+ // Additional MIME types to skip (merged with built-in list)
84
+ skipMimeTypes: ["application/x-custom-binary"],
85
+
86
+ // OR: override the entire skip list (replaces built-in list completely)
87
+ // overrideSkipMimeTypes: ["image/png", "application/zip"],
88
+
89
+ // Custom skip function (called after all other skip checks pass)
90
+ shouldCompress: (req, res) => {
91
+ // Return false to skip compression for this request/response
92
+ return !req.url.includes("/raw/");
93
+ },
94
+ },
95
+ fetch(req) {
96
+ return new Response("Hello!");
97
+ },
98
+ });
99
+ ```
100
+
101
+ ### Disable compression entirely
102
+
103
+ ```typescript
104
+ // Option 1: compression: false
105
+ serve({
106
+ compression: false,
107
+ // ...
108
+ });
109
+
110
+ // Option 2: compression.disable
111
+ serve({
112
+ compression: { disable: true },
113
+ // ...
114
+ });
115
+ ```
116
+
117
+ ## What gets compressed?
118
+
119
+ ### Compressed (by default)
120
+
121
+ - `text/*` (HTML, CSS, plain text, etc.)
122
+ - `application/json`
123
+ - `application/javascript`
124
+ - `application/xml`
125
+ - `image/svg+xml` (exception to image/\* skip — SVG is text-based)
126
+ - Any response over 1KB without a matching skip rule
127
+
128
+ ### Skipped (by default)
129
+
130
+ **By MIME type (prefix match):**
131
+
132
+ - `image/*` (except `image/svg+xml`)
133
+ - `audio/*`
134
+ - `video/*`
135
+ - `font/*`
136
+
137
+ **By MIME type (exact match):**
138
+
139
+ - `application/zip`, `application/gzip`, `application/x-gzip`
140
+ - `application/x-bzip2`, `application/x-7z-compressed`, `application/x-rar-compressed`
141
+ - `application/wasm`
142
+ - `application/octet-stream`
143
+ - `application/pdf`
144
+ - `text/event-stream` (SSE — compression breaks chunked event delivery)
145
+
146
+ **By HTTP semantics:**
147
+
148
+ - Responses with existing `Content-Encoding` header (already compressed)
149
+ - Responses with `Transfer-Encoding` containing a compression algorithm (gzip, deflate, br, zstd) — `Transfer-Encoding: chunked` alone does NOT skip
150
+ - Responses with `Cache-Control: no-transform` (RFC 7234 §5.2.2.4 — intermediaries MUST NOT alter the representation)
151
+ - Responses smaller than `minSize` (default: 1024 bytes)
152
+ - Responses with no body (`null` body)
153
+ - `204 No Content`, `304 Not Modified`, `101 Switching Protocols`
154
+ - `HEAD` requests
155
+
156
+ ## HTTP Correctness
157
+
158
+ The library handles HTTP semantics properly:
159
+
160
+ - **`Content-Encoding`** is set to the chosen algorithm (`gzip`, `br`, or `zstd`)
161
+ - **`Content-Length`** is updated to the compressed size (sync path) or removed (streaming path)
162
+ - **`Vary: Accept-Encoding`** is appended when compression is considered — whether the response is compressed or not (for correct cache behavior). It is not added when compression is skipped entirely (e.g., images, HEAD requests)
163
+ - **Strong ETags** are converted to weak ETags (`"abc"` → `W/"abc"`) when compressing, per RFC 7232 — the compressed body is a different representation
164
+ - **Weak ETags** are preserved as-is (already weak)
165
+ - **`Cache-Control: no-transform`** is respected — responses are passed through unmodified per RFC 7234
166
+ - **Already-compressed responses** are never double-compressed (checked via `Content-Encoding` and `Transfer-Encoding` headers)
167
+ - **Status codes** are preserved through compression (200, 201, 404, 500, etc.)
168
+ - **Custom headers** are preserved through compression (X-Request-Id, etc.)
169
+
170
+ ## Algorithm Negotiation
171
+
172
+ The library parses the client's `Accept-Encoding` header and selects the best algorithm:
173
+
174
+ 1. Parse each algorithm and its quality value (`q=`) from the header
175
+ 2. Filter to only algorithms the server supports (configurable via `algorithms` option)
176
+ 3. Handle wildcard `*` — gives unlisted supported algorithms the wildcard quality
177
+ 4. Handle `identity` — not a compression algorithm, ignored
178
+ 5. Handle `q=0` — explicit rejection of an algorithm
179
+ 6. Sort by client quality descending, then by server preference order as tiebreaker
180
+ 7. Return the best match, or `null` if no acceptable algorithm found
181
+
182
+ Case-insensitive matching is supported (`GZIP`, `GZip`, `gzip` all work).
183
+
184
+ ## Compression Paths
185
+
186
+ | Body type | Strategy | When |
187
+ | ----------------- | ----------------------------------------- | --------------------------------------------------------------------------------- |
188
+ | Known size ≤ 10MB | Sync compression (`Bun.gzipSync`, etc.) | Fastest path for typical responses |
189
+ | Unknown size | Buffer → check minSize → sync compression | Catches small bodies without `Content-Length` (e.g., static `Response` in routes) |
190
+ | Known size > 10MB | `CompressionStream` streaming | Avoids buffering entire body in memory |
191
+
192
+ ### Sync compression details
193
+
194
+ | Algorithm | Implementation | Notes |
195
+ | --------- | ------------------------------------------------------- | ------------------------------------------------ |
196
+ | gzip | `Bun.gzipSync(data, { level })` | Native Bun API |
197
+ | brotli | `brotliCompressSync(data, { params })` from `node:zlib` | Bun has no native `Bun.brotliCompressSync()` yet |
198
+ | zstd | `Bun.zstdCompressSync(data, { level })` | Native Bun API |
199
+
200
+ ### Streaming compression details
201
+
202
+ All three algorithms use `CompressionStream` with Bun's extended format support:
203
+
204
+ - gzip → `new CompressionStream("gzip")`
205
+ - brotli → `new CompressionStream("brotli")` (Bun extension, not in Web standard)
206
+ - zstd → `new CompressionStream("zstd")` (Bun extension, not in Web standard)
207
+
208
+ ## Route Type Support
209
+
210
+ The library handles all Bun.serve() route value types:
211
+
212
+ | Route value | Behavior |
213
+ | ---------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- |
214
+ | `Response` object | Cloned and compressed per request (note: loses Bun's static route fast path — see [Known Limitations](#known-limitations)) |
215
+ | Handler function `(req) => Response` | Wrapped — response is compressed after handler returns |
216
+ | Method object `{ GET: fn, POST: fn }` | Each method handler is wrapped individually |
217
+ | HTML import (`import page from './page.html'`) | Passed through to Bun's bundler pipeline untouched |
218
+ | `false` | Passed through — Bun falls through to the `fetch` handler |
219
+ | `null` / `undefined` | Passed through as-is |
220
+
221
+ ## Exported Utilities
222
+
223
+ The library exports its internal utilities for advanced use cases:
224
+
225
+ ```typescript
226
+ import {
227
+ serve, // Drop-in Bun.serve() replacement
228
+ negotiate, // Parse Accept-Encoding → best algorithm
229
+ shouldSkip, // Check if compression should be skipped
230
+ compress, // Compress a Response object
231
+ addVaryHeader, // Add Vary: Accept-Encoding to a Response
232
+ } from "bun-serve-compress";
233
+
234
+ // Types
235
+ import type {
236
+ CompressionAlgorithm, // "zstd" | "br" | "gzip"
237
+ CompressionOptions, // User-facing config
238
+ AlgorithmOptions, // Per-algorithm { level } config
239
+ ResolvedCompressionOptions, // Fully resolved config with defaults
240
+ } from "bun-serve-compress";
241
+ ```
242
+
243
+ ## Testing
244
+
245
+ 208 tests covering negotiation, skip logic, compression integrity, HTTP semantics, concurrency, large-body integrity, Bun-specific compatibility, and the runtime version guard. Run with:
246
+
247
+ ```bash
248
+ bun test
249
+ ```
250
+
251
+ ### Test suite inspirations
252
+
253
+ The test suite was designed by studying the test suites of established HTTP compression implementations to ensure comprehensive coverage:
254
+
255
+ | Library / Server | What we learned | Link |
256
+ | ---------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
257
+ | **Express/compression** | `Cache-Control: no-transform` (RFC 7234), Vary header semantics, ETag weak/strong handling, threshold behavior, empty body edge cases, quality weight negotiation | [test/compression.js](https://github.com/expressjs/compression/blob/master/test/compression.js) |
258
+ | **Fastify/fastify-compress** | Case-insensitive Accept-Encoding, Content-Type with charset/boundary params, missing Content-Type, custom header preservation, algorithm restriction | [test/global-compress.test.js](https://github.com/fastify/fastify-compress/blob/master/test/global-compress.test.js) |
259
+ | **Koa/compress** | Unknown algorithm handling (sdch), custom shouldCompress, SVG exception for image/\* skip, default/fallback encoding | [test/index.test.ts](https://github.com/koajs/compress/blob/master/test/index.test.ts) |
260
+ | **Go net/http gziphandler** | Threshold boundary conditions (exact size, off-by-one), parallel compression benchmarks, large body integrity, Accept-Encoding: identity | [gzip_test.go](https://github.com/nytimes/gziphandler/blob/master/gzip_test.go) |
261
+ | **Nginx gzip module** | Transfer-Encoding already set, MIME type prefix matching, no-transform directive | [ngx_http_gzip_module docs](https://nginx.org/en/docs/http/ngx_http_gzip_module.html) |
262
+ | **Hono compress** | Cache-Control no-transform, Transfer-Encoding checks, identity encoding handling | [compress/index.test.ts](https://github.com/honojs/hono/blob/main/src/middleware/compress/index.test.ts) |
263
+ | **Bun test suite** | Static route cloning, fetch auto-decompression, CompressionStream formats, empty body regression, double-compression prevention | [test/regression/issue/](https://github.com/oven-sh/bun/tree/main/test/regression/issue), [test/js/web/fetch/](https://github.com/oven-sh/bun/tree/main/test/js/web/fetch) |
264
+
265
+ Each test file includes a detailed header comment documenting which specific test cases came from which source.
266
+
267
+ ## Known Limitations
268
+
269
+ ### Static route performance trade-off
270
+
271
+ When using static `Response` objects in routes (e.g., `"/": new Response("hello")`), Bun normally serves them via an optimized fast path that bypasses the JS event loop entirely. This library converts static routes into handler functions (to clone and compress per-request), which loses that optimization. For most applications this is negligible — the compression savings far outweigh the routing overhead.
272
+
273
+ ### Future Bun auto-compression
274
+
275
+ Bun is tracking built-in response compression for `Bun.serve()` in [oven-sh/bun#2726](https://github.com/oven-sh/bun/issues/2726). If/when Bun ships native auto-compression, this library could cause double-compression. We will update the library to detect and respect any future Bun compression flag. Monitor that issue for updates.
276
+
277
+ ### Bun's fetch() auto-decompression
278
+
279
+ Bun's `fetch()` client **automatically decompresses** responses and **strips the `Content-Encoding` header**. If you need to verify compression is working in your own tests or debugging, use `fetch(url, { decompress: false })` — this is a Bun-specific option that preserves the raw compressed response.
280
+
281
+ ### Streaming compression quality
282
+
283
+ The `CompressionStream` API (used for bodies > 10MB) does not accept quality/level parameters for all formats. For the sync path (≤ 10MB), compression levels are fully configurable. For most real-world responses, the sync path is used.
284
+
285
+ ## Requirements
286
+
287
+ - **Bun ≥ 1.3.3** (for `CompressionStream` with zstd support)
288
+
289
+ The library checks `Bun.version` on import and throws a clear error if the runtime is unsupported:
290
+
291
+ ```
292
+ bun-serve-compress requires Bun >= 1.3.3, but you are running Bun 1.2.0. Please upgrade Bun: bun upgrade
293
+ ```
294
+
295
+ If loaded outside of Bun (e.g., Node.js), it throws:
296
+
297
+ ```
298
+ bun-serve-compress requires the Bun runtime. This library uses Bun-specific APIs (Bun.serve, Bun.gzipSync, CompressionStream with zstd) and cannot run in Node.js or other runtimes.
299
+ ```
300
+
301
+ ## License
302
+
303
+ MIT
package/package.json ADDED
@@ -0,0 +1,62 @@
1
+ {
2
+ "name": "bun-serve-compress",
3
+ "version": "0.1.0",
4
+ "description": "Transparent HTTP response compression for Bun.serve() — gzip, brotli, zstd",
5
+ "keywords": [
6
+ "accept-encoding",
7
+ "brotli",
8
+ "bun",
9
+ "bun-serve",
10
+ "compression",
11
+ "content-encoding",
12
+ "gzip",
13
+ "http",
14
+ "serve",
15
+ "server",
16
+ "zstd"
17
+ ],
18
+ "homepage": "https://github.com/rhuanbarreto/bun-serve-compress#readme",
19
+ "bugs": {
20
+ "url": "https://github.com/rhuanbarreto/bun-serve-compress/issues"
21
+ },
22
+ "license": "MIT",
23
+ "repository": {
24
+ "type": "git",
25
+ "url": "https://github.com/rhuanbarreto/bun-serve-compress"
26
+ },
27
+ "files": [
28
+ "src",
29
+ "README.md",
30
+ "LICENSE"
31
+ ],
32
+ "main": "src/index.ts",
33
+ "module": "src/index.ts",
34
+ "types": "src/index.ts",
35
+ "exports": {
36
+ ".": {
37
+ "bun": "./src/index.ts",
38
+ "import": "./src/index.ts",
39
+ "types": "./src/index.ts"
40
+ }
41
+ },
42
+ "scripts": {
43
+ "test": "bun test",
44
+ "lint": "oxlint",
45
+ "lint:fix": "oxlint --fix",
46
+ "fmt": "oxfmt",
47
+ "fmt:check": "oxfmt --check",
48
+ "typecheck": "tsc --noEmit",
49
+ "check": "oxlint && oxfmt --check && tsc --noEmit"
50
+ },
51
+ "devDependencies": {
52
+ "bun-types": "^1.3.9",
53
+ "elysia": "^1.4.28",
54
+ "hono": "^4.12.8",
55
+ "oxfmt": "^0.41.0",
56
+ "oxlint": "^1.56.0",
57
+ "typescript": "^5.7"
58
+ },
59
+ "engines": {
60
+ "bun": ">=1.3.3"
61
+ }
62
+ }
@@ -0,0 +1,207 @@
1
+ import { brotliCompressSync, constants as zlibConstants } from "node:zlib";
2
+ import type { CompressionAlgorithm, ResolvedCompressionOptions } from "./types";
3
+
4
+ /**
5
+ * Compress data synchronously using the specified algorithm.
6
+ *
7
+ * Uses Bun's native sync compression functions for gzip and zstd,
8
+ * and node:zlib's brotliCompressSync for brotli (Bun has no native
9
+ * Bun.brotliCompressSync yet).
10
+ */
11
+ function compressSync(
12
+ data: Uint8Array<ArrayBuffer>,
13
+ algorithm: CompressionAlgorithm,
14
+ config: ResolvedCompressionOptions,
15
+ ): Uint8Array<ArrayBuffer> {
16
+ switch (algorithm) {
17
+ case "gzip":
18
+ return Bun.gzipSync(data, { level: config.gzip.level as any }) as Uint8Array<ArrayBuffer>;
19
+
20
+ case "br": {
21
+ const compressed = brotliCompressSync(data, {
22
+ params: {
23
+ [zlibConstants.BROTLI_PARAM_QUALITY]: config.brotli.level,
24
+ },
25
+ });
26
+ return new Uint8Array(
27
+ compressed.buffer,
28
+ compressed.byteOffset,
29
+ compressed.byteLength,
30
+ ) as Uint8Array<ArrayBuffer>;
31
+ }
32
+
33
+ case "zstd":
34
+ return Bun.zstdCompressSync(data, { level: config.zstd.level }) as Uint8Array<ArrayBuffer>;
35
+ }
36
+ }
37
+
38
+ /**
39
+ * Create a compressed ReadableStream using CompressionStream API.
40
+ */
41
+ function compressStream(body: ReadableStream, algorithm: CompressionAlgorithm): ReadableStream {
42
+ // Map algorithm names to CompressionStream format
43
+ let format: string;
44
+ switch (algorithm) {
45
+ case "gzip":
46
+ format = "gzip";
47
+ break;
48
+ case "br":
49
+ // Bun supports "brotli" as a custom format name in CompressionStream
50
+ format = "brotli";
51
+ break;
52
+ case "zstd":
53
+ format = "zstd";
54
+ break;
55
+ }
56
+
57
+ const stream = new CompressionStream(format as CompressionFormat);
58
+ return body.pipeThrough(stream as any);
59
+ }
60
+
61
+ /**
62
+ * Append a value to the Vary header, preserving existing values.
63
+ */
64
+ function appendVary(headers: Headers, value: string): void {
65
+ const existing = headers.get("vary");
66
+ if (existing) {
67
+ // Don't add if already present or if Vary is *
68
+ if (existing === "*") return;
69
+ const values = existing.split(",").map((v) => v.trim().toLowerCase());
70
+ if (values.includes(value.toLowerCase())) return;
71
+ headers.set("vary", `${existing}, ${value}`);
72
+ } else {
73
+ headers.set("vary", value);
74
+ }
75
+ }
76
+
77
+ /**
78
+ * Build response headers for a compressed response.
79
+ */
80
+ function buildHeaders(
81
+ original: Headers,
82
+ algorithm: CompressionAlgorithm,
83
+ compressedSize: number | null,
84
+ ): Headers {
85
+ const headers = new Headers(original);
86
+
87
+ // Set Content-Encoding
88
+ headers.set("content-encoding", algorithm);
89
+
90
+ // Update or remove Content-Length
91
+ if (compressedSize !== null) {
92
+ headers.set("content-length", compressedSize.toString());
93
+ } else {
94
+ headers.delete("content-length");
95
+ }
96
+
97
+ // Append Vary: Accept-Encoding
98
+ appendVary(headers, "Accept-Encoding");
99
+
100
+ // Handle ETag — if present and strong, make it weak since body changed
101
+ const etag = headers.get("etag");
102
+ if (etag && !etag.startsWith("W/")) {
103
+ headers.set("etag", `W/${etag}`);
104
+ }
105
+
106
+ return headers;
107
+ }
108
+
109
+ /**
110
+ * Compress an HTTP Response.
111
+ *
112
+ * Chooses between sync (buffered) and streaming compression based on the response body type:
113
+ * - If the body can be read as an ArrayBuffer (non-streaming), use sync compression
114
+ * - If the body is a ReadableStream, use CompressionStream
115
+ *
116
+ * Also performs a final minSize check after buffering — this catches cases where
117
+ * Content-Length was not set on the original response (e.g., static Route responses).
118
+ *
119
+ * Returns a new Response with compressed body and updated headers.
120
+ */
121
+ export async function compress(
122
+ res: Response,
123
+ algorithm: CompressionAlgorithm,
124
+ config: ResolvedCompressionOptions,
125
+ ): Promise<Response> {
126
+ // Check if we should use streaming or buffered compression
127
+ // If bodyUsed is true, we can't read it — shouldn't happen but guard against it
128
+ if (res.bodyUsed) return res;
129
+
130
+ const body = res.body;
131
+ if (!body) return res;
132
+
133
+ // Try buffered (sync) compression first — faster for small/medium responses
134
+ // We check if we can read the full body. If Content-Length is known and reasonable
135
+ // (< 10MB), use sync. Otherwise use streaming.
136
+ const contentLength = res.headers.get("content-length");
137
+ const knownSize = contentLength ? parseInt(contentLength, 10) : null;
138
+ const MAX_BUFFER_SIZE = 10 * 1024 * 1024; // 10MB
139
+
140
+ if (knownSize !== null && knownSize <= MAX_BUFFER_SIZE) {
141
+ // Known size, fits in memory — sync path
142
+ const buffer = new Uint8Array(await res.arrayBuffer()) as Uint8Array<ArrayBuffer>;
143
+ const compressed = compressSync(buffer, algorithm, config);
144
+
145
+ return new Response(compressed as BodyInit, {
146
+ status: res.status,
147
+ statusText: res.statusText,
148
+ headers: buildHeaders(res.headers, algorithm, compressed.byteLength),
149
+ });
150
+ }
151
+
152
+ if (knownSize !== null) {
153
+ // Known size but too large — streaming path
154
+ const compressedStream = compressStream(body, algorithm);
155
+
156
+ return new Response(compressedStream, {
157
+ status: res.status,
158
+ statusText: res.statusText,
159
+ headers: buildHeaders(res.headers, algorithm, null),
160
+ });
161
+ }
162
+
163
+ // Unknown size (no Content-Length) — buffer to check minSize, then compress
164
+ // This handles static Response objects that don't set Content-Length
165
+ const buffer = new Uint8Array(await res.arrayBuffer()) as Uint8Array<ArrayBuffer>;
166
+
167
+ if (buffer.byteLength < config.minSize) {
168
+ // Below threshold — return uncompressed with original body
169
+ return new Response(buffer as BodyInit, {
170
+ status: res.status,
171
+ statusText: res.statusText,
172
+ headers: new Headers(res.headers),
173
+ });
174
+ }
175
+
176
+ const compressed = compressSync(buffer, algorithm, config);
177
+
178
+ return new Response(compressed as BodyInit, {
179
+ status: res.status,
180
+ statusText: res.statusText,
181
+ headers: buildHeaders(res.headers, algorithm, compressed.byteLength),
182
+ });
183
+ }
184
+
185
+ /**
186
+ * Add Vary: Accept-Encoding header to a response without compressing it.
187
+ * Used when we skip compression but still need correct caching behavior.
188
+ */
189
+ export function addVaryHeader(res: Response): Response {
190
+ // If the response already has the correct Vary header, return as-is
191
+ const vary = res.headers.get("vary");
192
+ if (vary) {
193
+ if (vary === "*") return res;
194
+ const values = vary.split(",").map((v) => v.trim().toLowerCase());
195
+ if (values.includes("accept-encoding")) return res;
196
+ }
197
+
198
+ // Clone headers and add Vary
199
+ const headers = new Headers(res.headers);
200
+ appendVary(headers, "Accept-Encoding");
201
+
202
+ return new Response(res.body, {
203
+ status: res.status,
204
+ statusText: res.statusText,
205
+ headers,
206
+ });
207
+ }
@@ -0,0 +1,79 @@
1
+ import type { CompressionAlgorithm, ResolvedCompressionOptions } from "./types";
2
+
3
/**
 * Default algorithm preference order, most preferred first.
 * zstd compresses fast at a good ratio, brotli typically achieves the
 * smallest output (but slower), and gzip is the universally supported
 * fallback.
 */
export const DEFAULT_ALGORITHMS: CompressionAlgorithm[] = ["zstd", "br", "gzip"];

/** Default compression levels per algorithm. */
export const DEFAULT_GZIP_LEVEL = 6;
export const DEFAULT_BROTLI_LEVEL = 5; // NOT 11 — max quality is ~30x slower
export const DEFAULT_ZSTD_LEVEL = 3;

/** Minimum response size in bytes to trigger compression. */
export const DEFAULT_MIN_SIZE = 1024;

/**
 * MIME types that should NOT be compressed (exact matches).
 * These are already compressed or binary formats where compression adds overhead.
 */
export const SKIP_MIME_TYPES = new Set<string>([
  // Archives (already compressed)
  "application/zip",
  "application/gzip",
  "application/x-gzip",
  "application/x-bzip2",
  "application/x-7z-compressed",
  "application/x-rar-compressed",

  // Binary formats
  "application/wasm",
  "application/octet-stream",
  "application/pdf",

  // SSE — compression breaks chunked event delivery
  "text/event-stream",
]);

/**
 * MIME type prefixes that should NOT be compressed.
 * Entire categories of binary/compressed content.
 */
export const SKIP_MIME_PREFIXES: string[] = [
  "image/", // except image/svg+xml — handled specially
  "audio/",
  "video/",
  "font/",
];

/**
 * MIME types that are exceptions to the prefix skip rules.
 * These are text-based formats within otherwise-binary categories.
 */
export const COMPRESSIBLE_EXCEPTIONS = new Set<string>(["image/svg+xml"]);

/**
 * HTTP status codes that indicate no body — skip compression.
 */
export const NO_BODY_STATUSES = new Set<number>([
  101, // Switching Protocols (WebSocket)
  204, // No Content
  304, // Not Modified
]);
64
+
65
+ /**
66
+ * Build the default resolved config.
67
+ */
68
+ export function getDefaultResolvedConfig(): ResolvedCompressionOptions {
69
+ return {
70
+ disable: false,
71
+ algorithms: [...DEFAULT_ALGORITHMS],
72
+ gzip: { level: DEFAULT_GZIP_LEVEL },
73
+ brotli: { level: DEFAULT_BROTLI_LEVEL },
74
+ zstd: { level: DEFAULT_ZSTD_LEVEL },
75
+ minSize: DEFAULT_MIN_SIZE,
76
+ skipMimeTypes: new Set(SKIP_MIME_TYPES),
77
+ skipMimePrefixes: [...SKIP_MIME_PREFIXES],
78
+ };
79
+ }
package/src/index.ts ADDED
@@ -0,0 +1,10 @@
1
+ export { serve } from "./serve";
2
+ export { negotiate } from "./negotiate";
3
+ export { shouldSkip } from "./skip";
4
+ export { compress, addVaryHeader } from "./compress";
5
+ export type {
6
+ CompressionAlgorithm,
7
+ CompressionOptions,
8
+ AlgorithmOptions,
9
+ ResolvedCompressionOptions,
10
+ } from "./types";
@@ -0,0 +1,106 @@
1
+ import type { CompressionAlgorithm } from "./types";
2
+
3
+ interface EncodingEntry {
4
+ algorithm: string;
5
+ quality: number;
6
+ }
7
+
8
+ /**
9
+ * Parse an Accept-Encoding header into entries with quality values.
10
+ *
11
+ * Examples:
12
+ * "gzip, br;q=0.8, zstd;q=1.0" → [{algorithm:"gzip",quality:1}, {algorithm:"br",quality:0.8}, {algorithm:"zstd",quality:1}]
13
+ * "*;q=0.5" → [{algorithm:"*",quality:0.5}]
14
+ */
15
+ function parseAcceptEncoding(header: string): EncodingEntry[] {
16
+ const entries: EncodingEntry[] = [];
17
+
18
+ for (const part of header.split(",")) {
19
+ const trimmed = part.trim();
20
+ if (!trimmed) continue;
21
+
22
+ const [algorithm, ...params] = trimmed.split(";").map((s) => s.trim());
23
+ let quality = 1.0;
24
+
25
+ for (const param of params) {
26
+ const match = param.match(/^q\s*=\s*([0-9.]+)$/i);
27
+ if (match) {
28
+ quality = parseFloat(match[1]);
29
+ if (isNaN(quality)) quality = 1.0;
30
+ quality = Math.max(0, Math.min(1, quality));
31
+ }
32
+ }
33
+
34
+ entries.push({ algorithm: algorithm.toLowerCase(), quality });
35
+ }
36
+
37
+ return entries;
38
+ }
39
+
40
+ /**
41
+ * Negotiate the best compression algorithm based on the Accept-Encoding header
42
+ * and the server's preferred algorithm order.
43
+ *
44
+ * Returns the chosen algorithm, or null if no acceptable algorithm is found.
45
+ *
46
+ * Selection logic:
47
+ * 1. Parse client's Accept-Encoding into {algorithm, quality} pairs
48
+ * 2. Filter to only algorithms we support and the client accepts (q > 0)
49
+ * 3. Handle wildcard (*) — gives unlisted supported algorithms the wildcard quality
50
+ * 4. Sort by client quality descending, then by server preference order
51
+ * 5. Return the top result
52
+ */
53
+ export function negotiate(
54
+ acceptEncoding: string,
55
+ preferredOrder: CompressionAlgorithm[],
56
+ ): CompressionAlgorithm | null {
57
+ if (!acceptEncoding) return null;
58
+
59
+ const entries = parseAcceptEncoding(acceptEncoding);
60
+ if (entries.length === 0) return null;
61
+
62
+ // Build a map of algorithm → quality from client preferences
63
+ const clientPrefs = new Map<string, number>();
64
+ let wildcardQuality: number | null = null;
65
+
66
+ for (const entry of entries) {
67
+ if (entry.algorithm === "*") {
68
+ wildcardQuality = entry.quality;
69
+ } else {
70
+ clientPrefs.set(entry.algorithm, entry.quality);
71
+ }
72
+ }
73
+
74
+ // Build candidates: supported algorithms that the client accepts
75
+ const candidates: { algorithm: CompressionAlgorithm; quality: number; serverRank: number }[] = [];
76
+
77
+ for (let i = 0; i < preferredOrder.length; i++) {
78
+ const algo = preferredOrder[i];
79
+
80
+ let quality: number | null = null;
81
+ if (clientPrefs.has(algo)) {
82
+ quality = clientPrefs.get(algo)!;
83
+ } else if (wildcardQuality !== null) {
84
+ quality = wildcardQuality;
85
+ }
86
+
87
+ // Skip if client doesn't accept this algorithm or explicitly rejects it (q=0)
88
+ if (quality === null || quality === 0) continue;
89
+
90
+ candidates.push({
91
+ algorithm: algo,
92
+ quality,
93
+ serverRank: i,
94
+ });
95
+ }
96
+
97
+ if (candidates.length === 0) return null;
98
+
99
+ // Sort: highest quality first, then by server preference order (lower rank = more preferred)
100
+ candidates.sort((a, b) => {
101
+ if (a.quality !== b.quality) return b.quality - a.quality;
102
+ return a.serverRank - b.serverRank;
103
+ });
104
+
105
+ return candidates[0].algorithm;
106
+ }
package/src/serve.ts ADDED
@@ -0,0 +1,267 @@
1
+ import { getDefaultResolvedConfig, SKIP_MIME_TYPES } from "./constants";
2
+ import { compress, addVaryHeader } from "./compress";
3
+ import { negotiate } from "./negotiate";
4
+ import { shouldSkip } from "./skip";
5
+ import type { CompressionOptions, ResolvedCompressionOptions } from "./types";
6
+
7
+ /**
8
+ * Minimum supported Bun version (semver range).
9
+ * Requires Bun >= 1.3.3 for CompressionStream with zstd support.
10
+ */
11
+ const MIN_BUN_VERSION_RANGE = ">=1.3.3";
12
+ const MIN_BUN_VERSION_DISPLAY = "1.3.3";
13
+
14
+ /**
15
+ * Check that the current Bun version meets the minimum requirement.
16
+ * Uses Bun's built-in semver utility for reliable version comparison.
17
+ * Throws a clear error if not.
18
+ */
19
+ function checkBunVersion(): void {
20
+ if (typeof Bun === "undefined" || !Bun.version) {
21
+ throw new Error(
22
+ "bun-serve-compress requires the Bun runtime. " +
23
+ "This library uses Bun-specific APIs (Bun.serve, Bun.gzipSync, CompressionStream with zstd) " +
24
+ "and cannot run in Node.js or other runtimes.",
25
+ );
26
+ }
27
+
28
+ if (!Bun.semver.satisfies(Bun.version, MIN_BUN_VERSION_RANGE)) {
29
+ throw new Error(
30
+ `bun-serve-compress requires Bun >= ${MIN_BUN_VERSION_DISPLAY}, but you are running Bun ${Bun.version}. ` +
31
+ "Please upgrade Bun: bun upgrade",
32
+ );
33
+ }
34
+ }
35
+
36
+ // Run version check on module load
37
+ checkBunVersion();
38
+
39
+ /**
40
+ * Resolve user-provided compression options into a fully-populated config
41
+ * with all defaults applied.
42
+ */
43
+ function resolveConfig(options?: CompressionOptions | false): ResolvedCompressionOptions {
44
+ const defaults = getDefaultResolvedConfig();
45
+
46
+ if (options === false || options?.disable) {
47
+ return { ...defaults, disable: true };
48
+ }
49
+
50
+ if (!options) return defaults;
51
+
52
+ const config: ResolvedCompressionOptions = {
53
+ disable: false,
54
+ algorithms: options.algorithms ?? defaults.algorithms,
55
+ gzip: { level: options.gzip?.level ?? defaults.gzip.level },
56
+ brotli: { level: options.brotli?.level ?? defaults.brotli.level },
57
+ zstd: { level: options.zstd?.level ?? defaults.zstd.level },
58
+ minSize: options.minSize ?? defaults.minSize,
59
+ skipMimeTypes: defaults.skipMimeTypes,
60
+ skipMimePrefixes: defaults.skipMimePrefixes,
61
+ shouldCompress: options.shouldCompress,
62
+ };
63
+
64
+ // Handle custom skip MIME types
65
+ if (options.overrideSkipMimeTypes) {
66
+ config.skipMimeTypes = new Set(options.overrideSkipMimeTypes);
67
+ config.skipMimePrefixes = []; // user took full control
68
+ } else if (options.skipMimeTypes) {
69
+ // Merge with defaults
70
+ config.skipMimeTypes = new Set([...SKIP_MIME_TYPES, ...options.skipMimeTypes]);
71
+ }
72
+
73
+ return config;
74
+ }
75
+
76
+ /**
77
+ * The compression middleware logic applied to each response.
78
+ */
79
+ function compressResponse(
80
+ req: Request,
81
+ res: Response,
82
+ config: ResolvedCompressionOptions,
83
+ ): Response | Promise<Response> {
84
+ // Check if compression should be skipped
85
+ if (shouldSkip(req, res, config)) {
86
+ return res;
87
+ }
88
+
89
+ // Negotiate the best algorithm
90
+ const acceptEncoding = req.headers.get("accept-encoding") ?? "";
91
+ const algorithm = negotiate(acceptEncoding, config.algorithms);
92
+
93
+ if (!algorithm) {
94
+ // No acceptable algorithm, but add Vary header for caching
95
+ return addVaryHeader(res);
96
+ }
97
+
98
+ // Compress the response
99
+ return compress(res, algorithm, config);
100
+ }
101
+
102
/**
 * Wrap a fetch handler to add compression.
 */

// Uppercase HTTP method names. Used to recognize method-specific route
// objects ({ GET: handler, POST: handler, ... }) and to tell them apart
// from HTML imports in wrapRouteHandler/isHtmlImport.
const HTTP_METHODS = new Set(["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]);

// Bun's fetch handler type involves overloads/generics, so the wrappers
// treat handlers as opaque functions and just preserve `this` + arguments.
// eslint-disable-next-line typescript/ban-types, typescript/no-unsafe-function-type -- Bun's fetch handler type is complex
type AnyFunction = Function;
109
+
110
+ function wrapFetch(originalFetch: AnyFunction, config: ResolvedCompressionOptions): AnyFunction {
111
+ return async function (this: any, req: Request, server: any) {
112
+ const response = await originalFetch.call(this, req, server);
113
+
114
+ // WebSocket upgrade or void return — pass through
115
+ if (!response) return response;
116
+
117
+ return compressResponse(req, response as Response, config);
118
+ };
119
+ }
120
+
121
+ /**
122
+ * Wrap route handlers to add compression.
123
+ *
124
+ * Routes can be:
125
+ * - Response objects (static)
126
+ * - Handler functions (req => Response)
127
+ * - HTML imports (special Bun objects — pass through untouched)
128
+ * - Method-specific objects { GET: handler, POST: handler }
129
+ */
130
+ function wrapRoutes(
131
+ routes: Record<string, any>,
132
+ config: ResolvedCompressionOptions,
133
+ ): Record<string, any> {
134
+ const wrapped: Record<string, any> = {};
135
+
136
+ for (const [path, handler] of Object.entries(routes)) {
137
+ wrapped[path] = wrapRouteHandler(handler, config);
138
+ }
139
+
140
+ return wrapped;
141
+ }
142
+
143
+ /**
144
+ * Wrap a single route handler.
145
+ */
146
+ function wrapRouteHandler(handler: any, config: ResolvedCompressionOptions): any {
147
+ // false: Bun falls through to the fetch handler. Pass through as-is.
148
+ // null/undefined: no handler. Pass through as-is.
149
+ if (handler === false || handler === null || handler === undefined) {
150
+ return handler;
151
+ }
152
+
153
+ // HTML import — Bun handles these specially for frontend bundling.
154
+ // They are objects with specific internal properties that Bun's serve recognizes.
155
+ // We must NOT wrap these — let Bun handle them natively.
156
+ // HTML imports are typically objects (not Response, not Function) that Bun processes
157
+ // into its asset pipeline. We detect them by checking they're not a standard type.
158
+ if (isHtmlImport(handler)) {
159
+ return handler;
160
+ }
161
+
162
+ // Response object (static route) — wrap in a function that clones and compresses per request
163
+ if (handler instanceof Response) {
164
+ return function (req: Request) {
165
+ const cloned = handler.clone();
166
+ return compressResponse(req, cloned, config);
167
+ };
168
+ }
169
+
170
+ // Handler function
171
+ if (typeof handler === "function") {
172
+ return async function (this: any, req: Request, server: any) {
173
+ const response = await handler.call(this, req, server);
174
+ if (!response) return response;
175
+ return compressResponse(req, response, config);
176
+ };
177
+ }
178
+
179
+ // Method-specific object: { GET: handler, POST: handler, ... }
180
+ if (typeof handler === "object" && !Array.isArray(handler)) {
181
+ const hasMethodKey = Object.keys(handler).some((key) => HTTP_METHODS.has(key.toUpperCase()));
182
+
183
+ if (hasMethodKey) {
184
+ const wrappedMethods: Record<string, any> = {};
185
+ for (const [method, methodHandler] of Object.entries(handler)) {
186
+ wrappedMethods[method] = wrapRouteHandler(methodHandler, config);
187
+ }
188
+ return wrappedMethods;
189
+ }
190
+ }
191
+
192
+ // Unknown type — pass through unchanged (let Bun handle or error)
193
+ return handler;
194
+ }
195
+
196
+ /**
197
+ * Detect if a route handler is an HTML import (Bun's frontend bundling feature).
198
+ *
199
+ * When you do `import page from './page.html'` in Bun, it creates a special
200
+ * module object that Bun.serve() recognizes for its built-in bundler pipeline.
201
+ * These are NOT Response objects or functions — they're opaque module objects.
202
+ *
203
+ * We must pass these through untouched so Bun's asset pipeline works correctly.
204
+ */
205
+ function isHtmlImport(handler: any): boolean {
206
+ // HTML imports are not Response, not Function, not null/undefined
207
+ // They are objects that Bun's serve() knows how to handle internally.
208
+ // The safest detection: it's an object with a default export or
209
+ // is a module namespace object from an HTML import.
210
+ if (typeof handler !== "object") return false;
211
+ if (handler instanceof Response) return false;
212
+ if (handler instanceof ReadableStream) return false;
213
+ if (Array.isArray(handler)) return false;
214
+
215
+ // Check for Bun's HTML module marker
216
+ // HTML imports have a specific shape — they're typically the module itself
217
+ // with properties that Bun uses internally for bundling
218
+ // A method-specific handler would have HTTP method keys (GET, POST, etc.)
219
+ const keys = Object.keys(handler);
220
+ const hasMethodKey = keys.some((key) => HTTP_METHODS.has(key.toUpperCase()));
221
+ if (hasMethodKey) return false;
222
+
223
+ // If it's an object without HTTP method keys, it's likely an HTML import
224
+ // or some other Bun-specific handler — pass through
225
+ return true;
226
+ }
227
+
228
+ /**
229
+ * Drop-in replacement for Bun.serve() that adds transparent response compression.
230
+ *
231
+ * Usage:
232
+ * ```ts
233
+ * import { serve } from 'bun-serve-compress';
234
+ *
235
+ * serve({
236
+ * port: 3000,
237
+ * compression: { algorithms: ['zstd', 'br', 'gzip'] },
238
+ * fetch(req) {
239
+ * return new Response('Hello!');
240
+ * },
241
+ * });
242
+ * ```
243
+ */
244
+ export function serve(options: any): any {
245
+ // Extract compression config
246
+ const { compression, ...serveOptions } = options;
247
+
248
+ // Resolve config with defaults
249
+ const config = resolveConfig(compression);
250
+
251
+ // If compression is disabled, just pass through to Bun.serve
252
+ if (config.disable) {
253
+ return Bun.serve(serveOptions);
254
+ }
255
+
256
+ // Wrap fetch handler if present
257
+ if (serveOptions.fetch) {
258
+ serveOptions.fetch = wrapFetch(serveOptions.fetch, config);
259
+ }
260
+
261
+ // Wrap routes if present
262
+ if (serveOptions.routes) {
263
+ serveOptions.routes = wrapRoutes(serveOptions.routes, config);
264
+ }
265
+
266
+ return Bun.serve(serveOptions);
267
+ }
package/src/skip.ts ADDED
@@ -0,0 +1,117 @@
1
+ import { COMPRESSIBLE_EXCEPTIONS, NO_BODY_STATUSES } from "./constants";
2
+ import type { ResolvedCompressionOptions } from "./types";
3
+
4
+ /**
5
+ * Extract the MIME type from a Content-Type header value.
6
+ * Strips parameters like charset, boundary, etc.
7
+ *
8
+ * "text/html; charset=utf-8" → "text/html"
9
+ */
10
+ function extractMimeType(contentType: string): string {
11
+ return contentType.split(";")[0].trim().toLowerCase();
12
+ }
13
+
14
+ /**
15
+ * Check if a MIME type matches the skip list.
16
+ */
17
+ function mimeMatchesSkipList(
18
+ mime: string,
19
+ skipTypes: Set<string>,
20
+ skipPrefixes: string[],
21
+ ): boolean {
22
+ // Exceptions first — these are compressible despite matching prefix rules
23
+ // (e.g., image/svg+xml is text-based despite the image/* prefix skip)
24
+ if (COMPRESSIBLE_EXCEPTIONS.has(mime)) return false;
25
+
26
+ // Exact match
27
+ if (skipTypes.has(mime)) return true;
28
+
29
+ // Prefix match (e.g., "image/", "audio/")
30
+ for (const prefix of skipPrefixes) {
31
+ if (mime.startsWith(prefix)) return true;
32
+ }
33
+
34
+ return false;
35
+ }
36
+
37
+ /**
38
+ * Check if Cache-Control header contains the no-transform directive.
39
+ * Per RFC 7234 Section 5.2.2.4, intermediaries MUST NOT alter the
40
+ * representation when no-transform is present.
41
+ */
42
+ function hasNoTransform(res: Response): boolean {
43
+ const cacheControl = res.headers.get("cache-control");
44
+ if (!cacheControl) return false;
45
+ return cacheControl
46
+ .split(",")
47
+ .some((directive) => directive.trim().toLowerCase() === "no-transform");
48
+ }
49
+
50
+ /**
51
+ * Determine whether compression should be skipped for this request/response pair.
52
+ *
53
+ * Returns true if compression should be SKIPPED (response passed through as-is).
54
+ */
55
+ export function shouldSkip(
56
+ req: Request,
57
+ res: Response,
58
+ config: ResolvedCompressionOptions,
59
+ ): boolean {
60
+ // 1. Compression disabled globally
61
+ if (config.disable) return true;
62
+
63
+ // 2. HEAD requests have no body to compress
64
+ if (req.method === "HEAD") return true;
65
+
66
+ // 3. Status codes that indicate no body
67
+ if (NO_BODY_STATUSES.has(res.status)) return true;
68
+
69
+ // 4. Response already has Content-Encoding (already compressed)
70
+ if (res.headers.has("content-encoding")) return true;
71
+
72
+ // 5. Response already has Transfer-Encoding set (already encoded)
73
+ const transferEncoding = res.headers.get("transfer-encoding");
74
+ if (transferEncoding) {
75
+ const encodings = transferEncoding.toLowerCase();
76
+ // Skip if there's a content encoding like deflate or gzip in Transfer-Encoding
77
+ // (chunked alone is fine — it's just framing)
78
+ if (
79
+ encodings.includes("gzip") ||
80
+ encodings.includes("deflate") ||
81
+ encodings.includes("compress") ||
82
+ encodings.includes("br") ||
83
+ encodings.includes("zstd")
84
+ ) {
85
+ return true;
86
+ }
87
+ }
88
+
89
+ // 6. Cache-Control: no-transform — MUST NOT alter representation (RFC 7234)
90
+ if (hasNoTransform(res)) return true;
91
+
92
+ // 7. No body
93
+ if (res.body === null) return true;
94
+
95
+ // 8. Check Content-Type against skip list
96
+ const contentType = res.headers.get("content-type");
97
+ if (contentType) {
98
+ const mime = extractMimeType(contentType);
99
+ if (mimeMatchesSkipList(mime, config.skipMimeTypes, config.skipMimePrefixes)) {
100
+ return true;
101
+ }
102
+ }
103
+
104
+ // 9. Body size below minimum threshold (only if Content-Length is known)
105
+ const contentLength = res.headers.get("content-length");
106
+ if (contentLength !== null) {
107
+ const size = parseInt(contentLength, 10);
108
+ if (!isNaN(size) && size < config.minSize) return true;
109
+ }
110
+
111
+ // 10. User's custom shouldCompress function
112
+ if (config.shouldCompress && !config.shouldCompress(req, res)) {
113
+ return true;
114
+ }
115
+
116
+ return false;
117
+ }
package/src/types.ts ADDED
@@ -0,0 +1,66 @@
1
/**
 * Supported compression algorithms.
 * Names match the Accept-Encoding header tokens ("br" = brotli).
 */
export type CompressionAlgorithm = "zstd" | "br" | "gzip";

/**
 * Per-algorithm quality/level settings.
 */
export interface AlgorithmOptions {
  /**
   * Compression level.
   * - gzip: 1-9 (default 6)
   * - brotli: 0-11 (default 5, NOT 11 which is too slow for real-time)
   * - zstd: 1-22 (default 3)
   */
  level?: number;
}

/**
 * Compression configuration options as accepted from the user.
 * Every field is optional; unspecified fields fall back to the defaults
 * described on each member.
 */
export interface CompressionOptions {
  /** Disable compression entirely. Default: false */
  disable?: boolean;

  /** Algorithm preference order (server-side tiebreaker). Default: ['zstd', 'br', 'gzip'] */
  algorithms?: CompressionAlgorithm[];

  /** Per-algorithm settings */
  gzip?: AlgorithmOptions;
  brotli?: AlgorithmOptions;
  zstd?: AlgorithmOptions;

  /** Minimum response body size in bytes to compress. Default: 1024 */
  minSize?: number;

  /** Additional MIME types to skip (merged with built-in skip list) */
  skipMimeTypes?: string[];

  /**
   * Override the entire skip list instead of merging with built-in list.
   * Takes precedence over skipMimeTypes and also clears the built-in
   * prefix rules (e.g. "image/").
   */
  overrideSkipMimeTypes?: string[];

  /**
   * Custom function to decide whether to compress a response.
   * Return true to compress, false to skip.
   * Called after all other skip checks pass.
   */
  shouldCompress?: (req: Request, res: Response) => boolean;
}

/**
 * Resolved compression config with all defaults applied.
 * All fields are guaranteed to be present (shouldCompress stays optional:
 * undefined means "no user predicate").
 */
export interface ResolvedCompressionOptions {
  disable: boolean;
  algorithms: CompressionAlgorithm[];
  gzip: Required<AlgorithmOptions>;
  brotli: Required<AlgorithmOptions>;
  zstd: Required<AlgorithmOptions>;
  minSize: number;
  // Exact-match MIME types to skip.
  skipMimeTypes: Set<string>;
  // Prefix rules (e.g. "image/") to skip.
  skipMimePrefixes: string[];
  shouldCompress?: (req: Request, res: Response) => boolean;
}