@osmix/pbf 0.0.1

Files changed (45)
  1. package/CHANGELOG.md +7 -0
  2. package/README.md +144 -0
  3. package/dist/blobs-to-blocks.d.ts +5 -0
  4. package/dist/blobs-to-blocks.js +21 -0
  5. package/dist/blocks-to-pbf.d.ts +16 -0
  6. package/dist/blocks-to-pbf.js +73 -0
  7. package/dist/index.d.ts +8 -0
  8. package/dist/index.js +8 -0
  9. package/dist/pbf-to-blobs.d.ts +6 -0
  10. package/dist/pbf-to-blobs.js +48 -0
  11. package/dist/pbf-to-blocks.d.ts +20 -0
  12. package/dist/pbf-to-blocks.js +53 -0
  13. package/dist/proto/fileformat.d.ts +26 -0
  14. package/dist/proto/fileformat.js +56 -0
  15. package/dist/proto/osmformat.d.ts +91 -0
  16. package/dist/proto/osmformat.js +458 -0
  17. package/dist/spec.d.ts +5 -0
  18. package/dist/spec.js +9 -0
  19. package/dist/utils.d.ts +27 -0
  20. package/dist/utils.js +92 -0
  21. package/package.json +49 -0
  22. package/src/blobs-to-blocks.ts +28 -0
  23. package/src/blocks-to-pbf.ts +98 -0
  24. package/src/index.ts +8 -0
  25. package/src/pbf-to-blobs.ts +56 -0
  26. package/src/pbf-to-blocks.ts +77 -0
  27. package/src/proto/fileformat.proto +68 -0
  28. package/src/proto/fileformat.ts +70 -0
  29. package/src/proto/osmformat.proto +262 -0
  30. package/src/proto/osmformat.ts +488 -0
  31. package/src/spec.ts +10 -0
  32. package/src/utils.ts +90 -0
  33. package/test/blobs-to-blocks.test.ts +73 -0
  34. package/test/helpers.ts +66 -0
  35. package/test/pbf-to-blobs.test.ts +85 -0
  36. package/test/read.bench.ts +42 -0
  37. package/test/read.test.ts +45 -0
  38. package/test/streams.test.ts +92 -0
  39. package/test/utils.bun.test.ts +327 -0
  40. package/test/utils.test.ts +56 -0
  41. package/test/utils.ts +65 -0
  42. package/test/verify-pbf-reading.bun.test.ts +39 -0
  43. package/test/write.test.ts +86 -0
  44. package/tsconfig.json +9 -0
  45. package/vitest.config.ts +7 -0
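
The tests below exercise the package's public pieces: `readOsmPbf` for whole-buffer reads, `createOsmPbfBlobGenerator` and `osmPbfBlobsToBlocksGenerator` for incremental parsing, and a pair of TransformStreams for converting bytes to blocks and back. As a minimal sketch of intended usage, assuming the root `package/src/index.ts` re-exports the names the tests import from `src/` (the fetch URL is a placeholder):

```ts
import {
  OsmPbfBytesToBlocksTransformStream,
  readOsmPbf,
} from "@osmix/pbf"

// One-shot: parse an in-memory PBF file into header + primitive blocks.
const response = await fetch("https://example.com/extract.osm.pbf")
const osm = await readOsmPbf(new Uint8Array(await response.arrayBuffer()))

// Streaming: decode blocks as bytes arrive, without buffering the file.
const streamed = await fetch("https://example.com/extract.osm.pbf")
await streamed.body
  ?.pipeThrough(new OsmPbfBytesToBlocksTransformStream())
  .pipeTo(
    new WritableStream({
      write(block) {
        if ("primitivegroup" in block) {
          // primitive block: groups of nodes/ways/relations
        } else {
          // header block: bbox, required_features, ...
        }
      },
    }),
  )
```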
package/test/helpers.ts
@@ -0,0 +1,66 @@
+ import { osmBlockToPbfBlobBytes } from "../src/blocks-to-pbf"
+ import type { OsmPbfBlock, OsmPbfHeaderBlock } from "../src/proto/osmformat"
+ import { concatUint8 } from "../src/utils"
+
+ const encoder = new TextEncoder()
+
+ export function createSampleHeader(): OsmPbfHeaderBlock {
+   return {
+     bbox: { left: 0, right: 1, top: 1, bottom: 0 },
+     required_features: ["OsmSchema-V0.6"],
+     optional_features: ["DenseNodes"],
+     writingprogram: "osmix-tests",
+   }
+ }
+
+ export function createSamplePrimitiveBlock(): OsmPbfBlock {
+   return {
+     stringtable: [
+       encoder.encode(""),
+       encoder.encode("name"),
+       encoder.encode("value"),
+     ],
+     primitivegroup: [
+       {
+         nodes: [],
+         dense: {
+           id: [1, 2],
+           lat: [1_000, 500],
+           lon: [1_500, 600],
+           keys_vals: [1, 2, 0],
+         },
+         ways: [
+           {
+             id: 10,
+             keys: [1],
+             vals: [2],
+             refs: [1, 1, 0],
+           },
+         ],
+         relations: [],
+       },
+     ],
+   }
+ }
+
+ export async function createSamplePbfFileBytes() {
+   const header = createSampleHeader()
+   const primitiveBlock = createSamplePrimitiveBlock()
+   const headerBytes = await osmBlockToPbfBlobBytes(header)
+   const primitiveBytes = await osmBlockToPbfBlobBytes(primitiveBlock)
+   return {
+     header,
+     primitiveBlock,
+     fileBytes: concatUint8(headerBytes, primitiveBytes),
+   }
+ }
+
+ export function isHeaderBlock(value: unknown): value is OsmPbfHeaderBlock {
+   return (
+     typeof value === "object" && value != null && "required_features" in value
+   )
+ }
+
+ export function isPrimitiveBlock(value: unknown): value is OsmPbfBlock {
+   return typeof value === "object" && value != null && "primitivegroup" in value
+ }
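
The sample block above leans on two OSM PBF packing conventions worth spelling out: `keys_vals` interleaves string-table indexes per dense node, terminated by `0` (here index 1 = "name", index 2 = "value"), and a way's `refs` are delta-coded node ids, so `[1, 1, 0]` means ids 1, 2, 2. A small sketch of the standard delta decoding (generic PBF semantics, not code from this package):

```ts
// Expand delta-coded refs into absolute node ids: each entry stores the
// difference from the previous id, so [1, 1, 0] -> [1, 2, 2].
function decodeDeltaRefs(refs: number[]): number[] {
  const ids: number[] = []
  let previous = 0
  for (const delta of refs) {
    previous += delta
    ids.push(previous)
  }
  return ids
}
```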
package/test/pbf-to-blobs.test.ts
@@ -0,0 +1,85 @@
+ import Pbf from "pbf"
+ import { assert, describe, it } from "vitest"
+ import { osmPbfBlobsToBlocksGenerator } from "../src/blobs-to-blocks"
+ import { createOsmPbfBlobGenerator } from "../src/pbf-to-blobs"
+ import { writeBlob, writeBlobHeader } from "../src/proto/fileformat"
+ import { writeHeaderBlock } from "../src/proto/osmformat"
+ import { concatUint8, uint32BE } from "../src/utils"
+ import {
+   createSampleHeader,
+   createSamplePbfFileBytes,
+   isHeaderBlock,
+   isPrimitiveBlock,
+ } from "./helpers"
+
+ describe("createOsmPbfBlobGenerator", () => {
+   it("yields compressed blobs across fragmented chunks", async () => {
+     const { header, primitiveBlock, fileBytes } =
+       await createSamplePbfFileBytes()
+     const generate = createOsmPbfBlobGenerator()
+     const yielded: Uint8Array<ArrayBuffer>[] = []
+
+     let offset = 0
+     const chunkSizes = [1, 9]
+     for (const size of chunkSizes) {
+       const chunk = fileBytes.slice(offset, offset + size)
+       offset += size
+       for (const blob of generate(chunk)) yielded.push(blob)
+     }
+     if (offset < fileBytes.length) {
+       for (const blob of generate(fileBytes.slice(offset))) yielded.push(blob)
+     }
+
+     assert.equal(yielded.length, 2)
+
+     const blocks = osmPbfBlobsToBlocksGenerator(
+       (async function* () {
+         for (const blob of yielded) yield blob
+       })(),
+     )
+     const { value: headerBlock, done } = await blocks.next()
+     assert.isFalse(done)
+     if (!isHeaderBlock(headerBlock)) {
+       assert.fail("Expected first block to be a header")
+     }
+     assert.deepEqual(headerBlock.bbox, header.bbox)
+     assert.deepEqual(headerBlock.required_features, header.required_features)
+     assert.deepEqual(headerBlock.optional_features, header.optional_features)
+
+     const { value: primitive } = await blocks.next()
+     if (!isPrimitiveBlock(primitive)) {
+       assert.fail("Expected primitive block after header")
+     }
+     assert.lengthOf(
+       primitive.primitivegroup,
+       primitiveBlock.primitivegroup.length,
+     )
+     assert.exists(primitive.primitivegroup[0])
+     assert.exists(primitiveBlock.primitivegroup[0])
+     const dense = primitive.primitivegroup[0].dense
+     assert.exists(dense)
+     assert.deepEqual(dense?.id, primitiveBlock.primitivegroup[0].dense?.id)
+     assert.deepEqual(dense?.lat, primitiveBlock.primitivegroup[0].dense?.lat)
+     assert.deepEqual(dense?.lon, primitiveBlock.primitivegroup[0].dense?.lon)
+   })
+
+   it("throws when a blob omits zlib data", () => {
+     const headerBlock = createSampleHeader()
+     const headerPbf = new Pbf()
+     writeHeaderBlock(headerBlock, headerPbf)
+     const headerContent = headerPbf.finish()
+
+     const blobPbf = new Pbf()
+     writeBlob({ raw_size: headerContent.length, raw: headerContent }, blobPbf)
+     const blob = blobPbf.finish()
+
+     const blobHeaderPbf = new Pbf()
+     writeBlobHeader({ type: "OSMHeader", datasize: blob.length }, blobHeaderPbf)
+     const blobHeader = blobHeaderPbf.finish()
+
+     const chunk = concatUint8(uint32BE(blobHeader.byteLength), blobHeader, blob)
+     const generate = createOsmPbfBlobGenerator()
+     const iterator = generate(chunk)
+     assert.throws(() => iterator.next(), /Blob has no zlib data/)
+   })
+ })
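
The second test assembles the on-disk framing by hand, which doubles as documentation of the OSM PBF container: each unit is a 4-byte big-endian length, the `BlobHeader` it measures, then `datasize` bytes of `Blob`. A sketch of scanning one frame, assuming the generated `src/proto/fileformat` module also exports a `readBlobHeader` counterpart to `writeBlobHeader` (pbf codegen normally emits read/write pairs):

```ts
import Pbf from "pbf"
// readBlobHeader is assumed; only writeBlobHeader appears in the tests above.
import { readBlobHeader } from "../src/proto/fileformat"

// Read one [u32 length][BlobHeader][Blob] frame starting at `offset`.
function readFrame(bytes: Uint8Array<ArrayBuffer>, offset: number) {
  const view = new DataView(bytes.buffer, bytes.byteOffset)
  const headerLength = view.getUint32(offset) // big-endian, see uint32BE
  const headerEnd = offset + 4 + headerLength
  const header = readBlobHeader(new Pbf(bytes.subarray(offset + 4, headerEnd)))
  return {
    header,
    blob: bytes.subarray(headerEnd, headerEnd + header.datasize),
    next: headerEnd + header.datasize,
  }
}
```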
package/test/read.bench.ts
@@ -0,0 +1,42 @@
+ import {
+   getFixtureFile,
+   getFixtureFileReadStream,
+   PBFs,
+ } from "@osmix/shared/test/fixtures"
+ import { assert, beforeAll, bench, describe } from "vitest"
+ import {
+   OsmPbfBytesToBlocksTransformStream,
+   readOsmPbf,
+ } from "../src/pbf-to-blocks"
+ import { createOsmEntityCounter, testOsmPbfReader } from "./utils"
+
+ describe.each(Object.entries(PBFs))("%s", (_name, pbf) => {
+   beforeAll(() => getFixtureFile(pbf.url))
+
+   bench("parse with generators", async () => {
+     const file = await getFixtureFile(pbf.url)
+     const osm = await readOsmPbf(file)
+
+     await testOsmPbfReader(osm, pbf)
+   })
+
+   bench("parse streaming", async () => {
+     const { onGroup, count } = createOsmEntityCounter()
+
+     await getFixtureFileReadStream(pbf.url)
+       .pipeThrough(new OsmPbfBytesToBlocksTransformStream())
+       .pipeTo(
+         new WritableStream({
+           write: (block) => {
+             if ("primitivegroup" in block) {
+               for (const group of block.primitivegroup) onGroup(group)
+             }
+           },
+         }),
+       )
+
+     assert.equal(count.nodes, pbf.nodes)
+     assert.equal(count.ways, pbf.ways)
+     assert.equal(count.relations, pbf.relations)
+   })
+ })
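
`createOsmEntityCounter` and `testOsmPbfReader` live in `test/utils.ts`, which this excerpt does not include. Matching the assertions above, a counter over primitive groups might look like this hypothetical reimplementation (the `OsmPbfPrimitiveGroup` type name is assumed):

```ts
import type { OsmPbfPrimitiveGroup } from "../src/proto/osmformat" // assumed name

function createOsmEntityCounter() {
  const count = { nodes: 0, ways: 0, relations: 0 }
  const onGroup = (group: OsmPbfPrimitiveGroup) => {
    // Dense nodes carry one id per array slot; plain nodes are listed singly.
    count.nodes += (group.dense?.id.length ?? 0) + group.nodes.length
    count.ways += group.ways.length
    count.relations += group.relations.length
  }
  return { onGroup, count }
}
```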
package/test/read.test.ts
@@ -0,0 +1,45 @@
+ import {
+   getFixtureFile,
+   getFixtureFileReadStream,
+   PBFs,
+ } from "@osmix/shared/test/fixtures"
+ import { assert, beforeAll, describe, it } from "vitest"
+ import {
+   OsmPbfBytesToBlocksTransformStream,
+   readOsmPbf,
+ } from "../src/pbf-to-blocks"
+ import { createOsmEntityCounter, testOsmPbfReader } from "./utils"
+
+ describe("read", () => {
+   describe.each(Object.entries(PBFs))("%s", (_name, pbf) => {
+     beforeAll(() => getFixtureFile(pbf.url))
+
+     it("from stream", async () => {
+       const { onGroup, count } = createOsmEntityCounter()
+
+       await getFixtureFileReadStream(pbf.url)
+         .pipeThrough(new OsmPbfBytesToBlocksTransformStream())
+         .pipeTo(
+           new WritableStream({
+             write: (block) => {
+               if ("primitivegroup" in block) {
+                 for (const group of block.primitivegroup) onGroup(group)
+               } else {
+                 assert.deepEqual(block.bbox, pbf.bbox)
+               }
+             },
+           }),
+         )
+
+       assert.equal(count.nodes, pbf.nodes)
+       assert.equal(count.ways, pbf.ways)
+       assert.equal(count.relations, pbf.relations)
+     })
+
+     it("from buffer", async () => {
+       const fileData = await getFixtureFile(pbf.url)
+       const osm = await readOsmPbf(fileData)
+       await testOsmPbfReader(osm, pbf)
+     })
+   })
+ })
package/test/streams.test.ts
@@ -0,0 +1,92 @@
+ import { assert, describe, expect, it } from "vitest"
+ import { OsmBlocksToPbfBytesTransformStream } from "../src/blocks-to-pbf"
+ import { OsmPbfBytesToBlocksTransformStream } from "../src/pbf-to-blocks"
+ import { concatUint8 } from "../src/utils"
+ import {
+   createSamplePbfFileBytes,
+   createSamplePrimitiveBlock,
+   isHeaderBlock,
+   isPrimitiveBlock,
+ } from "./helpers"
+
+ describe("transform streams", () => {
+   it("requires the header to be written before data blocks", async () => {
+     const input = new ReadableStream({
+       start(controller) {
+         controller.enqueue(createSamplePrimitiveBlock())
+         controller.close()
+       },
+     })
+
+     await expect(
+       input
+         .pipeThrough(new OsmBlocksToPbfBytesTransformStream())
+         .pipeTo(new WritableStream()),
+     ).rejects.toThrow("Header first in ReadableStream of blocks.")
+   })
+
+   it("serialises blocks into the expected PBF byte sequence", async () => {
+     const { header, primitiveBlock, fileBytes } =
+       await createSamplePbfFileBytes()
+     const chunks: Uint8Array[] = []
+
+     const input = new ReadableStream({
+       start(controller) {
+         controller.enqueue(header)
+         controller.enqueue(primitiveBlock)
+         controller.close()
+       },
+     })
+
+     await input.pipeThrough(new OsmBlocksToPbfBytesTransformStream()).pipeTo(
+       new WritableStream<Uint8Array>({
+         write(chunk) {
+           chunks.push(chunk)
+         },
+       }),
+     )
+
+     assert.deepEqual(concatUint8(...chunks), fileBytes)
+   })
+
+   it("parses streamed bytes back into header and primitive blocks", async () => {
+     const { header, primitiveBlock, fileBytes } =
+       await createSamplePbfFileBytes()
+     assert.exists(primitiveBlock.primitivegroup[0])
+     const blocks: unknown[] = []
+
+     const input = new ReadableStream({
+       start(controller) {
+         controller.enqueue(fileBytes.slice(0, 7).buffer)
+         controller.enqueue(fileBytes.slice(7).buffer)
+         controller.close()
+       },
+     })
+
+     await input.pipeThrough(new OsmPbfBytesToBlocksTransformStream()).pipeTo(
+       new WritableStream({
+         write(chunk) {
+           blocks.push(chunk)
+         },
+       }),
+     )
+
+     assert.equal(blocks.length, 2)
+     const headerBlock = blocks[0]
+     if (!isHeaderBlock(headerBlock)) {
+       assert.fail("Expected header block")
+     }
+     assert.deepEqual(headerBlock.bbox, header.bbox)
+     assert.deepEqual(headerBlock.required_features, header.required_features)
+     const block = blocks[1]
+     if (!isPrimitiveBlock(block)) {
+       assert.fail("Expected primitive block")
+     }
+     assert.lengthOf(block.primitivegroup, primitiveBlock.primitivegroup.length)
+     assert.exists(block.primitivegroup)
+     assert.exists(block.primitivegroup[0])
+     const dense = block.primitivegroup[0].dense
+     assert.exists(dense)
+     assert.deepEqual(dense.id, primitiveBlock.primitivegroup[0].dense?.id)
+   })
+ })
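
Chained together, the two transform streams form a round trip: blocks are framed into PBF bytes and parsed straight back. A sketch using the same classes and helpers the tests import:

```ts
import { OsmBlocksToPbfBytesTransformStream } from "../src/blocks-to-pbf"
import { OsmPbfBytesToBlocksTransformStream } from "../src/pbf-to-blocks"
import { createSampleHeader, createSamplePrimitiveBlock } from "./helpers"

const input = new ReadableStream({
  start(controller) {
    controller.enqueue(createSampleHeader()) // header must be first
    controller.enqueue(createSamplePrimitiveBlock())
    controller.close()
  },
})

// blocks -> framed PBF bytes -> blocks again
await input
  .pipeThrough(new OsmBlocksToPbfBytesTransformStream())
  .pipeThrough(new OsmPbfBytesToBlocksTransformStream())
  .pipeTo(
    new WritableStream({
      write(block) {
        console.log("primitive?", "primitivegroup" in block)
      },
    }),
  )
```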
package/test/utils.bun.test.ts
@@ -0,0 +1,327 @@
+ import { describe, expect, test } from "vitest"
+ import { concatUint8, isBun, toAsyncGenerator, uint32BE } from "../src/utils"
+
+ /**
+  * Decompress zlib data via node:zlib (Bun implements the Node API).
+  */
+ async function bunDecompress(
+   data: Uint8Array<ArrayBuffer>,
+ ): Promise<Uint8Array<ArrayBuffer>> {
+   const { inflate } = await import("node:zlib")
+   const result = await new Promise<Uint8Array>((resolve, reject) => {
+     inflate(data, (error, result) => {
+       if (error) reject(error)
+       else resolve(result)
+     })
+   })
+   // Copy into a fresh Uint8Array so the view covers exactly the inflated
+   // bytes, even when the Buffer is backed by Node's shared allocation pool.
+   return new Uint8Array(result)
+ }
+
+ /**
+  * Compress data to zlib format via node:zlib.
+  */
+ export async function bunCompress(
+   data: Uint8Array<ArrayBuffer>,
+ ): Promise<Uint8Array<ArrayBuffer>> {
+   const { deflate } = await import("node:zlib")
+   const result = await new Promise<Uint8Array>((resolve, reject) => {
+     deflate(data, (error, result) => {
+       if (error) reject(error)
+       else resolve(result)
+     })
+   })
+   return new Uint8Array(result)
+ }
+
+ describe.runIf(isBun())("utils", () => {
+   test("wraps values into an async generator", async () => {
+     const generator = toAsyncGenerator(3)
+     const first = await generator.next()
+     expect(first).toEqual({ value: 3, done: false })
+     const done = await generator.next()
+     expect(done).toEqual({ value: undefined, done: true })
+   })
+
+   test("consumes readable streams", async () => {
+     const stream = new ReadableStream<number>({
+       start(controller) {
+         controller.enqueue(1)
+         controller.enqueue(2)
+         controller.close()
+       },
+     })
+     const values: number[] = []
+     for await (const value of toAsyncGenerator(stream)) values.push(value)
+     expect(values).toEqual([1, 2])
+   })
+
+   test("throws on nullish inputs", async () => {
+     const invalidInput = null as unknown as never
+     await expect(toAsyncGenerator(invalidInput).next()).rejects.toThrow(
+       "Value is null",
+     )
+   })
+
+   test("concatenates Uint8Array segments", () => {
+     const a = Uint8Array.of(1, 2)
+     const b = Uint8Array.of(3)
+     expect(concatUint8(a, b)).toEqual(Uint8Array.of(1, 2, 3))
+   })
+
+   test("encodes big-endian 32-bit integers", () => {
+     expect(uint32BE(0x01020304)).toEqual(Uint8Array.of(1, 2, 3, 4))
+   })
+
+   test("compresses and decompresses data with deflate", async () => {
+     const input = new TextEncoder().encode("osmix") as Uint8Array<ArrayBuffer>
+     const compressed = await bunCompress(input)
+     expect(compressed).not.toEqual(input)
+     const decompressed = await bunDecompress(compressed)
+     expect(decompressed).toEqual(input)
+   })
+
+   test("compresses and decompresses larger data", async () => {
+     const input = new TextEncoder().encode(
+       "a".repeat(1000),
+     ) as Uint8Array<ArrayBuffer>
+     const compressed = await bunCompress(input)
+     expect(compressed.length).toBeLessThan(input.length)
+     const decompressed = await bunDecompress(compressed)
+     expect(decompressed).toEqual(input)
+   })
+
+   test("handles Uint8Array input", async () => {
+     const input = Uint8Array.of(1, 2, 3, 4, 5)
+     const compressed = await bunCompress(input)
+     const decompressed = await bunDecompress(compressed)
+     expect(decompressed).toEqual(input)
+   })
+
+   test("uses Bun runtime with Node.js zlib compatibility", () => {
+     // This test verifies that Bun is available in the runtime
+     expect(isBun()).toBe(true)
+   })
+
+   test("Node.js zlib methods work in Bun", async () => {
+     const { deflateSync, inflateSync } = await import("node:zlib")
+     const input = new TextEncoder().encode("test bun compression with zlib")
+     const compressed = deflateSync(input)
+     expect(compressed.length).toBeGreaterThan(0)
+     expect(compressed).not.toEqual(input)
+
+     const decompressed = inflateSync(compressed)
+     expect(new Uint8Array(decompressed)).toEqual(input)
+   })
+
+   test("compress/decompress are compatible with OSM PBF zlib format", async () => {
+     // Our compress/decompress functions must produce zlib-compatible data;
+     // this is critical for OSM PBF compatibility.
+     const { deflateSync, inflateSync } = await import("node:zlib")
+     const input = new TextEncoder().encode(
+       "OSM PBF uses zlib format (deflate with headers)",
+     ) as Uint8Array<ArrayBuffer>
+
+     // Compress with our function
+     const ourCompressed = await bunCompress(input)
+
+     // Decompress with Node.js zlib (what OSM PBF uses)
+     const decompressedWithNodeZlib = inflateSync(ourCompressed)
+     expect(new Uint8Array(decompressedWithNodeZlib)).toEqual(input)
+
+     // Compress with Node.js zlib
+     const nodeCompressed = deflateSync(input)
+
+     // Decompress with our function
+     const decompressedWithOurs = await bunDecompress(
+       new Uint8Array(nodeCompressed),
+     )
+     expect(decompressedWithOurs).toEqual(input)
+   })
+ })
+
+ describe.skip("CompressionStream polyfill", () => {
+   test("compresses data using deflate format", async () => {
+     const input = new TextEncoder().encode("test compression stream")
+     const compressor = new CompressionStream("deflate")
+
+     const writer = compressor.writable.getWriter()
+     writer.write(input)
+     writer.close()
+
+     const chunks: Uint8Array[] = []
+     const reader = compressor.readable.getReader()
+     while (true) {
+       const { done, value } = await reader.read()
+       if (done) break
+       chunks.push(value)
+     }
+
+     const compressed = concatUint8(...chunks)
+     expect(compressed.length).toBeGreaterThan(0)
+     expect(compressed).not.toEqual(input)
+
+     // Verify it's valid deflate data by decompressing
+     const decompressed = await bunDecompress(new Uint8Array(compressed))
+     expect(decompressed).toEqual(input)
+   })
+
+   test("returns proper Uint8Array<ArrayBuffer> instances", async () => {
+     const input = new TextEncoder().encode("type safety check")
+     const compressor = new CompressionStream("deflate")
+
+     const writer = compressor.writable.getWriter()
+     writer.write(input)
+     writer.close()
+
+     const reader = compressor.readable.getReader()
+     const { value } = await reader.read()
+
+     expect(value).toBeDefined()
+     if (!value) throw new Error("No value read")
+
+     // Verify it's a Uint8Array
+     expect(value).toBeInstanceOf(Uint8Array)
+     // Verify the buffer is an ArrayBuffer (not Buffer or SharedArrayBuffer)
+     expect(value.buffer).toBeInstanceOf(ArrayBuffer)
+     // Verify it's not a Node.js Buffer
+     expect(value.constructor.name).toBe("Uint8Array")
+   })
+
+   test("handles multiple writes", async () => {
+     const compressor = new CompressionStream("deflate")
+     const writer = compressor.writable.getWriter()
+
+     // Write multiple chunks
+     writer.write(new TextEncoder().encode("first "))
+     writer.write(new TextEncoder().encode("second "))
+     writer.write(new TextEncoder().encode("third"))
+     writer.close()
+
+     const chunks: Uint8Array[] = []
+     const reader = compressor.readable.getReader()
+     while (true) {
+       const { done, value } = await reader.read()
+       if (done) break
+       chunks.push(value)
+     }
+
+     const compressed = concatUint8(...chunks)
+     const decompressed = await bunDecompress(new Uint8Array(compressed))
+     expect(new TextDecoder().decode(decompressed)).toBe("first second third")
+   })
+ })
+
+ describe.skip("DecompressionStream polyfill", () => {
+   test("decompresses deflate data", async () => {
+     const input = new TextEncoder().encode(
+       "test decompression stream",
+     ) as Uint8Array<ArrayBuffer>
+     const compressed = await bunCompress(input)
+
+     const decompressor = new DecompressionStream("deflate")
+     const writer = decompressor.writable.getWriter()
+     writer.write(compressed as Uint8Array<ArrayBuffer>)
+     writer.close()
+
+     const chunks: Uint8Array[] = []
+     const reader = decompressor.readable.getReader()
+     while (true) {
+       const { done, value } = await reader.read()
+       if (done) break
+       chunks.push(value)
+     }
+
+     const decompressed = concatUint8(...chunks)
+     expect(decompressed).toEqual(input)
+   })
+
+   test("returns proper Uint8Array<ArrayBuffer> instances", async () => {
+     const input = new TextEncoder().encode(
+       "type safety check",
+     ) as Uint8Array<ArrayBuffer>
+     const compressed = await bunCompress(input)
+
+     const decompressor = new DecompressionStream("deflate")
+     const writer = decompressor.writable.getWriter()
+     writer.write(compressed as Uint8Array<ArrayBuffer>)
+     writer.close()
+
+     const reader = decompressor.readable.getReader()
+     const { value } = await reader.read()
+
+     expect(value).toBeDefined()
+     if (!value) throw new Error("No value read")
+
+     // Verify it's a Uint8Array
+     expect(value).toBeInstanceOf(Uint8Array)
+     // Verify the buffer is an ArrayBuffer (not Buffer or SharedArrayBuffer)
+     expect(value.buffer).toBeInstanceOf(ArrayBuffer)
+     // Verify it's not a Node.js Buffer
+     expect(value.constructor.name).toBe("Uint8Array")
+   })
+
+   test("handles chunked compressed data", async () => {
+     const input = new TextEncoder().encode(
+       "test chunked data",
+     ) as Uint8Array<ArrayBuffer>
+     const compressed = await bunCompress(input)
+
+     const decompressor = new DecompressionStream("deflate")
+     const writer = decompressor.writable.getWriter()
+
+     // Write compressed data in chunks
+     const chunkSize = 5
+     for (let i = 0; i < compressed.length; i += chunkSize) {
+       const chunk = compressed.slice(i, i + chunkSize)
+       writer.write(chunk)
+     }
+     writer.close()
+
+     const chunks: Uint8Array[] = []
+     const reader = decompressor.readable.getReader()
+     while (true) {
+       const { done, value } = await reader.read()
+       if (done) break
+       chunks.push(value)
+     }
+
+     const decompressed = concatUint8(...chunks)
+     expect(decompressed).toEqual(input)
+   })
+
+   test("round-trip compression and decompression", async () => {
+     const input = new TextEncoder().encode("round trip test data")
+
+     // Compress
+     const compressor = new CompressionStream("deflate")
+     const compressorWriter = compressor.writable.getWriter()
+     compressorWriter.write(input)
+     compressorWriter.close()
+
+     const compressedChunks: Uint8Array[] = []
+     const compressorReader = compressor.readable.getReader()
+     while (true) {
+       const { done, value } = await compressorReader.read()
+       if (done) break
+       compressedChunks.push(value)
+     }
+     const compressed = concatUint8(...compressedChunks)
+
+     // Decompress
+     const decompressor = new DecompressionStream("deflate")
+     const decompressorWriter = decompressor.writable.getWriter()
+     decompressorWriter.write(new Uint8Array(compressed))
+     decompressorWriter.close()
+
+     const decompressedChunks: Uint8Array[] = []
+     const decompressorReader = decompressor.readable.getReader()
+     while (true) {
+       const { done, value } = await decompressorReader.read()
+       if (done) break
+       decompressedChunks.push(value)
+     }
+     const decompressed = concatUint8(...decompressedChunks)
+
+     expect(decompressed).toEqual(input)
+   })
+ })
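
The skipped suites above target the web-standard `CompressionStream`/`DecompressionStream` pair in `"deflate"` (zlib) mode, the same format OSM PBF blobs use. For reference, a compact promise-based wrapper over those standard APIs (a sketch, not part of this package's exports):

```ts
// Compress with the web-standard CompressionStream; "deflate" means zlib.
async function deflateViaStream(input: Uint8Array<ArrayBuffer>) {
  const stream = new Blob([input])
    .stream()
    .pipeThrough(new CompressionStream("deflate"))
  return new Uint8Array(await new Response(stream).arrayBuffer())
}

async function inflateViaStream(input: Uint8Array<ArrayBuffer>) {
  const stream = new Blob([input])
    .stream()
    .pipeThrough(new DecompressionStream("deflate"))
  return new Uint8Array(await new Response(stream).arrayBuffer())
}

// Round trip: inflateViaStream(await deflateViaStream(data)) yields `data`.
```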