@php-wasm/stream-compression 0.0.1 → 0.9.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/LICENSE +339 -0
  2. package/index.cjs +1 -0
  3. package/index.d.ts +143 -0
  4. package/index.js +546 -0
  5. package/package.json +36 -29
  6. package/{src/test/vitest-setup-file.ts → test/vitest-setup-file.d.ts} +0 -1
  7. package/utils/append-bytes.d.ts +7 -0
  8. package/utils/collect-bytes.d.ts +8 -0
  9. package/utils/collect-file.d.ts +8 -0
  10. package/utils/collect-string.d.ts +8 -0
  11. package/utils/concat-bytes.d.ts +9 -0
  12. package/utils/concat-string.d.ts +6 -0
  13. package/utils/concat-uint8-array.d.ts +7 -0
  14. package/utils/filter-stream.d.ts +7 -0
  15. package/utils/iterable-stream-polyfill.d.ts +1 -0
  16. package/utils/iterator-to-stream.d.ts +8 -0
  17. package/utils/limit-bytes.d.ts +8 -0
  18. package/utils/prepend-bytes.d.ts +7 -0
  19. package/utils/skip-first-bytes.d.ts +7 -0
  20. package/utils/skip-last-bytes.d.ts +7 -0
  21. package/utils/streamed-file.d.ts +39 -0
  22. package/zip/decode-remote-zip.d.ts +14 -0
  23. package/zip/decode-zip.d.ts +82 -0
  24. package/zip/encode-zip.d.ts +7 -0
  25. package/{src/zip/index.ts → zip/index.d.ts} +0 -2
  26. package/zip/types.d.ts +66 -0
  27. package/.eslintrc.json +0 -18
  28. package/project.json +0 -34
  29. package/src/index.ts +0 -7
  30. package/src/test/append-bytes.spec.ts +0 -25
  31. package/src/test/decode-zip.spec.ts +0 -22
  32. package/src/test/encode-zip.spec.ts +0 -47
  33. package/src/test/fixtures/hello-dolly.zip +0 -0
  34. package/src/test/prepend-bytes.spec.ts +0 -25
  35. package/src/test/skip-first-bytes.spec.ts +0 -41
  36. package/src/test/skip-last-bytes.spec.ts +0 -27
  37. package/src/utils/append-bytes.ts +0 -16
  38. package/src/utils/collect-bytes.ts +0 -24
  39. package/src/utils/collect-file.ts +0 -16
  40. package/src/utils/collect-string.ts +0 -25
  41. package/src/utils/concat-bytes.ts +0 -38
  42. package/src/utils/concat-string.ts +0 -17
  43. package/src/utils/concat-uint8-array.ts +0 -17
  44. package/src/utils/filter-stream.ts +0 -15
  45. package/src/utils/iterable-stream-polyfill.ts +0 -35
  46. package/src/utils/iterator-to-stream.ts +0 -39
  47. package/src/utils/limit-bytes.ts +0 -40
  48. package/src/utils/prepend-bytes.ts +0 -18
  49. package/src/utils/skip-first-bytes.ts +0 -21
  50. package/src/utils/skip-last-bytes.ts +0 -24
  51. package/src/utils/streamed-file.ts +0 -58
  52. package/src/zip/decode-remote-zip.ts +0 -409
  53. package/src/zip/decode-zip.ts +0 -349
  54. package/src/zip/encode-zip.ts +0 -278
  55. package/src/zip/types.ts +0 -76
  56. package/tsconfig.json +0 -23
  57. package/tsconfig.lib.json +0 -14
  58. package/tsconfig.spec.json +0 -25
  59. package/vite.config.ts +0 -55
package/index.js ADDED
@@ -0,0 +1,546 @@
+ import "@php-wasm/node-polyfills";
+ import { Semaphore as B } from "@php-wasm/util";
+ function U(...e) {
+ const n = new Uint8Array(
+ e.reduce((r, a) => r + a.length, 0)
+ );
+ let t = 0;
+ for (const r of e)
+ n.set(r, t), t += r.length;
+ return n;
+ }
+ function S(e) {
+ if (e === void 0) {
+ let n = new Uint8Array();
+ return new TransformStream({
+ transform(t) {
+ n = U(n, t);
+ },
+ flush(t) {
+ t.enqueue(n);
+ }
+ });
+ } else {
+ const n = new ArrayBuffer(e || 0);
+ let t = 0;
+ return new TransformStream({
+ transform(r) {
+ new Uint8Array(n).set(r, t), t += r.byteLength;
+ },
+ flush(r) {
+ r.enqueue(new Uint8Array(n));
+ }
+ });
+ }
+ }
+ function A(e, n) {
+ if (n === 0)
+ return new ReadableStream({
+ start(a) {
+ a.close();
+ }
+ });
+ const t = e.getReader({ mode: "byob" });
+ let r = 0;
+ return new ReadableStream({
+ async pull(a) {
+ const { value: i, done: s } = await t.read(
+ new Uint8Array(n - r)
+ );
+ if (s) {
+ t.releaseLock(), a.close();
+ return;
+ }
+ r += i.length, a.enqueue(i), r >= n && (t.releaseLock(), a.close());
+ },
+ cancel() {
+ t.cancel();
+ }
+ });
+ }
+ async function c(e, n) {
+ return n !== void 0 && (e = A(e, n)), await e.pipeThrough(S(n)).getReader().read().then(({ value: t }) => t);
+ }
+ async function re(e, n) {
+ return new File([await c(n)], e);
+ }
+ function _(e) {
+ if (e instanceof ReadableStream)
+ return e;
+ let n;
+ return Symbol.asyncIterator in e ? n = e[Symbol.asyncIterator]() : Symbol.iterator in e ? n = e[Symbol.iterator]() : n = e, new ReadableStream({
+ async pull(t) {
+ const { done: r, value: a } = await n.next();
+ if (r) {
+ t.close();
+ return;
+ }
+ t.enqueue(a);
+ }
+ });
+ }
+ class ae extends File {
+ /**
+ * Creates a new StreamedFile instance.
+ *
+ * @param readableStream The readable stream containing the file data.
+ * @param name The name of the file.
+ * @param type The MIME type of the file.
+ */
+ constructor(n, t, r) {
+ super([], t, { type: r }), this.readableStream = n;
+ }
+ /**
+ * Overrides the slice() method of the File class.
+ *
+ * @returns A Blob representing a portion of the file.
+ */
+ slice() {
+ throw new Error("slice() is not possible on a StreamedFile");
+ }
+ /**
+ * Returns the readable stream associated with the file.
+ *
+ * @returns The readable stream.
+ */
+ stream() {
+ return this.readableStream;
+ }
+ /**
+ * Loads the file data into memory and then returns it as a string.
+ *
+ * @returns File data as text.
+ */
+ async text() {
+ return new TextDecoder().decode(await this.arrayBuffer());
+ }
+ /**
+ * Loads the file data into memory and then returns it as an ArrayBuffer.
+ *
+ * @returns File data as an ArrayBuffer.
+ */
+ async arrayBuffer() {
+ return await c(this.stream());
+ }
+ }
+ ReadableStream.prototype[Symbol.asyncIterator] || (ReadableStream.prototype[Symbol.asyncIterator] = async function* () {
+ const e = this.getReader();
+ try {
+ for (; ; ) {
+ const { done: n, value: t } = await e.read();
+ if (n)
+ return;
+ yield t;
+ }
+ } finally {
+ e.releaseLock();
+ }
+ }, ReadableStream.prototype.iterate = // @ts-ignore
+ ReadableStream.prototype[Symbol.asyncIterator]);
+ const F = 32, h = 67324752, w = 33639248, g = 101010256, N = 0, E = 8;
+ function b(e) {
+ return new TransformStream({
+ transform(n, t) {
+ e(n) && t.enqueue(n);
+ }
+ });
+ }
+ function q(e) {
+ let n = !1;
+ return new TransformStream({
+ async transform(t, r) {
+ n || (n = !0, r.enqueue(e)), r.enqueue(t);
+ }
+ });
+ }
+ function x(e) {
+ return new TransformStream({
+ async transform(n, t) {
+ t.enqueue(n);
+ },
+ async flush(n) {
+ n.enqueue(e);
+ }
+ });
+ }
+ function p(e, n) {
+ return M(e, n).pipeThrough(
+ new TransformStream({
+ async transform(t, r) {
+ const a = new File(
+ [t.bytes],
+ new TextDecoder().decode(t.path),
+ {
+ type: t.isDirectory ? "directory" : void 0
+ }
+ );
+ r.enqueue(a);
+ }
+ })
+ );
+ }
+ const k = () => !0;
+ function M(e, n = k) {
+ return new ReadableStream({
+ async pull(r) {
+ const a = await v(e);
+ if (!a) {
+ r.close();
+ return;
+ }
+ r.enqueue(a);
+ }
+ }).pipeThrough(
+ b(({ signature: r }) => r === h)
+ ).pipeThrough(
+ b(n)
+ );
+ }
+ async function v(e) {
+ const t = new DataView((await c(e, 4)).buffer).getUint32(0, !0);
+ return t === h ? await L(e, !0) : t === w ? await T(e, !0) : t === g ? await O(e, !0) : null;
+ }
+ async function L(e, n = !1) {
+ if (!n && new DataView((await c(e, 4)).buffer).getUint32(0, !0) !== h)
+ return null;
+ const t = new DataView((await c(e, 26)).buffer), r = t.getUint16(22, !0), a = t.getUint16(24, !0), i = {
+ signature: h,
+ version: t.getUint32(0, !0),
+ generalPurpose: t.getUint16(2, !0),
+ compressionMethod: t.getUint16(4, !0),
+ lastModifiedTime: t.getUint16(6, !0),
+ lastModifiedDate: t.getUint16(8, !0),
+ crc: t.getUint32(10, !0),
+ compressedSize: t.getUint32(14, !0),
+ uncompressedSize: t.getUint32(18, !0)
+ };
+ i.path = await c(e, r), i.isDirectory = R(i.path), i.extra = await c(e, a);
+ let s = A(e, i.compressedSize);
+ if (i.compressionMethod === E) {
+ const o = new Uint8Array(10);
+ o.set([31, 139, 8]);
+ const f = new Uint8Array(8), u = new DataView(f.buffer);
+ u.setUint32(0, i.crc, !0), u.setUint32(4, i.uncompressedSize % 2 ** 32, !0), s = s.pipeThrough(q(o)).pipeThrough(x(f)).pipeThrough(new DecompressionStream("gzip"));
+ }
+ return i.bytes = await s.pipeThrough(S(i.uncompressedSize)).getReader().read().then(({ value: o }) => o), i;
+ }
+ async function T(e, n = !1) {
+ if (!n && new DataView((await c(e, 4)).buffer).getUint32(0, !0) !== w)
+ return null;
+ const t = new DataView((await c(e, 42)).buffer), r = t.getUint16(24, !0), a = t.getUint16(26, !0), i = t.getUint16(28, !0), s = {
+ signature: w,
+ versionCreated: t.getUint16(0, !0),
+ versionNeeded: t.getUint16(2, !0),
+ generalPurpose: t.getUint16(4, !0),
+ compressionMethod: t.getUint16(6, !0),
+ lastModifiedTime: t.getUint16(8, !0),
+ lastModifiedDate: t.getUint16(10, !0),
+ crc: t.getUint32(12, !0),
+ compressedSize: t.getUint32(16, !0),
+ uncompressedSize: t.getUint32(20, !0),
+ diskNumber: t.getUint16(30, !0),
+ internalAttributes: t.getUint16(32, !0),
+ externalAttributes: t.getUint32(34, !0),
+ firstByteAt: t.getUint32(38, !0)
+ };
+ return s.lastByteAt = s.firstByteAt + F + r + i + a + s.compressedSize - 1, s.path = await c(e, r), s.isDirectory = R(s.path), s.extra = await c(e, a), s.fileComment = await c(
+ e,
+ i
+ ), s;
+ }
+ function R(e) {
+ return e[e.byteLength - 1] == "/".charCodeAt(0);
+ }
+ async function O(e, n = !1) {
+ if (!n && new DataView((await c(e, 4)).buffer).getUint32(0, !0) !== g)
+ return null;
+ const t = new DataView((await c(e, 18)).buffer), r = {
+ signature: g,
+ numberOfDisks: t.getUint16(0, !0),
+ centralDirectoryStartDisk: t.getUint16(2, !0),
+ numberCentralDirectoryRecordsOnThisDisk: t.getUint16(4, !0),
+ numberCentralDirectoryRecords: t.getUint16(6, !0),
+ centralDirectorySize: t.getUint32(8, !0),
+ centralDirectoryOffset: t.getUint32(12, !0)
+ }, a = t.getUint16(16, !0);
+ return r.comment = await c(e, a), r;
+ }
+ const P = 110 * 1024, I = 10 * 1024, z = 1024 * 1024 * 1, V = new B({ concurrency: 10 }), D = () => !0;
+ async function ie(e, n = D) {
+ if (n === D) {
+ const d = await fetch(e);
+ return p(d.body);
+ }
+ const t = await C(e);
+ if (t <= z) {
+ const d = await fetch(e);
+ return p(d.body);
+ }
+ const r = await fetch(e, {
+ headers: {
+ // 0-0 looks weird, doesn't it?
+ // The Range header is inclusive so it's actually
+ // a valid header asking for the first byte.
+ Range: "bytes=0-0",
+ "Accept-Encoding": "none"
+ }
+ }), [a, i] = r.body.tee(), s = a.getReader(), { value: o } = await s.read(), { done: f } = await s.read();
+ if (s.releaseLock(), a.cancel(), !((o == null ? void 0 : o.length) === 1 && f))
+ return p(i);
+ i.cancel();
+ const l = await $(e, t);
+ return H(l).pipeThrough(b(n)).pipeThrough(G()).pipeThrough(
+ W(l)
+ );
+ }
+ function H(e) {
+ let n;
+ return new ReadableStream({
+ async start() {
+ n = await Z(e);
+ },
+ async pull(t) {
+ const r = await T(
+ n
+ );
+ if (!r) {
+ t.close();
+ return;
+ }
+ t.enqueue(r);
+ }
+ });
+ }
+ async function Z(e) {
+ const n = P;
+ let t = new Uint8Array(), r = e.length;
+ do {
+ r = Math.max(0, r - n);
+ const a = Math.min(
+ r + n - 1,
+ e.length - 1
+ ), i = await c(
+ await e.streamBytes(r, a)
+ );
+ t = U(i, t);
+ const s = new DataView(i.buffer);
+ for (let o = s.byteLength - 4; o >= 0; o--) {
+ if (s.getUint32(o, !0) !== g)
+ continue;
+ const u = o + 12 + 4;
+ if (t.byteLength < u + 4)
+ throw new Error("Central directory not found");
+ const l = s.getUint32(u, !0);
+ if (l < r) {
+ const d = await c(
+ await e.streamBytes(l, r - 1)
+ );
+ t = U(
+ d,
+ t
+ );
+ } else
+ l > r && (t = t.slice(
+ l - r
+ ));
+ return new Blob([t]).stream();
+ }
+ } while (r >= 0);
+ throw new Error("Central directory not found");
+ }
+ function G() {
+ let e = 0, n = [];
+ return new TransformStream({
+ transform(t, r) {
+ t.firstByteAt > e + I && (r.enqueue(n), n = []), e = t.lastByteAt, n.push(t);
+ },
+ flush(t) {
+ t.enqueue(n);
+ }
+ });
+ }
+ function W(e) {
+ let n = !1, t = 0, r;
+ const a = [], i = new WritableStream({
+ write(o, f) {
+ o.length && (++t, Y(e, o).then((u) => {
+ a.push([o, u]);
+ }).catch((u) => {
+ f.error(u);
+ }).finally(() => {
+ --t;
+ }));
+ },
+ abort() {
+ n = !0, r.close();
+ },
+ async close() {
+ n = !0;
+ }
+ });
+ return {
+ readable: new ReadableStream({
+ start(o) {
+ r = o;
+ },
+ async pull(o) {
+ for (; ; ) {
+ if (n && !a.length && t === 0) {
+ o.close();
+ return;
+ }
+ if (!a.length) {
+ await new Promise((m) => setTimeout(m, 50));
+ continue;
+ }
+ const [l, d] = a[0], y = await L(d);
+ if (!y) {
+ a.shift();
+ continue;
+ }
+ if (l.find(
+ (m) => m.path === y.path
+ )) {
+ o.enqueue(y);
+ break;
+ }
+ }
+ }
+ }),
+ writable: i
+ };
+ }
+ async function Y(e, n) {
+ const t = await V.acquire();
+ try {
+ const r = n[n.length - 1];
+ return await e.streamBytes(
+ n[0].firstByteAt,
+ r.lastByteAt
+ );
+ } finally {
+ t();
+ }
+ }
+ async function C(e) {
+ return await fetch(e, { method: "HEAD" }).then((n) => n.headers.get("Content-Length")).then((n) => {
+ if (!n)
+ throw new Error("Content-Length header is missing");
+ const t = parseInt(n, 10);
+ if (isNaN(t) || t < 0)
+ throw new Error("Content-Length header is invalid");
+ return t;
+ });
+ }
+ async function $(e, n) {
+ return n === void 0 && (n = await C(e)), {
+ length: n,
+ streamBytes: async (t, r) => await fetch(e, {
+ headers: {
+ // The Range header is inclusive, so we need to subtract 1
+ Range: `bytes=${t}-${r - 1}`,
+ "Accept-Encoding": "none"
+ }
+ }).then((a) => a.body)
+ };
+ }
+ function se(e) {
+ return _(e).pipeThrough(K());
+ }
+ function K() {
+ const e = /* @__PURE__ */ new Map();
+ let n = 0;
+ return new TransformStream({
+ async transform(t, r) {
+ const a = new Uint8Array(await t.arrayBuffer());
+ let i = await c(
+ new Blob([a]).stream().pipeThrough(new CompressionStream("gzip"))
+ );
+ const s = new DataView(i.buffer).getUint32(
+ i.byteLength - 8,
+ !0
+ );
+ i = i.slice(10, i.byteLength - 8);
+ const o = new TextEncoder().encode(t.name), f = {
+ signature: h,
+ version: 2,
+ generalPurpose: 0,
+ compressionMethod: t.type === "directory" || i.byteLength === 0 ? N : E,
+ lastModifiedTime: 0,
+ lastModifiedDate: 0,
+ crc: s,
+ compressedSize: i.byteLength,
+ uncompressedSize: a.byteLength,
+ path: o,
+ extra: new Uint8Array(0)
+ };
+ e.set(n, f);
+ const u = j(f);
+ r.enqueue(u), n += u.byteLength, r.enqueue(i), n += i.byteLength;
+ },
+ flush(t) {
+ const r = n;
+ let a = 0;
+ for (const [
+ o,
+ f
+ ] of e.entries()) {
+ const u = {
+ ...f,
+ signature: w,
+ fileComment: new Uint8Array(0),
+ diskNumber: 1,
+ internalAttributes: 0,
+ externalAttributes: 0,
+ firstByteAt: o
+ }, l = J(
+ u,
+ o
+ );
+ t.enqueue(l), a += l.byteLength;
+ }
+ const i = {
+ signature: g,
+ numberOfDisks: 1,
+ centralDirectoryOffset: r,
+ centralDirectorySize: a,
+ centralDirectoryStartDisk: 1,
+ numberCentralDirectoryRecordsOnThisDisk: e.size,
+ numberCentralDirectoryRecords: e.size,
+ comment: new Uint8Array(0)
+ }, s = Q(i);
+ t.enqueue(s), e.clear();
+ }
+ });
+ }
+ function j(e) {
+ const n = new ArrayBuffer(
+ 30 + e.path.byteLength + e.extra.byteLength
+ ), t = new DataView(n);
+ t.setUint32(0, e.signature, !0), t.setUint16(4, e.version, !0), t.setUint16(6, e.generalPurpose, !0), t.setUint16(8, e.compressionMethod, !0), t.setUint16(10, e.lastModifiedDate, !0), t.setUint16(12, e.lastModifiedTime, !0), t.setUint32(14, e.crc, !0), t.setUint32(18, e.compressedSize, !0), t.setUint32(22, e.uncompressedSize, !0), t.setUint16(26, e.path.byteLength, !0), t.setUint16(28, e.extra.byteLength, !0);
+ const r = new Uint8Array(n);
+ return r.set(e.path, 30), r.set(e.extra, 30 + e.path.byteLength), r;
+ }
+ function J(e, n) {
+ const t = new ArrayBuffer(
+ 46 + e.path.byteLength + e.extra.byteLength
+ ), r = new DataView(t);
+ r.setUint32(0, e.signature, !0), r.setUint16(4, e.versionCreated, !0), r.setUint16(6, e.versionNeeded, !0), r.setUint16(8, e.generalPurpose, !0), r.setUint16(10, e.compressionMethod, !0), r.setUint16(12, e.lastModifiedDate, !0), r.setUint16(14, e.lastModifiedTime, !0), r.setUint32(16, e.crc, !0), r.setUint32(20, e.compressedSize, !0), r.setUint32(24, e.uncompressedSize, !0), r.setUint16(28, e.path.byteLength, !0), r.setUint16(30, e.extra.byteLength, !0), r.setUint16(32, e.fileComment.byteLength, !0), r.setUint16(34, e.diskNumber, !0), r.setUint16(36, e.internalAttributes, !0), r.setUint32(38, e.externalAttributes, !0), r.setUint32(42, n, !0);
+ const a = new Uint8Array(t);
+ return a.set(e.path, 46), a.set(e.extra, 46 + e.path.byteLength), a;
+ }
+ function Q(e) {
+ const n = new ArrayBuffer(22 + e.comment.byteLength), t = new DataView(n);
+ t.setUint32(0, e.signature, !0), t.setUint16(4, e.numberOfDisks, !0), t.setUint16(6, e.centralDirectoryStartDisk, !0), t.setUint16(8, e.numberCentralDirectoryRecordsOnThisDisk, !0), t.setUint16(10, e.numberCentralDirectoryRecords, !0), t.setUint32(12, e.centralDirectorySize, !0), t.setUint32(16, e.centralDirectoryOffset, !0), t.setUint16(20, e.comment.byteLength, !0);
+ const r = new Uint8Array(n);
+ return r.set(e.comment, 22), r;
+ }
+ export {
+ ae as StreamedFile,
+ c as collectBytes,
+ re as collectFile,
+ ie as decodeRemoteZip,
+ p as decodeZip,
+ se as encodeZip,
+ _ as iteratorToStream
+ };
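For orientation only (an editorial sketch, not part of the published file): the exports above compose as shown below, assuming a hypothetical zip URL and a runtime with fetch and Web Streams.

```ts
import { decodeZip, encodeZip, collectBytes } from '@php-wasm/stream-compression';

// Hypothetical URL, used purely for illustration.
const response = await fetch('https://example.com/hello-dolly.zip');
// decodeZip() yields File objects; encodeZip() turns Files back into zip bytes.
const files = decodeZip(response.body!);
const zipStream = encodeZip(files);
// collectBytes() drains the stream into a single Uint8Array.
const zipBytes = await collectBytes(zipStream);
console.log(zipBytes?.byteLength);
```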
package/package.json CHANGED
@@ -1,31 +1,38 @@
  {
- "name": "@php-wasm/stream-compression",
- "version": "0.0.1",
- "description": "Stream-based compression bindings.",
- "repository": {
- "type": "git",
- "url": "https://github.com/WordPress/wordpress-playground"
- },
- "homepage": "https://developer.wordpress.org/playground",
- "author": "The WordPress contributors",
- "contributors": [
- {
- "name": "Adam Zielinski",
- "email": "adam@adamziel.com",
- "url": "https://github.com/adamziel"
- }
- ],
- "exports": {
- ".": {
- "import": "./index.js",
- "require": "./index.cjs"
- },
- "./package.json": "./package.json"
- },
- "publishConfig": {
- "access": "public",
- "directory": "../../../dist/packages/php-wasm/stream-compression"
- },
- "license": "GPL-2.0-or-later",
- "type": "module"
+ "name": "@php-wasm/stream-compression",
+ "version": "0.9.16",
+ "description": "Stream-based compression bindings.",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/WordPress/wordpress-playground"
+ },
+ "homepage": "https://developer.wordpress.org/playground",
+ "author": "The WordPress contributors",
+ "contributors": [
+ {
+ "name": "Adam Zielinski",
+ "email": "adam@adamziel.com",
+ "url": "https://github.com/adamziel"
+ }
+ ],
+ "exports": {
+ ".": {
+ "import": "./index.js",
+ "require": "./index.cjs"
+ },
+ "./package.json": "./package.json"
+ },
+ "types": "index.d.ts",
+ "publishConfig": {
+ "access": "public",
+ "directory": "../../../dist/packages/php-wasm/stream-compression"
+ },
+ "license": "GPL-2.0-or-later",
+ "type": "module",
+ "gitHead": "47110de9a9efc876f7b432c88d904ef081d1365c",
+ "dependencies": {
+ "@php-wasm/node-polyfills": "0.9.16",
+ "@php-wasm/util": "0.9.16"
+ },
+ "main": "index.js"
  }
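A brief editorial note on the exports map above: ESM consumers resolve to ./index.js and CommonJS consumers to ./index.cjs. A minimal sketch:

```ts
// ESM: the "import" condition resolves to ./index.js.
import { decodeZip } from '@php-wasm/stream-compression';
// CommonJS: the "require" condition resolves to ./index.cjs, e.g.
//   const { decodeZip } = require('@php-wasm/stream-compression');
console.log(typeof decodeZip); // "function"
```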
package/{src/test/vitest-setup-file.ts → test/vitest-setup-file.d.ts} RENAMED
@@ -4,4 +4,3 @@
  *
  * @see tests.setupFiles in vite.config.ts
  */
- import '@php-wasm/node-polyfills';
package/utils/append-bytes.d.ts ADDED
@@ -0,0 +1,7 @@
+ /**
+ * Appends bytes to a stream.
+ *
+ * @param bytes The bytes to append.
+ * @returns A transform stream that will append the specified bytes.
+ */
+ export declare function appendBytes(bytes: Uint8Array): TransformStream<Uint8Array, Uint8Array>;
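A minimal editorial sketch of using the appendBytes declaration above; the import path is an assumption, since the index.js shown earlier re-exports only a subset of these utilities.

```ts
import { appendBytes } from '@php-wasm/stream-compression'; // assumed import path

// Pipe a byte stream through appendBytes() to emit an extra trailing chunk.
const source = new Blob(['hello']).stream();
const withSuffix = source.pipeThrough(appendBytes(new TextEncoder().encode('!')));
```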
package/utils/collect-bytes.d.ts ADDED
@@ -0,0 +1,8 @@
+ /**
+ * Collects the contents of the entire stream into a single Uint8Array.
+ *
+ * @param stream The stream to collect.
+ * @param bytes Optional. The number of bytes to read from the stream.
+ * @returns The contents of the stream as a single Uint8Array.
+ */
+ export declare function collectBytes(stream: ReadableStream<Uint8Array>, bytes?: number): Promise<Uint8Array>;
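A minimal editorial sketch of collectBytes(), which is re-exported by the index.js shown earlier:

```ts
import { collectBytes } from '@php-wasm/stream-compression';

// Drain the whole stream into one Uint8Array; pass a byte count to stop early.
const bytes = await collectBytes(new Blob(['abcdef']).stream());
console.log(bytes?.byteLength); // 6
```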
package/utils/collect-file.d.ts ADDED
@@ -0,0 +1,8 @@
+ /**
+ * Collects the contents of the entire stream into a single File object.
+ *
+ * @param fileName The name of the file.
+ * @param stream The stream to collect.
+ * @returns The contents of the stream as a File object.
+ */
+ export declare function collectFile(fileName: string, stream: ReadableStream<Uint8Array>): Promise<File>;
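A minimal editorial sketch of collectFile(), also re-exported by index.js above:

```ts
import { collectFile } from '@php-wasm/stream-compression';

// Buffer the stream into a named File object.
const file = await collectFile('hello.txt', new Blob(['Hello, world!']).stream());
console.log(file.name, file.size); // "hello.txt" 13
```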
package/utils/collect-string.d.ts ADDED
@@ -0,0 +1,8 @@
+ /**
+ * Collects the contents of the entire stream into a single string.
+ *
+ * @param stream The stream to collect.
+ * @param bytes Optional. The number of bytes to read from the stream.
+ * @returns The string contents of the stream.
+ */
+ export declare function collectString(stream: ReadableStream<Uint8Array>, bytes?: number): Promise<string | undefined>;
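A minimal editorial sketch of collectString(); the import path is an assumption (it is not in the index.js export list above):

```ts
import { collectString } from '@php-wasm/stream-compression'; // assumed import path

const text = await collectString(new Blob(['Hello']).stream());
console.log(text); // "Hello"
```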
package/utils/concat-bytes.d.ts ADDED
@@ -0,0 +1,9 @@
+ /**
+ * Concatenates the contents of the stream into a single Uint8Array.
+ *
+ * @param totalBytes Optional. The number of bytes to concatenate. Used to
+ * pre-allocate the buffer. If not provided, the buffer will
+ * be dynamically resized as needed.
+ * @returns A stream that will emit a single Uint8Array entry before closing.
+ */
+ export declare function concatBytes(totalBytes?: number): TransformStream<Uint8Array, Uint8Array>;
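A minimal editorial sketch of concatBytes() as a TransformStream factory; the import path is an assumption:

```ts
import { concatBytes } from '@php-wasm/stream-compression'; // assumed import path

// Without a size hint the buffer grows dynamically; a byte count pre-allocates it.
const chunks = new Blob(['ab', 'cd']).stream();
const { value } = await chunks.pipeThrough(concatBytes()).getReader().read();
console.log(value?.byteLength); // 4
```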
package/utils/concat-string.d.ts ADDED
@@ -0,0 +1,6 @@
+ /**
+ * Concatenate all chunks into a single string.
+ *
+ * @returns A stream that will emit a single string entry before closing.
+ */
+ export declare function concatString(): TransformStream<string, string>;
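A minimal editorial sketch of concatString(); the import path is an assumption:

```ts
import { concatString } from '@php-wasm/stream-compression'; // assumed import path

const words = new ReadableStream<string>({
  start(controller) {
    controller.enqueue('Hello, ');
    controller.enqueue('world!');
    controller.close();
  },
});
const { value } = await words.pipeThrough(concatString()).getReader().read();
console.log(value); // "Hello, world!"
```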
package/utils/concat-uint8-array.d.ts ADDED
@@ -0,0 +1,7 @@
+ /**
+ * Concatenates multiple Uint8Arrays into a single Uint8Array.
+ *
+ * @param arrays The arrays to concatenate.
+ * @returns A new Uint8Array containing the contents of all the arrays.
+ */
+ export declare function concatUint8Array(...arrays: Uint8Array[]): Uint8Array;
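A minimal editorial sketch of concatUint8Array(), a plain synchronous helper; the import path is an assumption:

```ts
import { concatUint8Array } from '@php-wasm/stream-compression'; // assumed import path

const merged = concatUint8Array(new Uint8Array([1, 2]), new Uint8Array([3]));
console.log(merged); // Uint8Array(3) [ 1, 2, 3 ]
```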
package/utils/filter-stream.d.ts ADDED
@@ -0,0 +1,7 @@
+ /**
+ * Filter the stream based on a predicate.
+ *
+ * @param predicate The predicate to filter the stream with.
+ * @returns A new stream that will only contain chunks that pass the predicate.
+ */
+ export declare function filterStream<T>(predicate: (chunk: T) => boolean): TransformStream<T, T>;
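A minimal editorial sketch of filterStream(); the import path is an assumption:

```ts
import { filterStream } from '@php-wasm/stream-compression'; // assumed import path

const numbers = new ReadableStream<number>({
  start(controller) {
    [1, 2, 3, 4].forEach((n) => controller.enqueue(n));
    controller.close();
  },
});
// Only chunks passing the predicate flow through.
const evens = numbers.pipeThrough(filterStream((n) => n % 2 === 0));
console.log((await evens.getReader().read()).value); // 2
```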
package/utils/iterable-stream-polyfill.d.ts ADDED
@@ -0,0 +1 @@
+ export type IterableReadableStream<R> = ReadableStream<R> & AsyncIterable<R>;
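A minimal editorial sketch of the IterableReadableStream type together with the async-iterator polyfill applied by index.js above; the type import path is an assumption:

```ts
import '@php-wasm/stream-compression'; // importing the package applies the ReadableStream async-iterator polyfill
import type { IterableReadableStream } from '@php-wasm/stream-compression'; // assumed type export

const stream = new Blob(['ab', 'cd']).stream() as IterableReadableStream<Uint8Array>;
for await (const chunk of stream) {
  console.log(chunk.byteLength);
}
```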