@php-wasm/stream-compression 0.0.1 → 0.9.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/LICENSE +339 -0
  2. package/index.cjs +1 -0
  3. package/index.d.ts +143 -0
  4. package/index.js +546 -0
  5. package/package.json +36 -29
  6. package/{src/test/vitest-setup-file.ts → test/vitest-setup-file.d.ts} +0 -1
  7. package/utils/append-bytes.d.ts +7 -0
  8. package/utils/collect-bytes.d.ts +8 -0
  9. package/utils/collect-file.d.ts +8 -0
  10. package/utils/collect-string.d.ts +8 -0
  11. package/utils/concat-bytes.d.ts +9 -0
  12. package/utils/concat-string.d.ts +6 -0
  13. package/utils/concat-uint8-array.d.ts +7 -0
  14. package/utils/filter-stream.d.ts +7 -0
  15. package/utils/iterable-stream-polyfill.d.ts +1 -0
  16. package/utils/iterator-to-stream.d.ts +8 -0
  17. package/utils/limit-bytes.d.ts +8 -0
  18. package/utils/prepend-bytes.d.ts +7 -0
  19. package/utils/skip-first-bytes.d.ts +7 -0
  20. package/utils/skip-last-bytes.d.ts +7 -0
  21. package/utils/streamed-file.d.ts +39 -0
  22. package/zip/decode-remote-zip.d.ts +14 -0
  23. package/zip/decode-zip.d.ts +82 -0
  24. package/zip/encode-zip.d.ts +7 -0
  25. package/{src/zip/index.ts → zip/index.d.ts} +0 -2
  26. package/zip/types.d.ts +66 -0
  27. package/.eslintrc.json +0 -18
  28. package/project.json +0 -34
  29. package/src/index.ts +0 -7
  30. package/src/test/append-bytes.spec.ts +0 -25
  31. package/src/test/decode-zip.spec.ts +0 -22
  32. package/src/test/encode-zip.spec.ts +0 -47
  33. package/src/test/fixtures/hello-dolly.zip +0 -0
  34. package/src/test/prepend-bytes.spec.ts +0 -25
  35. package/src/test/skip-first-bytes.spec.ts +0 -41
  36. package/src/test/skip-last-bytes.spec.ts +0 -27
  37. package/src/utils/append-bytes.ts +0 -16
  38. package/src/utils/collect-bytes.ts +0 -24
  39. package/src/utils/collect-file.ts +0 -16
  40. package/src/utils/collect-string.ts +0 -25
  41. package/src/utils/concat-bytes.ts +0 -38
  42. package/src/utils/concat-string.ts +0 -17
  43. package/src/utils/concat-uint8-array.ts +0 -17
  44. package/src/utils/filter-stream.ts +0 -15
  45. package/src/utils/iterable-stream-polyfill.ts +0 -35
  46. package/src/utils/iterator-to-stream.ts +0 -39
  47. package/src/utils/limit-bytes.ts +0 -40
  48. package/src/utils/prepend-bytes.ts +0 -18
  49. package/src/utils/skip-first-bytes.ts +0 -21
  50. package/src/utils/skip-last-bytes.ts +0 -24
  51. package/src/utils/streamed-file.ts +0 -58
  52. package/src/zip/decode-remote-zip.ts +0 -409
  53. package/src/zip/decode-zip.ts +0 -349
  54. package/src/zip/encode-zip.ts +0 -278
  55. package/src/zip/types.ts +0 -76
  56. package/tsconfig.json +0 -23
  57. package/tsconfig.lib.json +0 -14
  58. package/tsconfig.spec.json +0 -25
  59. package/vite.config.ts +0 -55
package/src/utils/concat-uint8-array.ts
@@ -1,17 +0,0 @@
- /**
-  * Concatenates multiple Uint8Arrays into a single Uint8Array.
-  *
-  * @param arrays The arrays to concatenate.
-  * @returns A new Uint8Array containing the contents of all the arrays.
-  */
- export function concatUint8Array(...arrays: Uint8Array[]) {
-     const result = new Uint8Array(
-         arrays.reduce((sum, array) => sum + array.length, 0)
-     );
-     let offset = 0;
-     for (const array of arrays) {
-         result.set(array, offset);
-         offset += array.length;
-     }
-     return result;
- }
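
A minimal usage sketch of this helper. The import path assumes the package root re-exports it, as the new index.d.ts suggests:

```ts
import { concatUint8Array } from '@php-wasm/stream-compression';

// Joins two byte arrays into a single, newly allocated Uint8Array.
const merged = concatUint8Array(
    new Uint8Array([1, 2]),
    new Uint8Array([3, 4, 5])
);
console.log(merged); // Uint8Array(5) [ 1, 2, 3, 4, 5 ]
```
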
package/src/utils/filter-stream.ts
@@ -1,15 +0,0 @@
- /**
-  * Filter the stream based on a predicate.
-  *
-  * @param predicate The predicate to filter the stream with.
-  * @returns A new stream that will only contain chunks that pass the predicate.
-  */
- export function filterStream<T>(predicate: (chunk: T) => boolean) {
-     return new TransformStream<T, T>({
-         transform(chunk, controller) {
-             if (predicate(chunk)) {
-                 controller.enqueue(chunk);
-             }
-         },
-     });
- }
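
A usage sketch with a toy source stream (import path assumed, as above):

```ts
import { filterStream } from '@php-wasm/stream-compression';

// A small source stream of numbers; filterStream() keeps only even chunks.
const numbers = new ReadableStream<number>({
    start(controller) {
        [1, 2, 3, 4].forEach((n) => controller.enqueue(n));
        controller.close();
    },
});
const evens = numbers.pipeThrough(filterStream((n) => n % 2 === 0));
// Reading the filtered stream yields 2, then 4.
```
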
package/src/utils/iterable-stream-polyfill.ts
@@ -1,35 +0,0 @@
- /**
-  * Polyfill for ReadableStream[Symbol.asyncIterator]
-  * This enables the use of for-await-of loops with ReadableStreams
-  *
-  * @example
-  * ```ts
-  * for await (const entry of stream) {
-  *     // ...
-  * }
-  * ```
-  */
- // @ts-ignore
- if (!ReadableStream.prototype[Symbol.asyncIterator]) {
-     // @ts-ignore
-     ReadableStream.prototype[Symbol.asyncIterator] = async function* () {
-         const reader = this.getReader();
-         try {
-             while (true) {
-                 const { done, value } = await reader.read();
-                 if (done) {
-                     return;
-                 }
-                 yield value;
-             }
-         } finally {
-             reader.releaseLock();
-         }
-     };
-     // @ts-ignore
-     ReadableStream.prototype.iterate =
-         // @ts-ignore
-         ReadableStream.prototype[Symbol.asyncIterator];
- }
-
- export type IterableReadableStream<R> = ReadableStream<R> & AsyncIterable<R>;
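
The polyfill applies itself as an import side effect. A sketch, where the subpath import is an assumption based on the published file layout:

```ts
import '@php-wasm/stream-compression/utils/iterable-stream-polyfill';
import type { IterableReadableStream } from '@php-wasm/stream-compression/utils/iterable-stream-polyfill';

// After the polyfill runs, any ReadableStream works in for-await-of:
const stream = new Blob(['hello']).stream() as IterableReadableStream<Uint8Array>;
for await (const chunk of stream) {
    console.log(chunk.byteLength); // 5
}
```
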
package/src/utils/iterator-to-stream.ts
@@ -1,39 +0,0 @@
- import { IterableReadableStream } from './iterable-stream-polyfill';
-
- /**
-  * Converts an iterator or iterable to a stream.
-  *
-  * @param iteratorOrIterable The iterator or iterable to convert.
-  * @returns A stream that will yield the values from the iterator or iterable.
-  */
- export function iteratorToStream<T>(
-     iteratorOrIterable:
-         | AsyncIterator<T>
-         | Iterator<T>
-         | AsyncIterable<T>
-         | Iterable<T>
- ) {
-     if (iteratorOrIterable instanceof ReadableStream) {
-         return iteratorOrIterable as IterableReadableStream<T>;
-     }
-
-     let iterator: AsyncIterator<T> | Iterator<T>;
-     if (Symbol.asyncIterator in iteratorOrIterable) {
-         iterator = iteratorOrIterable[Symbol.asyncIterator]();
-     } else if (Symbol.iterator in iteratorOrIterable) {
-         iterator = iteratorOrIterable[Symbol.iterator]();
-     } else {
-         iterator = iteratorOrIterable;
-     }
-
-     return new ReadableStream<T>({
-         async pull(controller) {
-             const { done, value } = await iterator.next();
-             if (done) {
-                 controller.close();
-                 return;
-             }
-             controller.enqueue(value);
-         },
-     }) as IterableReadableStream<T>;
- }
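
A usage sketch (import path assumed). Because the stream only calls `iterator.next()` on pull, values are produced lazily, so even an unbounded generator would be safe here:

```ts
import { iteratorToStream } from '@php-wasm/stream-compression';

// A generator becomes a pullable ReadableStream of its yielded values.
function* chunks() {
    yield new Uint8Array([104, 105]);
    yield new Uint8Array([33]);
}
const stream = iteratorToStream(chunks());
```
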
package/src/utils/limit-bytes.ts
@@ -1,40 +0,0 @@
- /**
-  * Limit the number of bytes read from a stream.
-  *
-  * @param stream The stream to limit.
-  * @param bytes The number of bytes to read from the stream.
-  * @returns A new stream that will read at most `bytes` bytes from `stream`.
-  */
- export function limitBytes(stream: ReadableStream<Uint8Array>, bytes: number) {
-     if (bytes === 0) {
-         return new ReadableStream({
-             start(controller) {
-                 controller.close();
-             },
-         });
-     }
-     const reader = stream.getReader({ mode: 'byob' });
-     let offset = 0;
-     return new ReadableStream({
-         async pull(controller) {
-             const { value, done } = await reader.read(
-                 new Uint8Array(bytes - offset)
-             );
-             if (done) {
-                 reader.releaseLock();
-                 controller.close();
-                 return;
-             }
-             offset += value.length;
-             controller.enqueue(value);
-
-             if (offset >= bytes) {
-                 reader.releaseLock();
-                 controller.close();
-             }
-         },
-         cancel() {
-             reader.cancel();
-         },
-     });
- }
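
A usage sketch (import path assumed). Note that limitBytes() acquires a BYOB reader, so the source must be a readable byte stream (`type: 'bytes'`); calling it on a plain ReadableStream would throw at `getReader({ mode: 'byob' })`:

```ts
import { limitBytes } from '@php-wasm/stream-compression';

// Hypothetical: keep only the first 4 bytes of a byte stream.
const source = new ReadableStream({
    type: 'bytes',
    start(controller) {
        controller.enqueue(new Uint8Array([1, 2, 3, 4, 5, 6]));
        controller.close();
    },
});
const firstFour = limitBytes(source, 4);
```
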
package/src/utils/prepend-bytes.ts
@@ -1,18 +0,0 @@
- /**
-  * Prepend bytes to a stream.
-  *
-  * @param bytes The bytes to prepend.
-  * @returns A transform stream that will prepend the specified bytes.
-  */
- export function prependBytes(bytes: Uint8Array) {
-     let isPrepended = false;
-     return new TransformStream<Uint8Array, Uint8Array>({
-         async transform(chunk, controller) {
-             if (!isPrepended) {
-                 isPrepended = true;
-                 controller.enqueue(bytes);
-             }
-             controller.enqueue(chunk);
-         },
-     });
- }
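
A usage sketch (import path assumed):

```ts
import { prependBytes } from '@php-wasm/stream-compression';

// Hypothetical: inject a magic header in front of an existing byte stream.
const body = new Blob(['payload']).stream();
const withHeader = body.pipeThrough(
    prependBytes(new TextEncoder().encode('HDR1'))
);
```

One quirk worth noting from the source: the prefix is only enqueued from `transform()`, so if the input stream emits no chunks at all, the prefix is never written; a `flush()` handler would cover that case.
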
package/src/utils/skip-first-bytes.ts
@@ -1,21 +0,0 @@
- /**
-  * Skips the first `length` bytes of a stream.
-  *
-  * @param length The number of bytes to skip.
-  * @returns A transform stream that will skip the specified number of bytes.
-  */
- export function skipFirstBytes(length: number) {
-     let totalBytesSkipped = 0;
-     return new TransformStream<Uint8Array, Uint8Array>({
-         async transform(chunk, controller) {
-             if (totalBytesSkipped + chunk.byteLength < length) {
-                 totalBytesSkipped += chunk.byteLength;
-                 return;
-             }
-
-             const bytesToSkip = length - totalBytesSkipped;
-             totalBytesSkipped = length;
-             controller.enqueue(chunk.slice(bytesToSkip));
-         },
-     });
- }
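
A usage sketch (import path assumed):

```ts
import { skipFirstBytes } from '@php-wasm/stream-compression';

// Hypothetical: drop a fixed-size preamble before processing the payload.
const stream = new Blob(['PREAMBLEpayload']).stream();
const payloadOnly = stream.pipeThrough(skipFirstBytes('PREAMBLE'.length));
```
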
package/src/utils/skip-last-bytes.ts
@@ -1,24 +0,0 @@
- /**
-  * Skips the last `skip` bytes of a stream of a known length.
-  * @param skip The number of bytes to skip at the end of the stream.
-  * @param streamLength The total number of bytes in the stream.
-  * @returns A transform stream that will skip the specified number of bytes.
-  */
- export function skipLastBytes(skip: number, streamLength: number) {
-     let currentOffset = 0;
-     const lastOffset = streamLength - skip;
-     return new TransformStream({
-         async transform(chunk, controller) {
-             if (currentOffset + chunk.byteLength >= lastOffset) {
-                 const lastChunkOffset = lastOffset - currentOffset;
-                 if (lastChunkOffset === 0) {
-                     return;
-                 }
-                 chunk = chunk.slice(0, lastChunkOffset);
-             }
-
-             currentOffset += chunk.byteLength;
-             controller.enqueue(chunk);
-         },
-     });
- }
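
A usage sketch (import path assumed). The caller must know the total stream length up front; here it comes from the Blob size:

```ts
import { skipLastBytes } from '@php-wasm/stream-compression';

// Hypothetical: trim a 4-byte checksum off the end of the stream.
const blob = new Blob(['payload' + 'CRC!']);
const withoutChecksum = blob
    .stream()
    .pipeThrough(skipLastBytes(4, blob.size));
```
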
package/src/utils/streamed-file.ts
@@ -1,58 +0,0 @@
- import { collectBytes } from './collect-bytes';
-
- /**
-  * Represents a file that is streamed and not fully
-  * loaded into memory.
-  */
- export class StreamedFile extends File {
-     /**
-      * Creates a new StreamedFile instance.
-      *
-      * @param readableStream The readable stream containing the file data.
-      * @param name The name of the file.
-      * @param type The MIME type of the file.
-      */
-     constructor(
-         private readableStream: ReadableStream<Uint8Array>,
-         name: string,
-         type?: string
-     ) {
-         super([], name, { type });
-     }
-
-     /**
-      * Overrides the slice() method of the File class.
-      *
-      * @returns A Blob representing a portion of the file.
-      */
-     override slice(): Blob {
-         throw new Error('slice() is not possible on a StreamedFile');
-     }
-
-     /**
-      * Returns the readable stream associated with the file.
-      *
-      * @returns The readable stream.
-      */
-     override stream() {
-         return this.readableStream;
-     }
-
-     /**
-      * Loads the file data into memory and then returns it as a string.
-      *
-      * @returns File data as text.
-      */
-     override async text() {
-         return new TextDecoder().decode(await this.arrayBuffer());
-     }
-
-     /**
-      * Loads the file data into memory and then returns it as an ArrayBuffer.
-      *
-      * @returns File data as an ArrayBuffer.
-      */
-     override async arrayBuffer() {
-         return await collectBytes(this.stream());
-     }
- }
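
A usage sketch (import path assumed):

```ts
import { StreamedFile } from '@php-wasm/stream-compression';

// Hypothetical: wrap a fetch() body as a File without buffering it in memory.
const response = await fetch('https://example.com/backup.zip');
const file = new StreamedFile(response.body!, 'backup.zip', 'application/zip');

// stream() hands back the original stream untouched; text() and
// arrayBuffer() buffer the remaining data, and slice() throws by design.
const reader = file.stream().getReader();
```
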
package/src/zip/decode-remote-zip.ts
@@ -1,409 +0,0 @@
- import { Semaphore } from '@php-wasm/util';
- import { filterStream } from '../utils/filter-stream';
- import { concatUint8Array } from '../utils/concat-uint8-array';
- import { collectBytes } from '../utils/collect-bytes';
- import {
-     readCentralDirectoryEntry,
-     readFileEntry,
-     decodeZip,
- } from './decode-zip';
- import { CentralDirectoryEntry, FileEntry } from './types';
- import { SIGNATURE_CENTRAL_DIRECTORY_END } from './types';
- import { IterableReadableStream } from '../utils/iterable-stream-polyfill';
-
- const CENTRAL_DIRECTORY_END_SCAN_CHUNK_SIZE = 110 * 1024;
- const BATCH_DOWNLOAD_OF_FILES_IF_CLOSER_THAN = 10 * 1024;
- const PREFER_RANGES_IF_FILE_LARGER_THAN = 1024 * 1024 * 1;
- const fetchSemaphore = new Semaphore({ concurrency: 10 });
-
- const DEFAULT_PREDICATE = () => true;
-
- /**
-  * Streams the contents of a remote zip file.
-  *
-  * If the zip is large and the predicate is filtering the zip contents,
-  * only the matching files will be downloaded using the Range header
-  * (if supported by the server).
-  *
-  * @param url The URL of the zip file.
-  * @param predicate Optional. A function that returns true if the file should be downloaded.
-  * @returns A stream of zip entries.
-  */
- export async function decodeRemoteZip(
-     url: string,
-     predicate: (
-         dirEntry: CentralDirectoryEntry | FileEntry
-     ) => boolean = DEFAULT_PREDICATE
- ) {
-     if (predicate === DEFAULT_PREDICATE) {
-         // If we're not filtering the zip contents, let's just
-         // grab the entire zip.
-         const response = await fetch(url);
-         return decodeZip(response.body!);
-     }
-
-     const contentLength = await fetchContentLength(url);
-     if (contentLength <= PREFER_RANGES_IF_FILE_LARGER_THAN) {
-         // If the zip is small enough, let's just grab it.
-         const response = await fetch(url);
-         return decodeZip(response.body!);
-     }
-
-     // Ensure ranges query support:
-     // Fetch one byte
-     const response = await fetch(url, {
-         headers: {
-             // 0-0 looks weird, doesn't it?
-             // The Range header is inclusive so it's actually
-             // a valid header asking for the first byte.
-             Range: 'bytes=0-0',
-             'Accept-Encoding': 'none',
-         },
-     });
-
-     // Fork the stream so that we can reuse it in case
-     // the Range header is unsupported and we're now streaming
-     // the entire file
-     const [peekStream, responseStream] = response.body!.tee();
-
-     // Read from the forked stream and close it.
-     const peekReader = peekStream.getReader();
-     const { value: peekBytes } = await peekReader.read();
-     const { done: peekDone } = await peekReader.read();
-     peekReader.releaseLock();
-     peekStream.cancel();
-
-     // Confirm our Range query worked as intended:
-     const rangesSupported = peekBytes?.length === 1 && peekDone;
-     if (!rangesSupported) {
-         // Uh-oh, we're actually streaming the entire file.
-         // Let's reuse the forked stream as our response stream.
-         return decodeZip(responseStream);
-     }
-
-     // We're good, let's clean up the other branch of the response stream.
-     responseStream.cancel();
-     const source = await createFetchSource(url, contentLength);
-     return streamCentralDirectoryEntries(source)
-         .pipeThrough(filterStream(predicate))
-         .pipeThrough(partitionNearbyEntries())
-         .pipeThrough(
-             fetchPartitionedEntries(source)
-         ) as IterableReadableStream<FileEntry>;
- }
-
- /**
-  * Streams the central directory entries of a zip file.
-  *
-  * @param source
-  * @returns
-  */
- function streamCentralDirectoryEntries(source: BytesSource) {
-     let centralDirectoryStream: ReadableStream<Uint8Array>;
-
-     return new ReadableStream<CentralDirectoryEntry>({
-         async start() {
-             centralDirectoryStream = await streamCentralDirectoryBytes(source);
-         },
-         async pull(controller) {
-             const entry = await readCentralDirectoryEntry(
-                 centralDirectoryStream
-             );
-             if (!entry) {
-                 controller.close();
-                 return;
-             }
-             controller.enqueue(entry);
-         },
-     });
- }
-
- /**
-  * Streams the central directory bytes of a zip file.
-  *
-  * @param source
-  * @returns
-  */
- async function streamCentralDirectoryBytes(source: BytesSource) {
-     const chunkSize = CENTRAL_DIRECTORY_END_SCAN_CHUNK_SIZE;
-     let centralDirectory: Uint8Array = new Uint8Array();
-
-     let chunkStart = source.length;
-     do {
-         chunkStart = Math.max(0, chunkStart - chunkSize);
-         const chunkEnd = Math.min(
-             chunkStart + chunkSize - 1,
-             source.length - 1
-         );
-         const bytes = await collectBytes(
-             await source.streamBytes(chunkStart, chunkEnd)
-         );
-         centralDirectory = concatUint8Array(bytes!, centralDirectory);
-
-         // Scan the buffer for the signature
-         const view = new DataView(bytes!.buffer);
-         for (let i = view.byteLength - 4; i >= 0; i--) {
-             if (view.getUint32(i, true) !== SIGNATURE_CENTRAL_DIRECTORY_END) {
-                 continue;
-             }
-
-             // Confirm we have enough data to read the offset and the
-             // length of the central directory.
-             const centralDirectoryLengthAt = i + 12;
-             const centralDirectoryOffsetAt = centralDirectoryLengthAt + 4;
-             if (centralDirectory.byteLength < centralDirectoryOffsetAt + 4) {
-                 throw new Error('Central directory not found');
-             }
-
-             // Read where the central directory starts
-             const dirStart = view.getUint32(centralDirectoryOffsetAt, true);
-             if (dirStart < chunkStart) {
-                 // We're missing some bytes, let's grab them
-                 const missingBytes = await collectBytes(
-                     await source.streamBytes(dirStart, chunkStart - 1)
-                 );
-                 centralDirectory = concatUint8Array(
-                     missingBytes!,
-                     centralDirectory
-                 );
-             } else if (dirStart > chunkStart) {
-                 // We've read too many bytes, let's trim them
-                 centralDirectory = centralDirectory.slice(
-                     dirStart - chunkStart
-                 );
-             }
-             return new Blob([centralDirectory]).stream();
-         }
-     } while (chunkStart >= 0);
-
-     throw new Error('Central directory not found');
- }
-
- /**
-  * Partitions files that are no further apart in the zip
-  * archive than BATCH_DOWNLOAD_OF_FILES_IF_CLOSER_THAN.
-  * It may download some extra files living within the gaps
-  * between the partitions.
-  */
- function partitionNearbyEntries() {
-     let lastFileEndsAt = 0;
-     let currentChunk: CentralDirectoryEntry[] = [];
-     return new TransformStream<CentralDirectoryEntry, CentralDirectoryEntry[]>({
-         transform(zipEntry, controller) {
-             // Byte distance too large, flush and start a new chunk
-             if (
-                 zipEntry.firstByteAt >
-                 lastFileEndsAt + BATCH_DOWNLOAD_OF_FILES_IF_CLOSER_THAN
-             ) {
-                 controller.enqueue(currentChunk);
-                 currentChunk = [];
-             }
-             lastFileEndsAt = zipEntry.lastByteAt;
-             currentChunk.push(zipEntry);
-         },
-         flush(controller) {
-             controller.enqueue(currentChunk);
-         },
-     });
- }
-
- /**
-  * Fetches a chunk of files from the zip archive.
-  *
-  * If any extra files are present in the received
-  * bytes stream, they are filtered out.
-  */
- function fetchPartitionedEntries(
-     source: BytesSource
- ): ReadableWritablePair<FileEntry, CentralDirectoryEntry[]> {
-     /**
-      * This function implements a ReadableStream and a WritableStream
-      * instead of a TransformStream. This is intentional.
-      *
-      * In TransformStream, the `transform` function may return a
-      * promise. The next call to `transform` will be delayed until
-      * the promise resolves. This is a problem for us because we
-      * want to issue many fetch() requests in parallel.
-      *
-      * The only way to do that seems to be creating separate ReadableStream
-      * and WritableStream implementations.
-      */
-     let isWritableClosed = false;
-     let requestsInProgress = 0;
-     let readableController: ReadableStreamDefaultController<FileEntry>;
-     const byteStreams: Array<
-         [CentralDirectoryEntry[], ReadableStream<Uint8Array>]
-     > = [];
-     /**
-      * Receives chunks of CentralDirectoryEntries, and fetches
-      * the corresponding byte ranges from the remote zip file.
-      */
-     const writable = new WritableStream<CentralDirectoryEntry[]>({
-         write(zipEntries, controller) {
-             if (!zipEntries.length) {
-                 return;
-             }
-             ++requestsInProgress;
-             // If the write() method returns a promise, the next
-             // call will be delayed until the promise resolves.
-             // Let's not return the promise, then.
-             // This will effectively issue many requests in parallel.
-             requestChunkRange(source, zipEntries)
-                 .then((byteStream) => {
-                     byteStreams.push([zipEntries, byteStream]);
-                 })
-                 .catch((e) => {
-                     controller.error(e);
-                 })
-                 .finally(() => {
-                     --requestsInProgress;
-                 });
-         },
-         abort() {
-             isWritableClosed = true;
-             readableController.close();
-         },
-         async close() {
-             isWritableClosed = true;
-         },
-     });
-     /**
-      * Decodes zipped bytes into FileEntry objects.
-      */
-     const readable = new ReadableStream<FileEntry>({
-         start(controller) {
-             readableController = controller;
-         },
-         async pull(controller) {
-             while (true) {
-                 const allChunksProcessed =
-                     isWritableClosed &&
-                     !byteStreams.length &&
-                     requestsInProgress === 0;
-                 if (allChunksProcessed) {
-                     controller.close();
-                     return;
-                 }
-
-                 // There's no bytes available, but the writable
-                 // stream is still open or there are still requests
-                 // in progress. Let's wait for more bytes.
-                 const waitingForMoreBytes = !byteStreams.length;
-                 if (waitingForMoreBytes) {
-                     await new Promise((resolve) => setTimeout(resolve, 50));
-                     continue;
-                 }
-
-                 const [requestedPaths, stream] = byteStreams[0];
-                 const file = await readFileEntry(stream);
-                 // The stream is exhausted, let's remove it from the queue
-                 // and try the next one.
-                 const streamExhausted = !file;
-                 if (streamExhausted) {
-                     byteStreams.shift();
-                     continue;
-                 }
-
-                 // There may be some extra files between the ones we're
-                 // interested in. Let's filter out any files that got
-                 // intertwined in the byte stream.
-                 const isOneOfRequestedPaths = requestedPaths.find(
-                     (entry) => entry.path === file.path
-                 );
-                 if (!isOneOfRequestedPaths) {
-                     continue;
-                 }
-
-                 // Finally! We've got a file we're interested in.
-                 controller.enqueue(file);
-                 break;
-             }
-         },
-     });
-
-     return {
-         readable,
-         writable,
-     };
- }
-
- /**
-  * Requests a chunk of bytes from the bytes source.
-  *
-  * @param source
-  * @param zipEntries
-  */
- async function requestChunkRange(
-     source: BytesSource,
-     zipEntries: CentralDirectoryEntry[]
- ) {
-     const release = await fetchSemaphore.acquire();
-     try {
-         const lastZipEntry = zipEntries[zipEntries.length - 1];
-         const substream = await source.streamBytes(
-             zipEntries[0].firstByteAt,
-             lastZipEntry.lastByteAt
-         );
-         return substream;
-     } finally {
-         release();
-     }
- }
-
- /**
-  * Fetches the Content-Length header from a remote URL.
-  */
- async function fetchContentLength(url: string) {
-     return await fetch(url, { method: 'HEAD' })
-         .then((response) => response.headers.get('Content-Length'))
-         .then((contentLength) => {
-             if (!contentLength) {
-                 throw new Error('Content-Length header is missing');
-             }
-
-             const parsedLength = parseInt(contentLength, 10);
-             if (isNaN(parsedLength) || parsedLength < 0) {
-                 throw new Error('Content-Length header is invalid');
-             }
-             return parsedLength;
-         });
- }
-
- /**
-  * Private and experimental API: Range-based data sources.
-  *
-  * The idea is that if we can read arbitrary byte ranges from
-  * a file, we can retrieve a specific subset of a zip file.
-  */
- type BytesSource = {
-     length: number;
-     streamBytes: (
-         start: number,
-         end: number
-     ) => Promise<ReadableStream<Uint8Array>>;
- };
-
- /**
-  * Creates a BytesSource enabling fetching ranges of bytes
-  * from a remote URL.
-  */
- async function createFetchSource(
-     url: string,
-     contentLength?: number
- ): Promise<BytesSource> {
-     if (contentLength === undefined) {
-         contentLength = await fetchContentLength(url);
-     }
-
-     return {
-         length: contentLength,
-         streamBytes: async (from: number, to: number) =>
-             await fetch(url, {
-                 headers: {
-                     // The Range header is inclusive, so we need to subtract 1
-                     Range: `bytes=${from}-${to - 1}`,
-                     'Accept-Encoding': 'none',
-                 },
-             }).then((response) => response.body!),
-     };
- }
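
A usage sketch of the public entry point (import path assumed; the URL and predicate are hypothetical). With a predicate, only matching entries are downloaded via Range requests when the server supports them, and the result iterates with for-await-of provided the package's stream polyfill is active:

```ts
import { decodeRemoteZip } from '@php-wasm/stream-compression';

// Download only the .php entries from a remote archive.
const files = await decodeRemoteZip(
    'https://example.com/wordpress-plugin.zip',
    (entry) => entry.path.endsWith('.php')
);
for await (const file of files) {
    console.log(file.path);
}
```
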