@php-wasm/stream-compression 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.json +18 -0
- package/README.md +11 -0
- package/package.json +31 -0
- package/project.json +34 -0
- package/src/index.ts +7 -0
- package/src/test/append-bytes.spec.ts +25 -0
- package/src/test/decode-zip.spec.ts +22 -0
- package/src/test/encode-zip.spec.ts +47 -0
- package/src/test/fixtures/hello-dolly.zip +0 -0
- package/src/test/prepend-bytes.spec.ts +25 -0
- package/src/test/skip-first-bytes.spec.ts +41 -0
- package/src/test/skip-last-bytes.spec.ts +27 -0
- package/src/test/vitest-setup-file.ts +7 -0
- package/src/utils/append-bytes.ts +16 -0
- package/src/utils/collect-bytes.ts +24 -0
- package/src/utils/collect-file.ts +16 -0
- package/src/utils/collect-string.ts +25 -0
- package/src/utils/concat-bytes.ts +38 -0
- package/src/utils/concat-string.ts +17 -0
- package/src/utils/concat-uint8-array.ts +17 -0
- package/src/utils/filter-stream.ts +15 -0
- package/src/utils/iterable-stream-polyfill.ts +35 -0
- package/src/utils/iterator-to-stream.ts +39 -0
- package/src/utils/limit-bytes.ts +40 -0
- package/src/utils/prepend-bytes.ts +18 -0
- package/src/utils/skip-first-bytes.ts +21 -0
- package/src/utils/skip-last-bytes.ts +24 -0
- package/src/utils/streamed-file.ts +58 -0
- package/src/zip/decode-remote-zip.ts +409 -0
- package/src/zip/decode-zip.ts +349 -0
- package/src/zip/encode-zip.ts +278 -0
- package/src/zip/index.ts +5 -0
- package/src/zip/types.ts +76 -0
- package/tsconfig.json +23 -0
- package/tsconfig.lib.json +14 -0
- package/tsconfig.spec.json +25 -0
- package/vite.config.ts +55 -0
package/.eslintrc.json
ADDED
@@ -0,0 +1,18 @@
+{
+	"extends": ["../../../.eslintrc.json"],
+	"ignorePatterns": ["!**/*"],
+	"overrides": [
+		{
+			"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
+			"rules": {}
+		},
+		{
+			"files": ["*.ts", "*.tsx"],
+			"rules": {}
+		},
+		{
+			"files": ["*.js", "*.jsx"],
+			"rules": {}
+		}
+	]
+}
package/README.md
ADDED
@@ -0,0 +1,11 @@
+# php-wasm-stream-compression
+
+This library was generated with [Nx](https://nx.dev).
+
+## Building
+
+Run `nx build php-wasm-stream-compression` to build the library.
+
+## Running unit tests
+
+Run `nx test php-wasm-stream-compression` to execute the unit tests via [Jest](https://jestjs.io).
package/package.json
ADDED
@@ -0,0 +1,31 @@
+{
+	"name": "@php-wasm/stream-compression",
+	"version": "0.0.1",
+	"description": "Stream-based compression bindings.",
+	"repository": {
+		"type": "git",
+		"url": "https://github.com/WordPress/wordpress-playground"
+	},
+	"homepage": "https://developer.wordpress.org/playground",
+	"author": "The WordPress contributors",
+	"contributors": [
+		{
+			"name": "Adam Zielinski",
+			"email": "adam@adamziel.com",
+			"url": "https://github.com/adamziel"
+		}
+	],
+	"exports": {
+		".": {
+			"import": "./index.js",
+			"require": "./index.cjs"
+		},
+		"./package.json": "./package.json"
+	},
+	"publishConfig": {
+		"access": "public",
+		"directory": "../../../dist/packages/php-wasm/stream-compression"
+	},
+	"license": "GPL-2.0-or-later",
+	"type": "module"
+}
package/project.json
ADDED
@@ -0,0 +1,34 @@
+{
+	"name": "php-wasm-stream-compression",
+	"$schema": "../../../node_modules/nx/schemas/project-schema.json",
+	"sourceRoot": "packages/php-wasm/stream-compression/src",
+	"projectType": "library",
+	"targets": {
+		"build": {
+			"executor": "@nx/vite:build",
+			"outputs": ["{options.outputPath}"],
+			"options": {
+				"outputPath": "dist/packages/php-wasm/stream-compression"
+			}
+		},
+		"test": {
+			"executor": "@nx/vite:test",
+			"outputs": ["{options.reportsDirectory}"],
+			"options": {
+				"passWithNoTests": true,
+				"reportsDirectory": "../../../coverage/packages/php-wasm/stream-compression"
+			}
+		},
+		"lint": {
+			"executor": "@nx/linter:eslint",
+			"outputs": ["{options.outputFile}"],
+			"options": {
+				"lintFilePatterns": [
+					"packages/php-wasm/stream-compression/**/*.ts",
+					"packages/php-wasm/stream-compression/package.json"
+				]
+			}
+		}
+	},
+	"tags": ["scope:independent-from-php-binaries"]
+}
package/src/index.ts
ADDED
@@ -0,0 +1,7 @@
+import '@php-wasm/node-polyfills';
+
+export { collectBytes } from './utils/collect-bytes';
+export { collectFile } from './utils/collect-file';
+export { iteratorToStream } from './utils/iterator-to-stream';
+export { StreamedFile } from './utils/streamed-file';
+export { encodeZip, decodeZip, decodeRemoteZip } from './zip';
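Usage sketch (illustrative, not part of the published package): the round trip below mirrors the tests shipped in `src/test/encode-zip.spec.ts` and `src/test/decode-zip.spec.ts` later in this diff, and assumes a runtime with `File`, `Blob`, and WHATWG streams available.

```ts
import {
	encodeZip,
	decodeZip,
	collectBytes,
} from '@php-wasm/stream-compression';

const files = [
	new File([new Uint8Array([1, 2, 3])], 'wp-content/plugins/hello.php'),
];

// encodeZip() consumes an iterator of File objects and emits zip bytes.
const zipBytes = await collectBytes(encodeZip(files[Symbol.iterator]()));

// decodeZip() turns a zip byte stream back into a stream of File entries.
// The for-await loop relies on the asyncIterator polyfill this package loads.
for await (const entry of decodeZip(new Blob([zipBytes]).stream())) {
	console.log(entry.name, entry.size);
}
```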
package/src/test/append-bytes.spec.ts
ADDED
@@ -0,0 +1,25 @@
+import { appendBytes } from '../utils/append-bytes';
+
+describe('appendBytes', () => {
+	it('Should append the specified number of bytes', async () => {
+		const stream = new ReadableStream<Uint8Array>({
+			type: 'bytes',
+			start(controller) {
+				controller.enqueue(new Uint8Array([1, 2, 3]));
+				controller.close();
+			},
+		}).pipeThrough(appendBytes(new Uint8Array([4, 5])));
+
+		const reader = stream.getReader();
+		const result1 = await reader.read();
+		expect(result1.value).toEqual(new Uint8Array([1, 2, 3]));
+		expect(result1.done).toBe(false);
+
+		const result2 = await reader.read();
+		expect(result2.value).toEqual(new Uint8Array([4, 5]));
+		expect(result2.done).toBe(false);
+
+		const result3 = await reader.read();
+		expect(result3.done).toBe(true);
+	});
+});
package/src/test/decode-zip.spec.ts
ADDED
@@ -0,0 +1,22 @@
+import { decodeZip } from '../zip/decode-zip';
+import { readFile } from 'fs/promises';
+
+describe('decodeZip', () => {
+	it('Should uncompress compress files', async () => {
+		const zipBytes = await readFile(
+			__dirname + '/fixtures/hello-dolly.zip'
+		);
+		const zipStream = decodeZip(new Blob([zipBytes]).stream());
+
+		const files = [];
+		for await (const file of zipStream) {
+			files.push(file);
+		}
+		expect(files.length).toBe(3);
+		expect(files[0].name).toBe('hello-dolly/');
+		expect(files[1].name).toBe('hello-dolly/hello.php');
+		expect(files[1].size).toBe(2593);
+		expect(files[2].name).toBe('hello-dolly/readme.txt');
+		expect(files[2].size).toBe(624);
+	});
+});
package/src/test/encode-zip.spec.ts
ADDED
@@ -0,0 +1,47 @@
+import { collectBytes } from '../utils/collect-bytes';
+import { encodeZip } from '../zip/encode-zip';
+import { decodeZip } from '../zip/decode-zip';
+
+describe('compressFiles', () => {
+	it('Should compress files into a zip archive', async () => {
+		const files: File[] = [
+			new File(
+				[new Uint8Array([1, 2, 3, 4, 5])],
+				'wp-content/plugins/hello.php'
+			),
+			new File(
+				[new Uint8Array([1, 2, 3, 4, 5])],
+				'wp-content/plugins/hello/hello.php'
+			),
+			new File(
+				[new Uint8Array([1, 2, 3, 4, 5])],
+				'wp-content/plugins/hello/hello2.php'
+			),
+			new File(
+				[new Uint8Array([1, 2, 3, 4, 5])],
+				'wp-content/plugins/hello/hello3.php'
+			),
+		];
+
+		const zipBytes = await collectBytes(
+			encodeZip(files[Symbol.iterator]())
+		);
+		const zipStream = decodeZip(new Blob([zipBytes!]).stream());
+
+		const reader = zipStream.getReader();
+		let i = 0;
+		for (i = 0; i < files.length; i++) {
+			const { value: receivedFile, done } = await reader.read();
+			const receivedBytes = new Uint8Array(
+				await receivedFile!.arrayBuffer()
+			);
+			const expectedBytes = new Uint8Array(await files[i].arrayBuffer());
+			expect(receivedBytes).toEqual(expectedBytes);
+			expect(done).toBe(false);
+		}
+		expect(i).toBe(files.length);
+
+		const { done } = await reader.read();
+		expect(done).toBe(true);
+	});
+});
package/src/test/fixtures/hello-dolly.zip
ADDED
Binary file
package/src/test/prepend-bytes.spec.ts
ADDED
@@ -0,0 +1,25 @@
+import { prependBytes } from '../utils/prepend-bytes';
+
+describe('prependBytes', () => {
+	it('Should prepend the specified number of bytes', async () => {
+		const stream = new ReadableStream<Uint8Array>({
+			type: 'bytes',
+			start(controller) {
+				controller.enqueue(new Uint8Array([4, 5]));
+				controller.close();
+			},
+		}).pipeThrough(prependBytes(new Uint8Array([1, 2, 3])));
+
+		const reader = stream.getReader();
+		const result1 = await reader.read();
+		expect(result1.value).toEqual(new Uint8Array([1, 2, 3]));
+		expect(result1.done).toBe(false);
+
+		const result2 = await reader.read();
+		expect(result2.value).toEqual(new Uint8Array([4, 5]));
+		expect(result2.done).toBe(false);
+
+		const result3 = await reader.read();
+		expect(result3.done).toBe(true);
+	});
+});
package/src/test/skip-first-bytes.spec.ts
ADDED
@@ -0,0 +1,41 @@
+import { skipFirstBytes } from '../utils/skip-first-bytes';
+
+describe('skipFirstBytes', () => {
+	it('Should skip the specified number of bytes', async () => {
+		const stream = new ReadableStream<Uint8Array>({
+			type: 'bytes',
+			start(controller) {
+				controller.enqueue(new Uint8Array([1, 2, 3, 4, 5]));
+				controller.close();
+			},
+		}).pipeThrough(skipFirstBytes(3));
+
+		const reader = stream.getReader();
+		const result1 = await reader.read();
+		expect(result1.value).toEqual(new Uint8Array([4, 5]));
+		expect(result1.done).toBe(false);
+
+		const result2 = await reader.read();
+		expect(result2.done).toBe(true);
+	});
+
+	it('Should skip the specified number of bytes across multiple pulls', async () => {
+		const stream = new ReadableStream<Uint8Array>({
+			type: 'bytes',
+			start(controller) {
+				controller.enqueue(new Uint8Array([1]));
+				controller.enqueue(new Uint8Array([2, 3]));
+				controller.enqueue(new Uint8Array([4, 5, 6]));
+				controller.close();
+			},
+		}).pipeThrough(skipFirstBytes(4));
+
+		const reader = stream.getReader();
+		const result1 = await reader.read();
+		expect(result1.value).toEqual(new Uint8Array([5, 6]));
+		expect(result1.done).toBe(false);
+
+		const result2 = await reader.read();
+		expect(result2.done).toBe(true);
+	});
+});
package/src/test/skip-last-bytes.spec.ts
ADDED
@@ -0,0 +1,27 @@
+import { skipLastBytes } from '../utils/skip-last-bytes';
+
+describe('skipLastBytes', () => {
+	it('Should skip the specified number of bytes', async () => {
+		const stream = new ReadableStream<Uint8Array>({
+			type: 'bytes',
+			start(controller) {
+				controller.enqueue(new Uint8Array([1, 2, 3, 4, 5]));
+				controller.enqueue(new Uint8Array([6, 7]));
+				controller.enqueue(new Uint8Array([8, 9]));
+				controller.close();
+			},
+		}).pipeThrough(skipLastBytes(3, 9));
+
+		const reader = stream.getReader();
+		const result1 = await reader.read();
+		expect(result1.value).toEqual(new Uint8Array([1, 2, 3, 4, 5]));
+		expect(result1.done).toBe(false);
+
+		const result2 = await reader.read();
+		expect(result2.value).toEqual(new Uint8Array([6]));
+		expect(result2.done).toBe(false);
+
+		const result3 = await reader.read();
+		expect(result3.done).toBe(true);
+	});
+});
package/src/utils/append-bytes.ts
ADDED
@@ -0,0 +1,16 @@
+/**
+ * Appends bytes to a stream.
+ *
+ * @param bytes The bytes to append.
+ * @returns A transform stream that will append the specified bytes.
+ */
+export function appendBytes(bytes: Uint8Array) {
+	return new TransformStream<Uint8Array, Uint8Array>({
+		async transform(chunk, controller) {
+			controller.enqueue(chunk);
+		},
+		async flush(controller) {
+			controller.enqueue(bytes);
+		},
+	});
+}
package/src/utils/collect-bytes.ts
ADDED
@@ -0,0 +1,24 @@
+import { concatBytes } from './concat-bytes';
+import { limitBytes } from './limit-bytes';
+
+/**
+ * Collects the contents of the entire stream into a single Uint8Array.
+ *
+ * @param stream The stream to collect.
+ * @param bytes Optional. The number of bytes to read from the stream.
+ * @returns The string contents of the stream.
+ */
+export async function collectBytes(
+	stream: ReadableStream<Uint8Array>,
+	bytes?: number
+) {
+	if (bytes !== undefined) {
+		stream = limitBytes(stream, bytes);
+	}
+
+	return await stream
+		.pipeThrough(concatBytes(bytes))
+		.getReader()
+		.read()
+		.then(({ value }) => value!);
+}
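A small usage sketch of `collectBytes()` on its own (illustrative, not part of the published package):

```ts
import { collectBytes } from '@php-wasm/stream-compression';

// Collect an entire byte stream into a single Uint8Array.
const bytes = await collectBytes(
	new Blob([new Uint8Array([1, 2, 3, 4])]).stream()
);
// bytes is Uint8Array [1, 2, 3, 4]. Passing a second argument stops reading
// after that many bytes, but then the source must support BYOB reads
// (see limit-bytes.ts further down).
```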
package/src/utils/collect-file.ts
ADDED
@@ -0,0 +1,16 @@
+import { collectBytes } from './collect-bytes';
+
+/**
+ * Collects the contents of the entire stream into a single File object.
+ *
+ * @param stream The stream to collect.
+ * @param fileName The name of the file
+ * @returns The string contents of the stream.
+ */
+export async function collectFile(
+	fileName: string,
+	stream: ReadableStream<Uint8Array>
+) {
+	// @TODO: use StreamingFile
+	return new File([await collectBytes(stream)], fileName);
+}
package/src/utils/collect-string.ts
ADDED
@@ -0,0 +1,25 @@
+import { concatString } from './concat-string';
+import { limitBytes } from './limit-bytes';
+
+/**
+ * Collects the contents of the entire stream into a single string.
+ *
+ * @param stream The stream to collect.
+ * @param bytes Optional. The number of bytes to read from the stream.
+ * @returns The string contents of the stream.
+ */
+export async function collectString(
+	stream: ReadableStream<Uint8Array>,
+	bytes?: number
+) {
+	if (bytes !== undefined) {
+		stream = limitBytes(stream, bytes);
+	}
+
+	return await stream
+		.pipeThrough(new TextDecoderStream())
+		.pipeThrough(concatString())
+		.getReader()
+		.read()
+		.then(({ value }) => value);
+}
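A quick sketch of `collectString()` (illustrative; this helper is not re-exported from `src/index.ts`, so the import path here is internal to the package):

```ts
import { collectString } from './collect-string';

// Decode a UTF-8 byte stream and join every chunk into one string.
const text = await collectString(new Blob(['Hello, Dolly']).stream());
// text === 'Hello, Dolly'
```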
package/src/utils/concat-bytes.ts
ADDED
@@ -0,0 +1,38 @@
+import { concatUint8Array } from './concat-uint8-array';
+
+/**
+ * Concatenates the contents of the stream into a single Uint8Array.
+ *
+ * @param totalBytes Optional. The number of bytes to concatenate. Used to
+ *                   pre-allocate the buffer. If not provided, the buffer will
+ *                   be dynamically resized as needed.
+ * @returns A stream that will emit a single UInt8Array entry before closing.
+ */
+export function concatBytes(totalBytes?: number) {
+	if (totalBytes === undefined) {
+		let acc = new Uint8Array();
+		return new TransformStream<Uint8Array, Uint8Array>({
+			transform(chunk) {
+				acc = concatUint8Array(acc, chunk);
+			},
+
+			flush(controller) {
+				controller.enqueue(acc);
+			},
+		});
+	} else {
+		const buffer = new ArrayBuffer(totalBytes || 0);
+		let offset = 0;
+		return new TransformStream<Uint8Array, Uint8Array>({
+			transform(chunk) {
+				const view = new Uint8Array(buffer);
+				view.set(chunk, offset);
+				offset += chunk.byteLength;
+			},
+
+			flush(controller) {
+				controller.enqueue(new Uint8Array(buffer));
+			},
+		});
+	}
+}
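The two branches above differ only in how the output buffer is allocated. A short sketch of both call styles (illustrative, not from the package):

```ts
import { concatBytes } from './concat-bytes';

const makeSource = () =>
	new Blob([new Uint8Array([1, 2]), new Uint8Array([3, 4, 5])]).stream();

// Without a size: chunks are re-concatenated into a growing buffer.
const dynamic = await makeSource()
	.pipeThrough(concatBytes())
	.getReader()
	.read();

// With the total size known up front: a 5-byte buffer is pre-allocated.
const preallocated = await makeSource()
	.pipeThrough(concatBytes(5))
	.getReader()
	.read();

// Both .value fields are Uint8Array [1, 2, 3, 4, 5].
```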
package/src/utils/concat-string.ts
ADDED
@@ -0,0 +1,17 @@
+/**
+ * Concatenate all chunks into a single string.
+ *
+ * @returns A stream that will emit a single string entry before closing.
+ */
+export function concatString() {
+	const chunks: string[] = [];
+	return new TransformStream<string, string>({
+		transform(chunk) {
+			chunks.push(chunk);
+		},
+
+		flush(controller) {
+			controller.enqueue(chunks.join(''));
+		},
+	});
+}
package/src/utils/concat-uint8-array.ts
ADDED
@@ -0,0 +1,17 @@
+/**
+ * Concatenates multiple Uint8Arrays into a single Uint8Array.
+ *
+ * @param arrays The arrays to concatenate.
+ * @returns A new Uint8Array containing the contents of all the arrays.
+ */
+export function concatUint8Array(...arrays: Uint8Array[]) {
+	const result = new Uint8Array(
+		arrays.reduce((sum, array) => sum + array.length, 0)
+	);
+	let offset = 0;
+	for (const array of arrays) {
+		result.set(array, offset);
+		offset += array.length;
+	}
+	return result;
+}
package/src/utils/filter-stream.ts
ADDED
@@ -0,0 +1,15 @@
+/**
+ * Filter the stream based on a predicate.
+ *
+ * @param predicate The predicate to filter the stream with.
+ * @returns A new stream that will only contain chunks that pass the predicate.
+ */
+export function filterStream<T>(predicate: (chunk: T) => boolean) {
+	return new TransformStream<T, T>({
+		transform(chunk, controller) {
+			if (predicate(chunk)) {
+				controller.enqueue(chunk);
+			}
+		},
+	});
+}
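`filterStream()` is generic over the chunk type, so it also applies to streams of File entries, not just bytes. A hypothetical pairing with `decodeZip()` (the `zipByteStream` input is assumed, e.g. a fetch() response body):

```ts
import { filterStream } from './filter-stream';
import { decodeZip } from '../zip/decode-zip';

declare const zipByteStream: ReadableStream<Uint8Array>; // assumed input

// Keep only the PHP files from a zip entry stream.
const phpEntries = decodeZip(zipByteStream).pipeThrough(
	filterStream<File>((entry) => entry.name.endsWith('.php'))
);
```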
package/src/utils/iterable-stream-polyfill.ts
ADDED
@@ -0,0 +1,35 @@
+/**
+ * Polyfill for ReadableStream[Symbol.asyncIterator]
+ * This enables the use of for-await-of loops with ReadableStreams
+ *
+ * @example
+ * ```ts
+ * for await (const entry of stream) {
+ *   // ...
+ * }
+ * ```
+ */
+// @ts-ignore
+if (!ReadableStream.prototype[Symbol.asyncIterator]) {
+	// @ts-ignore
+	ReadableStream.prototype[Symbol.asyncIterator] = async function* () {
+		const reader = this.getReader();
+		try {
+			while (true) {
+				const { done, value } = await reader.read();
+				if (done) {
+					return;
+				}
+				yield value;
+			}
+		} finally {
+			reader.releaseLock();
+		}
+	};
+	// @ts-ignore
+	ReadableStream.prototype.iterate =
+		// @ts-ignore
+		ReadableStream.prototype[Symbol.asyncIterator];
+}
+
+export type IterableReadableStream<R> = ReadableStream<R> & AsyncIterable<R>;
package/src/utils/iterator-to-stream.ts
ADDED
@@ -0,0 +1,39 @@
+import { IterableReadableStream } from './iterable-stream-polyfill';
+
+/**
+ * Converts an iterator or iterable to a stream.
+ *
+ * @param iteratorOrIterable The iterator or iterable to convert.
+ * @returns A stream that will yield the values from the iterator or iterable.
+ */
+export function iteratorToStream<T>(
+	iteratorOrIterable:
+		| AsyncIterator<T>
+		| Iterator<T>
+		| AsyncIterable<T>
+		| Iterable<T>
+) {
+	if (iteratorOrIterable instanceof ReadableStream) {
+		return iteratorOrIterable as IterableReadableStream<T>;
+	}
+
+	let iterator: AsyncIterator<T> | Iterator<T>;
+	if (Symbol.asyncIterator in iteratorOrIterable) {
+		iterator = iteratorOrIterable[Symbol.asyncIterator]();
+	} else if (Symbol.iterator in iteratorOrIterable) {
+		iterator = iteratorOrIterable[Symbol.iterator]();
+	} else {
+		iterator = iteratorOrIterable;
+	}
+
+	return new ReadableStream<T>({
+		async pull(controller) {
+			const { done, value } = await iterator.next();
+			if (done) {
+				controller.close();
+				return;
+			}
+			controller.enqueue(value);
+		},
+	}) as IterableReadableStream<T>;
+}
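A sketch of the intended use: wrapping a lazy generator of File objects so it can be piped or passed where a ReadableStream is expected. The generator below is made up for illustration.

```ts
import { iteratorToStream } from './iterator-to-stream';

// A made-up async generator producing File objects lazily.
async function* generateFiles() {
	for (let i = 0; i < 3; i++) {
		yield new File([`contents of file ${i}`], `file-${i}.txt`);
	}
}

const fileStream = iteratorToStream(generateFiles());

// Iteration works thanks to the asyncIterator polyfill above.
for await (const file of fileStream) {
	console.log(file.name);
}
```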
package/src/utils/limit-bytes.ts
ADDED
@@ -0,0 +1,40 @@
+/**
+ * Limit the number of bytes read from a stream.
+ *
+ * @param stream The stream to limit.
+ * @param bytes The number of bytes to read from the stream.
+ * @returns A new stream that will read at most `bytes` bytes from `stream`.
+ */
+export function limitBytes(stream: ReadableStream<Uint8Array>, bytes: number) {
+	if (bytes === 0) {
+		return new ReadableStream({
+			start(controller) {
+				controller.close();
+			},
+		});
+	}
+	const reader = stream.getReader({ mode: 'byob' });
+	let offset = 0;
+	return new ReadableStream({
+		async pull(controller) {
+			const { value, done } = await reader.read(
+				new Uint8Array(bytes - offset)
+			);
+			if (done) {
+				reader.releaseLock();
+				controller.close();
+				return;
+			}
+			offset += value.length;
+			controller.enqueue(value);
+
+			if (offset >= bytes) {
+				reader.releaseLock();
+				controller.close();
+			}
+		},
+		cancel() {
+			reader.cancel();
+		},
+	});
+}
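Worth noting: `limitBytes()` acquires a BYOB reader, so the source must be a byte stream (`type: 'bytes'`), as in the package's own tests. A minimal sketch (illustrative, not from the package):

```ts
import { limitBytes } from './limit-bytes';
import { collectBytes } from './collect-bytes';

// The source must be a byte stream because limitBytes() uses a BYOB reader.
const source = new ReadableStream<Uint8Array>({
	type: 'bytes',
	start(controller) {
		controller.enqueue(new Uint8Array([1, 2, 3, 4, 5]));
		controller.close();
	},
});

// Read at most 3 bytes from the source.
const firstThree = await collectBytes(limitBytes(source, 3));
// firstThree is Uint8Array [1, 2, 3]
```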
package/src/utils/prepend-bytes.ts
ADDED
@@ -0,0 +1,18 @@
+/**
+ * Prepend bytes to a stream.
+ *
+ * @param bytes The bytes to prepend.
+ * @returns A transform stream that will prepend the specified bytes.
+ */
+export function prependBytes(bytes: Uint8Array) {
+	let isPrepended = false;
+	return new TransformStream<Uint8Array, Uint8Array>({
+		async transform(chunk, controller) {
+			if (!isPrepended) {
+				isPrepended = true;
+				controller.enqueue(bytes);
+			}
+			controller.enqueue(chunk);
+		},
+	});
+}
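`prependBytes()` and `appendBytes()` compose naturally for framing a payload. An illustrative sketch (the header and trailer bytes are made up):

```ts
import { prependBytes } from './prepend-bytes';
import { appendBytes } from './append-bytes';

const header = new Uint8Array([0xde, 0xad]);
const trailer = new Uint8Array([0xbe, 0xef]);

// Emits the header, then the payload chunks, then the trailer.
const framed = new Blob([new Uint8Array([1, 2, 3])])
	.stream()
	.pipeThrough(prependBytes(header))
	.pipeThrough(appendBytes(trailer));
```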
package/src/utils/skip-first-bytes.ts
ADDED
@@ -0,0 +1,21 @@
+/**
+ * Skips the first `length` bytes of a stream.
+ *
+ * @param length The number of bytes to skip.
+ * @returns A transform stream that will skip the specified number of bytes.
+ */
+export function skipFirstBytes(length: number) {
+	let totalBytesSkipped = 0;
+	return new TransformStream<Uint8Array, Uint8Array>({
+		async transform(chunk, controller) {
+			if (totalBytesSkipped + chunk.byteLength < length) {
+				totalBytesSkipped += chunk.byteLength;
+				return;
+			}
+
+			const bytesToSkip = length - totalBytesSkipped;
+			totalBytesSkipped = length;
+			controller.enqueue(chunk.slice(bytesToSkip));
+		},
+	});
+}
package/src/utils/skip-last-bytes.ts
ADDED
@@ -0,0 +1,24 @@
+/**
+ * Skips the first `length` bytes of a stream.
+ *
+ * @param length The number of bytes to skip.
+ * @returns A transform stream that will skip the specified number of bytes.
+ */
+export function skipLastBytes(skip: number, streamLength: number) {
+	let currentOffset = 0;
+	const lastOffset = streamLength - skip;
+	return new TransformStream({
+		async transform(chunk, controller) {
+			if (currentOffset + chunk.byteLength >= lastOffset) {
+				const lastChunkOffset = lastOffset - currentOffset;
+				if (lastChunkOffset === 0) {
+					return;
+				}
+				chunk = chunk.slice(0, lastChunkOffset);
+			}
+
+			currentOffset += chunk.byteLength;
+			controller.enqueue(chunk);
+		},
+	});
+}
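Taken together, `skipLastBytes()`, `skipFirstBytes()`, and `collectBytes()` can slice a byte range out of a stream without buffering it whole. An illustrative sketch (offsets chosen arbitrarily; note that `skipLastBytes()` needs the total stream length up front):

```ts
import { skipFirstBytes } from './skip-first-bytes';
import { skipLastBytes } from './skip-last-bytes';
import { collectBytes } from './collect-bytes';

// A 9-byte source; drop the last 3 bytes, then the first 2.
const source = new Blob([
	new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9]),
]).stream();

const middle = await collectBytes(
	source
		// skipLastBytes() sees the full stream here, so streamLength is 9.
		.pipeThrough(skipLastBytes(3, 9))
		.pipeThrough(skipFirstBytes(2))
);
// middle is Uint8Array [3, 4, 5, 6]
```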