@addmaple/lz4 0.1.2 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -0
- package/dist/core.js +2 -1
- package/dist/custom.js +77 -1
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -4,6 +4,24 @@ Fast LZ4 compression in the browser and Node.js using Rust + WASM.
|
|
|
4
4
|
|
|
5
5
|
**2.5x-3.5x faster** than `lz4js`.
|
|
6
6
|
|
|
7
|
+
## Implementation (Rust)
|
|
8
|
+
|
|
9
|
+
This package is backed by these Rust crates in the `wasm-fast-compress` repo:
|
|
10
|
+
|
|
11
|
+
- `codec-lz4` (this repo): high-level codec wrapper
|
|
12
|
+
- `lz4_flex` (Git fork, branch `wasm-simd`): core LZ4 implementation with WASM SIMD128 hot paths
|
|
13
|
+
|
|
14
|
+
## SIMD acceleration (how it works)
|
|
15
|
+
|
|
16
|
+
- We build **two WASM binaries**:
|
|
17
|
+
- `lz4.base.wasm`: compiled without `+simd128`
|
|
18
|
+
- `lz4.simd.wasm`: compiled with `-C target-feature=+simd128`
|
|
19
|
+
- At runtime, the JS loader detects SIMD support and loads the best binary automatically.
|
|
20
|
+
|
|
21
|
+
On wasm32, the SIMD build benefits from:
|
|
22
|
+
- `lz4_flex` explicit `wasm32 + simd128` intrinsics for match finding / copying hot paths
|
|
23
|
+
- additional LLVM autovectorization where applicable
|
|
24
|
+
|
|
7
25
|
## Installation
|
|
8
26
|
|
|
9
27
|
```bash
|
|
@@ -15,12 +33,81 @@ npm install @addmaple/lz4
|
|
|
15
33
|
```javascript
|
|
16
34
|
import { init, compress } from '@addmaple/lz4';
|
|
17
35
|
|
|
36
|
+
// Optional: call init() to avoid first-call latency.
|
|
37
|
+
// If you skip init(), the first compress/decompress call will lazy-initialize.
|
|
18
38
|
await init();
|
|
19
39
|
|
|
20
40
|
const input = new TextEncoder().encode('hello world');
|
|
21
41
|
const compressed = await compress(input);
|
|
22
42
|
```
|
|
23
43
|
|
|
44
|
+
### Lazy init (init() optional)
|
|
45
|
+
|
|
46
|
+
```javascript
|
|
47
|
+
import { compress } from '@addmaple/lz4';
|
|
48
|
+
|
|
49
|
+
const input = new TextEncoder().encode('hello world');
|
|
50
|
+
const compressed = await compress(input); // triggers lazy init on first call
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
### Streaming compression + decompression
|
|
54
|
+
|
|
55
|
+
For chunked input (e.g. streaming over the network), use the handle-based streaming helpers:
|
|
56
|
+
|
|
57
|
+
```javascript
|
|
58
|
+
import { init, StreamingCompressor, StreamingDecompressor } from '@addmaple/lz4';
|
|
59
|
+
|
|
60
|
+
// Optional: init() is not required; streaming helpers also lazy-init.
|
|
61
|
+
await init();
|
|
62
|
+
|
|
63
|
+
// Compress
|
|
64
|
+
const enc = new StreamingCompressor();
|
|
65
|
+
const c1 = await enc.compressChunk(chunk1, false);
|
|
66
|
+
const c2 = await enc.compressChunk(chunk2, false);
|
|
67
|
+
const c3 = await enc.compressChunk(chunk3, true); // finish
|
|
68
|
+
|
|
69
|
+
// Decompress (finish must be true to produce output for frame format)
|
|
70
|
+
const dec = new StreamingDecompressor();
|
|
71
|
+
await dec.decompressChunk(c1, false);
|
|
72
|
+
await dec.decompressChunk(c2, false);
|
|
73
|
+
const plain = await dec.decompressChunk(c3, true);
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Streaming to `fetch()` (ergonomic)
|
|
77
|
+
|
|
78
|
+
If you want to upload a `File`/`Blob` with LZ4 compression, you can pipe it through the built-in stream helper:
|
|
79
|
+
|
|
80
|
+
```javascript
|
|
81
|
+
import { createCompressionStream } from '@addmaple/lz4';
|
|
82
|
+
|
|
83
|
+
const body = file.stream().pipeThrough(createCompressionStream());
|
|
84
|
+
|
|
85
|
+
await fetch('/upload', {
|
|
86
|
+
method: 'POST',
|
|
87
|
+
headers: {
|
|
88
|
+
// Not a standard encoding token like gzip; your server must explicitly support this.
|
|
89
|
+
'Content-Encoding': 'lz4',
|
|
90
|
+
},
|
|
91
|
+
body,
|
|
92
|
+
// Needed for streaming request bodies in some runtimes (notably Node fetch).
|
|
93
|
+
duplex: 'half',
|
|
94
|
+
});
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Streaming decompression from `fetch()` (ergonomic)
|
|
98
|
+
|
|
99
|
+
```javascript
|
|
100
|
+
import { createDecompressionStream } from '@addmaple/lz4';
|
|
101
|
+
|
|
102
|
+
const res = await fetch('/download');
|
|
103
|
+
if (!res.body) throw new Error('No response body');
|
|
104
|
+
|
|
105
|
+
const decompressed = res.body.pipeThrough(createDecompressionStream());
|
|
106
|
+
|
|
107
|
+
// Example: read it all (or pipe somewhere else)
|
|
108
|
+
const buf = await new Response(decompressed).arrayBuffer();
|
|
109
|
+
```
|
|
110
|
+
|
|
24
111
|
### Inline (Zero-latency)
|
|
25
112
|
|
|
26
113
|
WASM bytes embedded directly in JS — no separate file fetching:
|
package/dist/core.js
CHANGED
|
@@ -42,7 +42,8 @@ function toBytes(input) {
|
|
|
42
42
|
if (input instanceof Uint8Array) return input;
|
|
43
43
|
if (ArrayBuffer.isView(input)) return new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
|
|
44
44
|
if (input instanceof ArrayBuffer) return new Uint8Array(input);
|
|
45
|
-
|
|
45
|
+
if (typeof input === 'string') return new TextEncoder().encode(input);
|
|
46
|
+
throw new TypeError("Expected a TypedArray, ArrayBuffer, or string");
|
|
46
47
|
}
|
|
47
48
|
|
|
48
49
|
function scalarSize(type) {
|
package/dist/custom.js
CHANGED
|
@@ -14,7 +14,8 @@ function toBytes(input) {
|
|
|
14
14
|
if (input instanceof Uint8Array) return input;
|
|
15
15
|
if (ArrayBuffer.isView(input)) return new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
|
|
16
16
|
if (input instanceof ArrayBuffer) return new Uint8Array(input);
|
|
17
|
-
|
|
17
|
+
if (typeof input === 'string') return new TextEncoder().encode(input);
|
|
18
|
+
throw new TypeError("Expected a TypedArray, ArrayBuffer, or string");
|
|
18
19
|
}
|
|
19
20
|
|
|
20
21
|
// ============================================================================
|
|
@@ -335,5 +336,80 @@ export async function decompress(input) {
|
|
|
335
336
|
}
|
|
336
337
|
}
|
|
337
338
|
|
|
339
|
+
// ============================================================================
|
|
340
|
+
// Ergonomic streaming helpers (Web Streams)
|
|
341
|
+
// ============================================================================
|
|
342
|
+
|
|
343
|
+
/**
 * Guard: ensure the Web Streams `TransformStream` constructor exists in this
 * runtime. No-op when available; throws otherwise (e.g. very old Node/browsers).
 * @throws {Error} when `TransformStream` is not defined globally
 */
function requireTransformStream() {
  const unavailable = typeof TransformStream === 'undefined';
  if (unavailable) throw new Error('TransformStream is not available in this runtime');
}
|
|
348
|
+
|
|
349
|
+
/**
 * Create a TransformStream that LZ4-compresses a byte stream.
 *
 * Uses the LZ4 *frame* streaming API, so the receiver can decode the output
 * without knowing the original size up front (useful for fetch request bodies).
 *
 * @returns {TransformStream<Uint8Array, Uint8Array>}
 */
export function createCompressionStream() {
  requireTransformStream();
  const compressor = new StreamingCompressor();

  // Enqueue only non-empty outputs; a chunk may produce no bytes yet.
  const emit = (bytes, controller) => {
    if (bytes.length) controller.enqueue(bytes);
  };

  return new TransformStream({
    async transform(chunk, controller) {
      const out = await compressor.compressChunk(toBytes(chunk), false);
      emit(out, controller);
    },
    async flush(controller) {
      // finish=true flushes the frame footer and closes the handle.
      const out = await compressor.compressChunk(new Uint8Array(0), true);
      emit(out, controller);
    },
  });
}
|
|
373
|
+
|
|
374
|
+
/**
 * Create a TransformStream that LZ4-decompresses a byte stream.
 *
 * Note: the current LZ4 frame streaming decompressor buffers input until
 * finish=true, so output typically appears during `flush()` rather than
 * per-chunk.
 *
 * @returns {TransformStream<Uint8Array, Uint8Array>}
 */
export function createDecompressionStream() {
  requireTransformStream();
  const decompressor = new StreamingDecompressor();

  // Enqueue only non-empty outputs; intermediate chunks usually yield nothing.
  const emit = (bytes, controller) => {
    if (bytes.length) controller.enqueue(bytes);
  };

  return new TransformStream({
    async transform(chunk, controller) {
      const out = await decompressor.decompressChunk(toBytes(chunk), false);
      emit(out, controller);
    },
    async flush(controller) {
      // finish=true drains the buffered frame and produces the plaintext.
      const out = await decompressor.decompressChunk(new Uint8Array(0), true);
      emit(out, controller);
    },
  });
}
|
|
397
|
+
|
|
398
|
+
/**
 * Convenience helper: readable.pipeThrough(createCompressionStream()).
 * @param {ReadableStream<Uint8Array>} readable - raw byte stream to compress
 * @returns {ReadableStream<Uint8Array>} LZ4-compressed byte stream
 */
export function compressStream(readable) {
  const lz4 = createCompressionStream();
  return readable.pipeThrough(lz4);
}
|
|
405
|
+
|
|
406
|
+
/**
 * Convenience helper: readable.pipeThrough(createDecompressionStream()).
 * @param {ReadableStream<Uint8Array>} readable - LZ4-compressed byte stream
 * @returns {ReadableStream<Uint8Array>} decompressed byte stream
 */
export function decompressStream(readable) {
  const lz4 = createDecompressionStream();
  return readable.pipeThrough(lz4);
}
|
|
413
|
+
|
|
338
414
|
export { wasmExports };
|
|
339
415
|
|