@addmaple/lz4 0.1.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -0
- package/dist/browser-inline.d.ts +4 -1
- package/dist/browser-inline.js +16 -9
- package/dist/browser.d.ts +4 -1
- package/dist/browser.js +16 -9
- package/dist/core.d.ts +10 -1
- package/dist/core.js +174 -2
- package/dist/custom.js +404 -3
- package/dist/node-inline.d.ts +4 -1
- package/dist/node-inline.js +16 -9
- package/dist/node.d.ts +4 -1
- package/dist/node.js +14 -11
- package/dist/util.js +30 -0
- package/dist/wasm/lz4.base.wasm +0 -0
- package/dist/wasm/lz4.simd.wasm +0 -0
- package/dist/wasm-inline/lz4.base.wasm.js +1 -1
- package/dist/wasm-inline/lz4.simd.wasm.js +1 -1
- package/package.json +8 -5
package/dist/custom.js
CHANGED

@@ -1,7 +1,131 @@
-import {
+import {
+  compress_lz4,
+  compress_lz4_block,
+  decompress_lz4,
+  decompress_lz4_block,
+  wasmExports,
+  alloc,
+  free,
+  memoryU8,
+  ensureReady
+} from './core.js';
 
-
-
+function toBytes(input) {
+  if (input instanceof Uint8Array) return input;
+  if (ArrayBuffer.isView(input)) return new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
+  if (input instanceof ArrayBuffer) return new Uint8Array(input);
+  if (typeof input === 'string') return new TextEncoder().encode(input);
+  throw new TypeError("Expected a TypedArray, ArrayBuffer, or string");
+}
+
+// ============================================================================
+// Block API - Maximum speed, no frame overhead
+// Output is NOT compatible with standard LZ4 tools (lz4 CLI, etc.)
+// Use when you control both compression and decompression
+// ============================================================================
+
+/**
+ * Compress using raw LZ4 block format (maximum speed)
+ * ~5x faster than frame API due to no checksumming overhead
+ * @param {Uint8Array} input - Data to compress
+ * @returns {Promise<Uint8Array>} Compressed data (raw block format)
+ */
+export async function compressBlock(input) {
+  try {
+    return compress_lz4_block(input);
+  } catch (error) {
+    throw new Error(`Block compression failed: ${error.message}`);
+  }
+}
+
+/**
+ * Decompress raw LZ4 block format
+ * @param {Uint8Array} input - Compressed data (raw block format)
+ * @param {number} originalSize - Original uncompressed size (REQUIRED)
+ * @returns {Promise<Uint8Array>} Decompressed data
+ */
+export async function decompressBlock(input, originalSize) {
+  if (typeof originalSize !== 'number' || originalSize <= 0) {
+    throw new Error('decompressBlock requires originalSize parameter');
+  }
+  await ensureReady();
+
+  const view = toBytes(input);
+  const len = view.byteLength;
+
+  const inPtr = alloc(len);
+  const outPtr = alloc(originalSize);
+
+  try {
+    memoryU8().set(view, inPtr);
+    const written = wasmExports().decompress_lz4_block(inPtr, len, outPtr, originalSize);
+
+    if (written < 0) {
+      throw new Error('Block decompression failed');
+    }
+
+    const result = memoryU8().slice(outPtr, outPtr + written);
+    return result;
+  } finally {
+    free(inPtr, len);
+    free(outPtr, originalSize);
+  }
+}
+
+// ============================================================================
+// Packed Block API - Block format with size prefix (self-contained)
+// Perfect for network transfer when you control both client and server
+// Format: [4 bytes: original size (little-endian)] + [compressed data]
+// ============================================================================
+
+/**
+ * Compress with size prefix - ready for network transfer
+ * Output includes original size, so decompression doesn't need it separately
+ * @param {Uint8Array} input - Data to compress
+ * @returns {Promise<Uint8Array>} [4-byte size prefix] + [compressed block]
+ */
+export async function compressPacked(input) {
+  const view = toBytes(input);
+  const compressed = await compressBlock(view);
+
+  // Prepend 4-byte little-endian size
+  const result = new Uint8Array(4 + compressed.length);
+  const dataView = new DataView(result.buffer);
+  dataView.setUint32(0, view.length, true); // little-endian
+  result.set(compressed, 4);
+
+  return result;
+}
+
+/**
+ * Decompress packed format (with size prefix)
+ * @param {Uint8Array} input - Packed compressed data (from compressPacked)
+ * @returns {Promise<Uint8Array>} Decompressed data
+ */
+export async function decompressPacked(input) {
+  const view = toBytes(input);
+  if (view.length < 4) {
+    throw new Error('Invalid packed data: too short');
+  }
+
+  const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength);
+  const originalSize = dataView.getUint32(0, true); // little-endian
+  const compressed = view.subarray(4);
+
+  return decompressBlock(compressed, originalSize);
+}
+
+// ============================================================================
+// Frame API - Standard LZ4 frame format (compatible with lz4 CLI tools)
+// Includes headers and checksums for integrity
+// ============================================================================
+
+/**
+ * Compress using standard LZ4 frame format
+ * Compatible with lz4 CLI and other standard tools
+ * @param {Uint8Array} input - Data to compress
+ * @returns {Promise<Uint8Array>} Compressed data (LZ4 frame format)
+ */
 export async function compress(input) {
   try {
     return compress_lz4(input);
@@ -10,5 +134,282 @@ export async function compress(input) {
   }
 }
 
+// Streaming compression API
+//
+// Note: We don't use wasm-bindgen-lite's createTransformStream() helper here because
+// compression/decompression requires stateful streaming:
+// - Compression: Must accumulate input chunks until finish() is called
+// - Decompression: Must handle partial frames and buffer incomplete data
+//
+// createTransformStream() is designed for stateless transformations where each chunk
+// is processed independently. Instead, we use a manual handle-based approach that:
+// 1. Creates compressor/decompressor handles in Rust (stateful)
+// 2. Maintains state between chunks via handles
+// 3. Manually manages WASM memory allocation/freeing
+export class StreamingCompressor {
+  constructor() {
+    this._initPromise = ensureReady();
+    this.handle = null;
+  }
+
+  async _ensureInit() {
+    await this._initPromise;
+    if (this.handle === null) {
+      this.handle = wasmExports().create_compressor();
+      if (this.handle === 0) {
+        throw new Error('Failed to create compressor');
+      }
+    }
+  }
+
+  async compressChunk(input, finish = false) {
+    await this._ensureInit();
+    if (this.handle === 0) {
+      throw new Error('Compressor already destroyed');
+    }
+
+    const view = toBytes(input);
+    const len = view.byteLength;
+    const outLen = len + 1024; // LZ4 compression typically produces smaller output
+
+    const inPtr = alloc(len);
+    const outPtr = alloc(outLen);
+
+    try {
+      memoryU8().set(view, inPtr);
+      const written = wasmExports().compress_chunk(this.handle, inPtr, len, outPtr, outLen, finish ? 1 : 0);
+
+      if (written < 0) {
+        if (written === -1) {
+          throw new Error('Compression failed');
+        } else {
+          // Negative value indicates needed buffer size
+          free(outPtr, outLen);
+          const neededLen = -written;
+          const newOutPtr = alloc(neededLen);
+          memoryU8().set(view, inPtr);
+          const retryWritten = wasmExports().compress_chunk(this.handle, inPtr, len, newOutPtr, neededLen, finish ? 1 : 0);
+          if (retryWritten < 0) {
+            free(newOutPtr, neededLen);
+            throw new Error('Compression failed after retry');
+          }
+          const result = memoryU8().slice(newOutPtr, newOutPtr + retryWritten);
+          free(newOutPtr, neededLen);
+          free(inPtr, len);
+          if (finish) {
+            this.handle = 0;
+          }
+          return result;
+        }
+      }
+
+      if (written === 0) {
+        // No output yet (buffering)
+        free(outPtr, outLen);
+        free(inPtr, len);
+        return new Uint8Array(0);
+      }
+
+      const result = memoryU8().slice(outPtr, outPtr + written);
+      free(outPtr, outLen);
+      free(inPtr, len);
+
+      if (finish) {
+        this.handle = 0;
+      }
+
+      return result;
+    } catch (error) {
+      free(outPtr, outLen);
+      free(inPtr, len);
+      throw new Error(`Compression failed: ${error.message}`);
+    }
+  }
+
+  async destroy() {
+    await this._ensureInit();
+    if (this.handle !== 0 && this.handle !== null) {
+      wasmExports().destroy_compressor(this.handle);
+      this.handle = 0;
+    }
+  }
+}
+
+// Streaming decompression API
+//
+// See note above about why we use manual handles instead of createTransformStream()
+export class StreamingDecompressor {
+  constructor() {
+    this._initPromise = ensureReady();
+    this.handle = null;
+  }
+
+  async _ensureInit() {
+    await this._initPromise;
+    if (this.handle === null) {
+      this.handle = wasmExports().create_decompressor();
+      if (this.handle === 0) {
+        throw new Error('Failed to create decompressor');
+      }
+    }
+  }
+
+  async decompressChunk(input, finish = false) {
+    await this._ensureInit();
+    if (this.handle === 0) {
+      throw new Error('Decompressor already destroyed');
+    }
+
+    const view = toBytes(input);
+    const len = view.byteLength;
+    const outLen = len * 4; // Decompressed data is typically larger
+
+    const inPtr = alloc(len);
+    const outPtr = alloc(outLen);
+
+    try {
+      memoryU8().set(view, inPtr);
+      const written = wasmExports().decompress_chunk(this.handle, inPtr, len, outPtr, outLen, finish ? 1 : 0);
+
+      if (written < 0) {
+        if (written === -1) {
+          throw new Error('Decompression failed');
+        } else {
+          // Negative value indicates needed buffer size
+          free(outPtr, outLen);
+          const neededLen = -written;
+          const newOutPtr = alloc(neededLen);
+          memoryU8().set(view, inPtr);
+          const retryWritten = wasmExports().decompress_chunk(this.handle, inPtr, len, newOutPtr, neededLen, finish ? 1 : 0);
+          if (retryWritten < 0) {
+            free(newOutPtr, neededLen);
+            throw new Error('Decompression failed after retry');
+          }
+          const result = memoryU8().slice(newOutPtr, newOutPtr + retryWritten);
+          free(newOutPtr, neededLen);
+          free(inPtr, len);
+          if (finish) {
+            this.handle = 0;
+          }
+          return result;
+        }
+      }
+
+      if (written === 0) {
+        // No output yet (buffering)
+        free(outPtr, outLen);
+        free(inPtr, len);
+        return new Uint8Array(0);
+      }
+
+      const result = memoryU8().slice(outPtr, outPtr + written);
+      free(outPtr, outLen);
+      free(inPtr, len);
+
+      if (finish) {
+        this.handle = 0;
+      }
+
+      return result;
+    } catch (error) {
+      free(outPtr, outLen);
+      free(inPtr, len);
+      throw new Error(`Decompression failed: ${error.message}`);
+    }
+  }
+
+  async destroy() {
+    await this._ensureInit();
+    if (this.handle !== 0 && this.handle !== null) {
+      wasmExports().destroy_decompressor(this.handle);
+      this.handle = 0;
+    }
+  }
+}
+
+// One-shot decompression
+export async function decompress(input) {
+  try {
+    return decompress_lz4(input);
+  } catch (error) {
+    throw new Error(`Decompression failed: ${error.message}`);
+  }
+}
+
+// ============================================================================
+// Ergonomic streaming helpers (Web Streams)
+// ============================================================================
+
+function requireTransformStream() {
+  if (typeof TransformStream === 'undefined') {
+    throw new Error('TransformStream is not available in this runtime');
+  }
+}
+
+/**
+ * Create a TransformStream that LZ4-compresses a byte stream.
+ *
+ * This uses the LZ4 *frame* streaming API so the output can be decoded without
+ * knowing the original size ahead of time (good for fetch request bodies).
+ *
+ * @returns {TransformStream<Uint8Array, Uint8Array>}
+ */
+export function createCompressionStream() {
+  requireTransformStream();
+  const enc = new StreamingCompressor();
+
+  return new TransformStream({
+    async transform(chunk, controller) {
+      const out = await enc.compressChunk(toBytes(chunk), false);
+      if (out.length) controller.enqueue(out);
+    },
+    async flush(controller) {
+      // Finish the stream (flush footer / close handle)
+      const out = await enc.compressChunk(new Uint8Array(0), true);
+      if (out.length) controller.enqueue(out);
+    },
+  });
+}
+
+/**
+ * Create a TransformStream that LZ4-decompresses a byte stream.
+ *
+ * Note: current LZ4 frame streaming decompression buffers until finish=true,
+ * so output is typically produced during `flush()`.
+ *
+ * @returns {TransformStream<Uint8Array, Uint8Array>}
+ */
+export function createDecompressionStream() {
+  requireTransformStream();
+  const dec = new StreamingDecompressor();
+
+  return new TransformStream({
+    async transform(chunk, controller) {
+      const out = await dec.decompressChunk(toBytes(chunk), false);
+      if (out.length) controller.enqueue(out);
+    },
+    async flush(controller) {
+      const out = await dec.decompressChunk(new Uint8Array(0), true);
+      if (out.length) controller.enqueue(out);
+    },
+  });
+}
+
+/**
+ * Convenience helper: readable.pipeThrough(createCompressionStream()).
+ * @param {ReadableStream<Uint8Array>} readable
+ */
+export function compressStream(readable) {
+  return readable.pipeThrough(createCompressionStream());
+}
+
+/**
+ * Convenience helper: readable.pipeThrough(createDecompressionStream()).
+ * @param {ReadableStream<Uint8Array>} readable
+ */
+export function decompressStream(readable) {
+  return readable.pipeThrough(createDecompressionStream());
+}
+
 export { wasmExports };
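
The new block and packed helpers above are self-contained; the following is a usage sketch, assuming the custom build is reachable via an `@addmaple/lz4/custom` export (the exports map is not shown in this diff, so that specifier is an assumption):

// Hypothetical import specifier; adjust to the package's actual exports map.
import { compressBlock, decompressBlock, compressPacked, decompressPacked } from '@addmaple/lz4/custom';

const original = new TextEncoder().encode('hello hello hello hello');

// Packed block format: a 4-byte little-endian size prefix travels with the payload.
const packed = await compressPacked(original);
const unpacked = await decompressPacked(packed);

// Raw block format: the caller must carry the original size out of band.
const block = await compressBlock(original);
const restored = await decompressBlock(block, original.byteLength);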
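A sketch of the new Web Streams helpers on a runtime that provides TransformStream and fetch; the import specifier and URL are illustrative assumptions:

import { compressStream, decompressStream } from '@addmaple/lz4/custom'; // hypothetical specifier

// Compress a response body on the fly, then round-trip it back.
const response = await fetch('https://example.com/data.bin');
const lz4Frames = compressStream(response.body); // ReadableStream<Uint8Array> of LZ4 frame data
const restored = decompressStream(lz4Frames);    // output mostly arrives at flush(), per the note above
const bytes = new Uint8Array(await new Response(restored).arrayBuffer());
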
package/dist/node-inline.d.ts
CHANGED
package/dist/node-inline.js
CHANGED

@@ -1,19 +1,26 @@
 import { setInstance, registerInit } from "./core.js";
-import {
+import { instantiateWithBackend } from "./util.js";
 
-import { wasmBytes as
-import { wasmBytes as
+import { wasmBytes as _simdBytes } from "./wasm-inline/lz4.simd.wasm.js";
+import { wasmBytes as _baseBytes } from "./wasm-inline/lz4.base.wasm.js";
 
-async function
-return
+async function getSimdBytes() {
+  return _simdBytes;
+}
+
+async function getBaseBytes() {
+  return _baseBytes;
 }
 
 
 let _ready = null;
-
-
-
-
+let _backend = null;
+export function init(imports = {}, opts = {}) {
+  const backend = opts.backend || 'auto';
+  if (_ready && _backend === backend) return _ready;
+  _backend = backend;
+  return (_ready = (async () => {
+    const { instance } = await instantiateWithBackend({ getSimdBytes, getBaseBytes, imports, backend });
     setInstance(instance);
   })());
 }
package/dist/node.d.ts
CHANGED
package/dist/node.js
CHANGED

@@ -1,5 +1,5 @@
 import { setInstance, registerInit } from "./core.js";
-import {
+import { instantiateWithBackend } from "./util.js";
 
 import { readFile } from "node:fs/promises";
 import { fileURLToPath } from "node:url";
@@ -7,20 +7,23 @@ import { fileURLToPath } from "node:url";
 const simdPath = fileURLToPath(new URL("./wasm/lz4.simd.wasm", import.meta.url));
 const basePath = fileURLToPath(new URL("./wasm/lz4.base.wasm", import.meta.url));
 
-async function
-
-
-
-
-return
+async function getSimdBytes() {
+  return readFile(simdPath);
+}
+
+async function getBaseBytes() {
+  return readFile(basePath);
 }
 
 
 let _ready = null;
-
-
-
-
+let _backend = null;
+export function init(imports = {}, opts = {}) {
+  const backend = opts.backend || 'auto';
+  if (_ready && _backend === backend) return _ready;
+  _backend = backend;
+  return (_ready = (async () => {
+    const { instance } = await instantiateWithBackend({ getSimdBytes, getBaseBytes, imports, backend });
     setInstance(instance);
   })());
 }
package/dist/util.js
CHANGED

@@ -12,3 +12,33 @@ export async function instantiateWithFallback(
     return { instance, backend: 'wasm' }
   }
 }
+
+export async function instantiateWithBackend({
+  getSimdBytes,
+  getBaseBytes,
+  imports,
+  backend = 'auto',
+}) {
+  if (backend === 'base') {
+    const baseBytes = await getBaseBytes()
+    const { instance } = await WebAssembly.instantiate(baseBytes, imports)
+    return { instance, backend: 'wasm' }
+  }
+
+  if (backend === 'simd') {
+    const simdBytes = await getSimdBytes()
+    const { instance } = await WebAssembly.instantiate(simdBytes, imports)
+    return { instance, backend: 'wasm-simd' }
+  }
+
+  // auto: try simd first, then fallback to baseline
+  try {
+    const simdBytes = await getSimdBytes()
+    const { instance } = await WebAssembly.instantiate(simdBytes, imports)
+    return { instance, backend: 'wasm-simd' }
+  } catch {
+    const baseBytes = await getBaseBytes()
+    const { instance } = await WebAssembly.instantiate(baseBytes, imports)
+    return { instance, backend: 'wasm' }
+  }
+}
package/dist/wasm/lz4.base.wasm
CHANGED
Binary file

package/dist/wasm/lz4.simd.wasm
CHANGED
Binary file