simple-zstd 1.4.2 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +46 -0
- package/.github/workflows/release.yml +45 -0
- package/.prettierignore +5 -0
- package/.prettierrc +8 -0
- package/.release-it.json +28 -0
- package/CHANGELOG.md +26 -0
- package/README.md +690 -47
- package/dist/src/buffer-writable.d.ts +12 -0
- package/dist/src/buffer-writable.js +41 -0
- package/dist/src/index.d.ts +49 -0
- package/dist/src/index.js +430 -0
- package/dist/src/peek-transform.d.ts +16 -0
- package/dist/src/peek-transform.js +145 -0
- package/dist/src/process-duplex.d.ts +11 -0
- package/dist/src/process-duplex.js +157 -0
- package/dist/src/process-queue.d.ts +8 -0
- package/dist/src/process-queue.js +94 -0
- package/dist/src/types.d.ts +34 -0
- package/dist/src/types.js +3 -0
- package/eslint.config.js +49 -0
- package/package.json +32 -16
- package/src/buffer-writable.ts +30 -0
- package/src/index.ts +472 -0
- package/src/is-zst.d.ts +5 -0
- package/src/peek-transform.ts +153 -0
- package/src/process-duplex.ts +164 -0
- package/src/process-queue.ts +97 -0
- package/src/types.ts +35 -0
- package/tsconfig.json +110 -0
- package/.eslintrc.js +0 -18
- package/.nyc_output/4b36a1ef-a01d-4de7-a4be-e966f315cbd7.json +0 -1
- package/.nyc_output/5d73987b-f188-488b-8441-66c67bb19076.json +0 -1
- package/.nyc_output/processinfo/4b36a1ef-a01d-4de7-a4be-e966f315cbd7.json +0 -1
- package/.nyc_output/processinfo/5d73987b-f188-488b-8441-66c67bb19076.json +0 -1
- package/.nyc_output/processinfo/index.json +0 -1
- package/.travis.yml +0 -9
- package/coverage/base.css +0 -224
- package/coverage/block-navigation.js +0 -87
- package/coverage/buffer-writable.js.html +0 -154
- package/coverage/favicon.png +0 -0
- package/coverage/index.html +0 -146
- package/coverage/index.js.html +0 -841
- package/coverage/oven.js.html +0 -235
- package/coverage/prettify.css +0 -1
- package/coverage/prettify.js +0 -2
- package/coverage/sort-arrow-sprite.png +0 -0
- package/coverage/sorter.js +0 -196
- package/index.js +0 -68
package/src/index.ts
ADDED
|
@@ -0,0 +1,472 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import { writeFile } from 'node:fs/promises';
|
|
3
|
+
import { createHash } from 'node:crypto';
|
|
4
|
+
import { Readable, Duplex, PassThrough } from 'node:stream';
|
|
5
|
+
import { pipeline } from 'node:stream/promises';
|
|
6
|
+
import { execSync } from 'node:child_process';
|
|
7
|
+
|
|
8
|
+
import isZst from 'is-zst';
|
|
9
|
+
import { file } from 'tmp-promise';
|
|
10
|
+
import Debug from 'debug';
|
|
11
|
+
|
|
12
|
+
const debug = Debug('SimpleZSTD');
|
|
13
|
+
|
|
14
|
+
import ProcessQueue from './process-queue';
|
|
15
|
+
import BufferWritable from './buffer-writable';
|
|
16
|
+
import ProcessDuplex from './process-duplex';
|
|
17
|
+
import PeekPassThrough from './peek-transform';
|
|
18
|
+
import { ZSTDOpts, PoolOpts } from './types';
|
|
19
|
+
|
|
20
|
+
// Export types for consumers
|
|
21
|
+
export type { ZSTDOpts, PoolOpts, CompressOpts, DecompressOpts, DictionaryObject } from './types';
|
|
22
|
+
|
|
23
|
+
// Dictionary cache to avoid recreating temp files for the same dictionary buffer
|
|
24
|
+
// Map: hash -> { path: string, cleanup: () => void, refCount: number }
|
|
25
|
+
const dictionaryCache = new Map<string, { path: string; cleanup: () => void; refCount: number }>();
|
|
26
|
+
|
|
27
|
+
function hashBuffer(buffer: Buffer): string {
|
|
28
|
+
return createHash('sha256').update(buffer).digest('hex');
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
async function getCachedDictionaryPath(
|
|
32
|
+
dictionary: Buffer
|
|
33
|
+
): Promise<{ path: string; cleanup: () => void }> {
|
|
34
|
+
const hash = hashBuffer(dictionary);
|
|
35
|
+
|
|
36
|
+
let cached = dictionaryCache.get(hash);
|
|
37
|
+
if (cached) {
|
|
38
|
+
cached.refCount++;
|
|
39
|
+
debug(`Dictionary cache hit: ${hash.slice(0, 8)}... (refCount: ${cached.refCount})`);
|
|
40
|
+
return {
|
|
41
|
+
path: cached.path,
|
|
42
|
+
cleanup: () => {
|
|
43
|
+
cached!.refCount--;
|
|
44
|
+
debug(
|
|
45
|
+
`Dictionary refCount decreased: ${hash.slice(0, 8)}... (refCount: ${cached!.refCount})`
|
|
46
|
+
);
|
|
47
|
+
// Don't call async cleanup here - it will be handled by clearDictionaryCache()
|
|
48
|
+
// or when all references are released
|
|
49
|
+
},
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
debug(`Dictionary cache miss: ${hash.slice(0, 8)}... - creating temp file`);
|
|
54
|
+
const { path, cleanup: tmpCleanup } = await file({ prefix: 'zstd-dict-' });
|
|
55
|
+
await writeFile(path, dictionary);
|
|
56
|
+
|
|
57
|
+
dictionaryCache.set(hash, {
|
|
58
|
+
path,
|
|
59
|
+
cleanup: tmpCleanup,
|
|
60
|
+
refCount: 1,
|
|
61
|
+
});
|
|
62
|
+
|
|
63
|
+
return {
|
|
64
|
+
path,
|
|
65
|
+
cleanup: () => {
|
|
66
|
+
const cached = dictionaryCache.get(hash);
|
|
67
|
+
if (cached) {
|
|
68
|
+
cached.refCount--;
|
|
69
|
+
debug(
|
|
70
|
+
`Dictionary refCount decreased: ${hash.slice(0, 8)}... (refCount: ${cached.refCount})`
|
|
71
|
+
);
|
|
72
|
+
// Don't call async cleanup here - it will be handled by clearDictionaryCache()
|
|
73
|
+
// or when all references are released
|
|
74
|
+
}
|
|
75
|
+
},
|
|
76
|
+
};
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Clear the dictionary cache and cleanup all temporary files
|
|
81
|
+
* This is useful for testing or manual cache management
|
|
82
|
+
* @returns Promise that resolves when all cleanups are complete
|
|
83
|
+
*/
|
|
84
|
+
export async function clearDictionaryCache(): Promise<void> {
|
|
85
|
+
debug('Clearing dictionary cache');
|
|
86
|
+
const cleanupPromises: Promise<void>[] = [];
|
|
87
|
+
|
|
88
|
+
for (const [hash, cached] of dictionaryCache.entries()) {
|
|
89
|
+
debug(`Cleaning up cached dictionary: ${hash.slice(0, 8)}...`);
|
|
90
|
+
// tmp-promise cleanup() returns a Promise
|
|
91
|
+
cleanupPromises.push(Promise.resolve(cached.cleanup()));
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
await Promise.all(cleanupPromises);
|
|
95
|
+
dictionaryCache.clear();
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
const find = process.platform === 'win32' ? 'where zstd.exe' : 'which zstd';
|
|
99
|
+
|
|
100
|
+
let bin: string;
|
|
101
|
+
|
|
102
|
+
try {
|
|
103
|
+
bin = execSync(find, { env: process.env }).toString().replace(/\n$/, '').replace(/\r$/, '');
|
|
104
|
+
debug(bin);
|
|
105
|
+
} catch {
|
|
106
|
+
throw new Error('Can not access zstd! Is it installed?');
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
try {
|
|
110
|
+
fs.accessSync(bin, fs.constants.X_OK);
|
|
111
|
+
} catch {
|
|
112
|
+
throw new Error('zstd is not executable');
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
async function CreateCompressStream(compLevel: number, opts: ZSTDOpts): Promise<Duplex> {
|
|
116
|
+
let lvl = compLevel;
|
|
117
|
+
let zo = opts.zstdOptions || [];
|
|
118
|
+
let path: string | null = null;
|
|
119
|
+
let cleanup: () => void = () => null;
|
|
120
|
+
|
|
121
|
+
if (!lvl) lvl = 3;
|
|
122
|
+
if (lvl < 1 || lvl > 22) lvl = 3;
|
|
123
|
+
|
|
124
|
+
// Dictionary
|
|
125
|
+
if (opts.dictionary && 'path' in opts.dictionary) {
|
|
126
|
+
zo = [...zo, '-D', `${opts.dictionary.path}`];
|
|
127
|
+
} else if (Buffer.isBuffer(opts.dictionary)) {
|
|
128
|
+
// Use cached dictionary to avoid recreating temp files
|
|
129
|
+
({ path, cleanup } = await getCachedDictionaryPath(opts.dictionary));
|
|
130
|
+
zo = [...zo, '-D', `${path}`];
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
let c: Duplex;
|
|
134
|
+
|
|
135
|
+
try {
|
|
136
|
+
debug(bin, ['-zc', `-${lvl}`, ...zo], opts.spawnOptions, opts.streamOptions);
|
|
137
|
+
c = new ProcessDuplex(bin, ['-zc', `-${lvl}`, ...zo], opts.spawnOptions, opts.streamOptions);
|
|
138
|
+
} catch (err) {
|
|
139
|
+
// cleanup if error;
|
|
140
|
+
cleanup();
|
|
141
|
+
throw err;
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
c.on('exit', (code: number, signal) => {
|
|
145
|
+
debug('c exit', code, signal);
|
|
146
|
+
if (code !== 0) {
|
|
147
|
+
setImmediate(() => {
|
|
148
|
+
c.destroy(new Error(`zstd exited non zero. code: ${code} signal: ${signal}`));
|
|
149
|
+
});
|
|
150
|
+
}
|
|
151
|
+
cleanup();
|
|
152
|
+
});
|
|
153
|
+
|
|
154
|
+
return c;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
function CompressBuffer(buffer: Buffer, c: Duplex): Promise<Buffer> {
|
|
158
|
+
return new Promise((resolve, reject) => {
|
|
159
|
+
const w = new BufferWritable({});
|
|
160
|
+
|
|
161
|
+
c.once('close', () => {
|
|
162
|
+
setImmediate(() => {
|
|
163
|
+
const result = w.getBuffer();
|
|
164
|
+
if (result) {
|
|
165
|
+
resolve(result);
|
|
166
|
+
} else {
|
|
167
|
+
reject(new Error('Compression failed'));
|
|
168
|
+
}
|
|
169
|
+
});
|
|
170
|
+
});
|
|
171
|
+
|
|
172
|
+
pipeline(Readable.from(buffer), c, w)
|
|
173
|
+
.then(() => {
|
|
174
|
+
c.destroy();
|
|
175
|
+
})
|
|
176
|
+
.catch((err: Error) => {
|
|
177
|
+
reject(err);
|
|
178
|
+
c.destroy();
|
|
179
|
+
});
|
|
180
|
+
});
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/**
 * Create a decompression Duplex backed by a spawned `zstd -dc` process.
 *
 * The returned wrapper peeks at the first bytes of input: when they are zstd
 * data it swaps in the zstd process stream; otherwise the input is passed
 * through unchanged and the idle zstd process is ended (its non-zero exit is
 * suppressed via the `terminate` flag).
 *
 * @param opts - zstd CLI options, optional dictionary (path or Buffer),
 *               spawn options and stream options.
 * @returns Promise resolving to the wrapping Duplex stream.
 */
async function CreateDecompressStream(opts: ZSTDOpts): Promise<Duplex> {
  // Dictionary
  let zo = opts.zstdOptions || [];
  let path: string | null = null;
  let cleanup: () => void = () => null;

  // Set when input turns out not to be zstd; suppresses the "exited non
  // zero" error from the deliberately-ended zstd process below.
  let terminate = false;

  if (opts.dictionary && 'path' in opts.dictionary) {
    zo = [...zo, '-D', `${opts.dictionary.path}`];
  } else if (Buffer.isBuffer(opts.dictionary)) {
    // Use cached dictionary to avoid recreating temp files
    ({ path, cleanup } = await getCachedDictionaryPath(opts.dictionary));
    zo = [...zo, '-D', `${path}`];
  }

  let d: Duplex;

  try {
    debug(bin, ['-dc', ...zo], opts.spawnOptions, opts.streamOptions);
    d = new ProcessDuplex(bin, ['-dc', ...zo], opts.spawnOptions, opts.streamOptions);
  } catch (err) {
    // cleanup if error: release the cached-dictionary reference
    cleanup();
    throw err;
  }

  d.on('exit', (code: number, signal) => {
    debug('d exit', code, signal);
    if (code !== 0 && !terminate) {
      // Defer destruction so remaining 'exit' handling completes first.
      setImmediate(() => {
        d.destroy(new Error(`zstd exited non zero. code: ${code} signal: ${signal}`));
      });
    }
    cleanup();
  });

  // Buffer up to 10 bytes of input, then decide: real decompression or
  // plain pass-through for non-zstd data.
  const wrapper = new PeekPassThrough({ maxBuffer: 10 }, (data: Buffer, swap) => {
    if (isZst(data)) {
      swap(null, d);
    } else {
      debug('not zstd');
      terminate = true;
      d.end();
      swap(null, new PassThrough());
    }
  });

  // CRITICAL: Wrap _destroy to ensure ProcessDuplex is always destroyed,
  // even when the wrapper is torn down before any swap happened.
  const originalDestroy = wrapper._destroy.bind(wrapper);
  wrapper._destroy = function (error: Error | null, callback: (error: Error | null) => void) {
    if (!d.destroyed) {
      d.destroy();
    }
    originalDestroy(error, callback);
  };

  return wrapper;
}
|
|
242
|
+
|
|
243
|
+
function DecompressBuffer(buffer: Buffer, d: Duplex): Promise<Buffer> {
|
|
244
|
+
return new Promise((resolve, reject) => {
|
|
245
|
+
const w = new BufferWritable({});
|
|
246
|
+
|
|
247
|
+
d.once('close', () => {
|
|
248
|
+
setImmediate(() => {
|
|
249
|
+
resolve(w.getBuffer() || Buffer.alloc(0));
|
|
250
|
+
});
|
|
251
|
+
});
|
|
252
|
+
|
|
253
|
+
pipeline(Readable.from(buffer), d, w)
|
|
254
|
+
.then(() => {
|
|
255
|
+
d.destroy();
|
|
256
|
+
})
|
|
257
|
+
.catch((err: Error) => {
|
|
258
|
+
reject(err);
|
|
259
|
+
d.destroy();
|
|
260
|
+
});
|
|
261
|
+
});
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
// Standalone Functions
|
|
265
|
+
|
|
266
|
+
export function compress(compLevel: number, opts: ZSTDOpts = {}): Promise<Duplex> {
|
|
267
|
+
return CreateCompressStream(compLevel, opts);
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
export async function compressBuffer(
|
|
271
|
+
buffer: Buffer,
|
|
272
|
+
compLevel: number,
|
|
273
|
+
opts: ZSTDOpts = {}
|
|
274
|
+
): Promise<Buffer> {
|
|
275
|
+
const c = await CreateCompressStream(compLevel, opts);
|
|
276
|
+
return CompressBuffer(buffer, c);
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
export function decompress(opts: ZSTDOpts = {}): Promise<Duplex> {
|
|
280
|
+
return CreateDecompressStream(opts);
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
export async function decompressBuffer(buffer: Buffer, opts: ZSTDOpts = {}): Promise<Buffer> {
|
|
284
|
+
const d = await CreateDecompressStream(opts);
|
|
285
|
+
return DecompressBuffer(buffer, d);
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
// SimpleZSTD Class
|
|
289
|
+
export class SimpleZSTD {
|
|
290
|
+
#compressQueue!: ProcessQueue<Duplex>;
|
|
291
|
+
#decompressQueue!: ProcessQueue<Duplex>;
|
|
292
|
+
#compressDictCleanup: () => void = () => null;
|
|
293
|
+
#decompressDictCleanup: () => void = () => null;
|
|
294
|
+
#ready;
|
|
295
|
+
#poolOptions?: PoolOpts;
|
|
296
|
+
|
|
297
|
+
private constructor(poolOptions?: PoolOpts) {
|
|
298
|
+
debug('constructor', poolOptions);
|
|
299
|
+
this.#poolOptions = poolOptions;
|
|
300
|
+
this.#compressDictCleanup = () => null;
|
|
301
|
+
this.#decompressDictCleanup = () => null;
|
|
302
|
+
|
|
303
|
+
this.#ready = new Promise((resolve, reject) => {
|
|
304
|
+
(async () => {
|
|
305
|
+
try {
|
|
306
|
+
// Handle compress queue dictionary
|
|
307
|
+
let compressDictPath: string | undefined = undefined;
|
|
308
|
+
const compressDict = poolOptions?.compressQueue?.dictionary;
|
|
309
|
+
if (compressDict && 'path' in compressDict) {
|
|
310
|
+
compressDictPath = compressDict.path;
|
|
311
|
+
} else if (compressDict && Buffer.isBuffer(compressDict)) {
|
|
312
|
+
const { path, cleanup } = await file({ prefix: 'zstd-dict-' });
|
|
313
|
+
this.#compressDictCleanup = cleanup;
|
|
314
|
+
await writeFile(path, compressDict);
|
|
315
|
+
compressDictPath = path;
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
// Handle decompress queue dictionary
|
|
319
|
+
let decompressDictPath: string | undefined = undefined;
|
|
320
|
+
const decompressDict = poolOptions?.decompressQueue?.dictionary;
|
|
321
|
+
if (decompressDict && 'path' in decompressDict) {
|
|
322
|
+
decompressDictPath = decompressDict.path;
|
|
323
|
+
} else if (decompressDict && Buffer.isBuffer(decompressDict)) {
|
|
324
|
+
const { path, cleanup } = await file({ prefix: 'zstd-dict-' });
|
|
325
|
+
this.#decompressDictCleanup = cleanup;
|
|
326
|
+
await writeFile(path, decompressDict);
|
|
327
|
+
decompressDictPath = path;
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
this.#compressQueue = new ProcessQueue(
|
|
331
|
+
poolOptions?.compressQueueSize || 0,
|
|
332
|
+
() => {
|
|
333
|
+
debug('compress factory');
|
|
334
|
+
return CreateCompressStream(poolOptions?.compressQueue?.compLevel || 3, {
|
|
335
|
+
...poolOptions?.compressQueue,
|
|
336
|
+
dictionary: compressDictPath ? { path: compressDictPath } : undefined,
|
|
337
|
+
});
|
|
338
|
+
},
|
|
339
|
+
async (p: Promise<Duplex>) => {
|
|
340
|
+
debug('compress cleanup');
|
|
341
|
+
const stream = await p;
|
|
342
|
+
await new Promise<void>((resolve) => {
|
|
343
|
+
if (stream.destroyed) {
|
|
344
|
+
resolve();
|
|
345
|
+
} else {
|
|
346
|
+
stream.once('close', () => resolve());
|
|
347
|
+
stream.destroy();
|
|
348
|
+
}
|
|
349
|
+
});
|
|
350
|
+
}
|
|
351
|
+
);
|
|
352
|
+
|
|
353
|
+
this.#decompressQueue = new ProcessQueue(
|
|
354
|
+
poolOptions?.decompressQueueSize || 0,
|
|
355
|
+
() => {
|
|
356
|
+
debug('decompress factory');
|
|
357
|
+
return CreateDecompressStream({
|
|
358
|
+
...poolOptions?.decompressQueue,
|
|
359
|
+
dictionary: decompressDictPath ? { path: decompressDictPath } : undefined,
|
|
360
|
+
});
|
|
361
|
+
},
|
|
362
|
+
async (p: Promise<Duplex>) => {
|
|
363
|
+
debug('decompress cleanup');
|
|
364
|
+
const stream = await p;
|
|
365
|
+
await new Promise<void>((resolve) => {
|
|
366
|
+
if (stream.destroyed) {
|
|
367
|
+
resolve();
|
|
368
|
+
} else {
|
|
369
|
+
stream.once('close', () => resolve());
|
|
370
|
+
stream.destroy();
|
|
371
|
+
}
|
|
372
|
+
});
|
|
373
|
+
}
|
|
374
|
+
);
|
|
375
|
+
|
|
376
|
+
debug('READY');
|
|
377
|
+
resolve(null);
|
|
378
|
+
} catch (err) {
|
|
379
|
+
reject(err);
|
|
380
|
+
}
|
|
381
|
+
})();
|
|
382
|
+
}).catch((err) => {
|
|
383
|
+
debug('ready error', err);
|
|
384
|
+
this.#compressDictCleanup();
|
|
385
|
+
this.#decompressDictCleanup();
|
|
386
|
+
this.#compressDictCleanup = () => null;
|
|
387
|
+
this.#decompressDictCleanup = () => null;
|
|
388
|
+
});
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
/**
|
|
392
|
+
* Create a new SimpleZSTD instance with process pooling
|
|
393
|
+
* @param poolOptions - Configuration for compression and decompression process pools
|
|
394
|
+
* @returns Promise resolving to initialized SimpleZSTD instance
|
|
395
|
+
*/
|
|
396
|
+
static async create(poolOptions?: PoolOpts): Promise<SimpleZSTD> {
|
|
397
|
+
const instance = new SimpleZSTD(poolOptions);
|
|
398
|
+
await instance.#ready;
|
|
399
|
+
return instance;
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
get queueStats() {
|
|
403
|
+
return {
|
|
404
|
+
compress: {
|
|
405
|
+
hits: this.#compressQueue.hits,
|
|
406
|
+
misses: this.#compressQueue.misses,
|
|
407
|
+
},
|
|
408
|
+
decompress: {
|
|
409
|
+
hits: this.#decompressQueue.hits,
|
|
410
|
+
misses: this.#decompressQueue.misses,
|
|
411
|
+
},
|
|
412
|
+
};
|
|
413
|
+
}
|
|
414
|
+
|
|
415
|
+
async destroy() {
|
|
416
|
+
await Promise.all([this.#compressQueue.destroy(), this.#decompressQueue.destroy()]);
|
|
417
|
+
this.#compressDictCleanup();
|
|
418
|
+
this.#decompressDictCleanup();
|
|
419
|
+
this.#compressDictCleanup = () => null;
|
|
420
|
+
this.#decompressDictCleanup = () => null;
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
/**
|
|
424
|
+
* Get a compression stream from the pool, or create a one-off stream with custom compression level
|
|
425
|
+
* @param compLevel - Optional compression level (1-22). If provided, creates a new stream instead of using the pool
|
|
426
|
+
* @returns Promise resolving to a Duplex compression stream
|
|
427
|
+
*/
|
|
428
|
+
async compress(compLevel?: number): Promise<Duplex> {
|
|
429
|
+
await this.#ready;
|
|
430
|
+
|
|
431
|
+
// If custom compression level is provided, create a one-off stream
|
|
432
|
+
if (compLevel !== undefined) {
|
|
433
|
+
return CreateCompressStream(compLevel, {
|
|
434
|
+
...this.#poolOptions?.compressQueue,
|
|
435
|
+
});
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
// Otherwise, acquire from pool
|
|
439
|
+
return this.#compressQueue.acquire();
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
/**
|
|
443
|
+
* Compress a buffer using the pool, or with a custom compression level
|
|
444
|
+
* @param buffer - Buffer to compress
|
|
445
|
+
* @param compLevel - Optional compression level (1-22). If provided, uses this level instead of pool default
|
|
446
|
+
* @returns Promise resolving to compressed buffer
|
|
447
|
+
*/
|
|
448
|
+
async compressBuffer(buffer: Buffer, compLevel?: number): Promise<Buffer> {
|
|
449
|
+
await this.#ready;
|
|
450
|
+
const c = await this.compress(compLevel);
|
|
451
|
+
return CompressBuffer(buffer, c);
|
|
452
|
+
}
|
|
453
|
+
|
|
454
|
+
async decompress(): Promise<Duplex> {
|
|
455
|
+
await this.#ready;
|
|
456
|
+
return this.#decompressQueue.acquire();
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
async decompressBuffer(buffer: Buffer): Promise<Buffer> {
|
|
460
|
+
await this.#ready;
|
|
461
|
+
const d = await this.#decompressQueue.acquire();
|
|
462
|
+
return DecompressBuffer(buffer, d);
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
// module.exports = {
|
|
467
|
+
// SimpleZSTD,
|
|
468
|
+
// compress,
|
|
469
|
+
// compressBuffer,
|
|
470
|
+
// decompress,
|
|
471
|
+
// decompressBuffer,
|
|
472
|
+
// };
|
package/src/peek-transform.ts
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import { Duplex } from 'node:stream';
|
|
2
|
+
import type { DuplexOptions } from 'node:stream';
|
|
3
|
+
|
|
4
|
+
// Callback the peek handler uses to report its decision: an error, or the
// stream to swap in (null means plain pass-through).
type SwapCallback = (err: Error | null, stream: Duplex | null) => void;
// Invoked once with the buffered prefix bytes; must call `swap` exactly once.
type PeekCallback = (data: Buffer, swap: SwapCallback) => void;

interface PeekOptions extends DuplexOptions {
  // Bytes to buffer before the peek callback fires (default 65536).
  maxBuffer?: number;
}
|
|
10
|
+
|
|
11
|
+
/**
 * Duplex that buffers the leading bytes of its input, hands them to a peek
 * callback, and then either swaps in a replacement Duplex (all further input
 * is forwarded to it and its output becomes this stream's output) or acts as
 * a plain pass-through when the callback supplies no stream.
 */
export default class PeekPassThrough extends Duplex {
  #maxBuffer: number; // bytes to buffer before peeking
  #buffer: Buffer[]; // chunks collected before the peek decision
  #bufferedLength: number; // total bytes in #buffer
  #peeked: boolean; // true once the peek callback has been invoked
  #peekCallback: PeekCallback;
  #swappedStream: Duplex | null; // stream chosen by the peek callback, if any
  #ended: boolean; // guards against pushing EOF (null) twice

  constructor(options: PeekOptions, peekCallback: PeekCallback) {
    super(options);
    this.#maxBuffer = options.maxBuffer || 65536;
    this.#buffer = [];
    this.#bufferedLength = 0;
    this.#peeked = false;
    this.#peekCallback = peekCallback;
    this.#swappedStream = null;
    this.#ended = false;
  }

  _write(chunk: Buffer, encoding: BufferEncoding, callback: (error?: Error | null) => void) {
    // If we already peeked and have a swapped stream, write to it
    if (this.#peeked && this.#swappedStream) {
      this.#swappedStream.write(chunk, encoding, callback);
      return;
    }

    // If we already peeked but no swap, just pass through
    if (this.#peeked) {
      this.push(chunk);
      callback();
      return;
    }

    // Buffer chunks until we have enough to peek
    this.#buffer.push(chunk);
    this.#bufferedLength += chunk.length;

    if (this.#bufferedLength >= this.#maxBuffer) {
      this.#performPeek(callback);
    } else {
      callback();
    }
  }

  _final(callback: (error?: Error | null) => void) {
    // If we haven't peeked yet and stream is ending, peek now
    // (handles inputs shorter than maxBuffer)
    if (!this.#peeked) {
      this.#performPeek(callback);
    } else if (this.#swappedStream) {
      // End the swapped stream; its 'end' event will push our EOF
      this.#swappedStream.end(callback);
    } else {
      // No swap - signal EOF for passthrough case
      if (!this.#ended) {
        this.#ended = true;
        this.push(null);
      }
      callback();
    }
  }

  _read(_size: number) {
    // If we have a swapped stream, resume it if paused
    // (downstream demand lifts the backpressure applied in the data handler)
    if (this.#swappedStream) {
      if (this.#swappedStream.isPaused && this.#swappedStream.isPaused()) {
        this.#swappedStream.resume();
      }
    }
  }

  // Invoke the peek callback with the buffered prefix and wire up whichever
  // stream it selects. Runs exactly once per instance (#peeked guard).
  #performPeek(callback: (error?: Error | null) => void) {
    this.#peeked = true;

    const peekData = Buffer.concat(this.#buffer);

    // Call the peek callback to determine which stream to use
    this.#peekCallback(peekData, (err: Error | null, swappedStream: Duplex | null) => {
      if (err) {
        callback(err);
        return;
      }

      if (swappedStream) {
        // We have a swapped stream
        this.#swappedStream = swappedStream;

        // Pipe swapped stream's output to our output; pause it when our
        // readable side reports backpressure
        swappedStream.on('data', (chunk: Buffer) => {
          if (!this.push(chunk)) {
            swappedStream.pause();
          }
        });

        swappedStream.on('end', () => {
          if (!this.#ended) {
            this.#ended = true;
            this.push(null);
          }
        });

        swappedStream.on('error', (streamErr: Error) => {
          this.destroy(streamErr);
        });

        // Resume reading when downstream is ready
        this.on('drain', () => {
          if (swappedStream.isPaused && swappedStream.isPaused()) {
            swappedStream.resume();
          }
        });

        // Write all buffered data to the swapped stream
        for (const bufferedChunk of this.#buffer) {
          swappedStream.write(bufferedChunk);
        }

        // Ensure the swapped stream is in flowing mode
        if (swappedStream.isPaused && swappedStream.isPaused()) {
          swappedStream.resume();
        }
      } else {
        // No swap - just push buffered data through
        for (const bufferedChunk of this.#buffer) {
          this.push(bufferedChunk);
        }
      }

      // Clear the buffer
      this.#buffer = [];
      this.#bufferedLength = 0;

      callback();
    });
  }

  _destroy(error: Error | null, callback: (error: Error | null) => void) {
    // Tear down the swapped stream alongside this one to avoid leaks
    if (this.#swappedStream && !this.#swappedStream.destroyed) {
      this.#swappedStream.destroy();
    }
    callback(error);
  }
}
|