@cj-tech-master/excelts 4.2.1-canary.20260111102127.f808a37 → 4.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/THIRD_PARTY_NOTICES.md +31 -0
- package/dist/browser/index.browser.d.ts +0 -1
- package/dist/browser/index.browser.js +0 -12
- package/dist/browser/modules/archive/byte-queue.d.ts +18 -0
- package/dist/browser/modules/archive/byte-queue.js +125 -0
- package/dist/browser/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/browser/modules/archive/{compression/compress.browser.d.ts → compress.browser.d.ts} +8 -2
- package/dist/{esm/modules/archive/compression → browser/modules/archive}/compress.browser.js +11 -3
- package/dist/browser/modules/archive/{compression/compress.d.ts → compress.d.ts} +2 -2
- package/dist/{esm/modules/archive/compression → browser/modules/archive}/compress.js +1 -1
- package/dist/browser/modules/archive/{compression/crc32.browser.d.ts → crc32.browser.d.ts} +1 -1
- package/dist/browser/modules/archive/{compression/crc32.d.ts → crc32.d.ts} +1 -1
- package/dist/browser/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/browser/modules/archive/defaults.d.ts +0 -1
- package/dist/browser/modules/archive/defaults.js +3 -6
- package/dist/browser/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/browser/modules/archive/{unzip/extract.d.ts → extract.d.ts} +2 -2
- package/dist/browser/modules/archive/index.base.d.ts +4 -4
- package/dist/browser/modules/archive/index.base.js +6 -3
- package/dist/browser/modules/archive/index.browser.d.ts +4 -3
- package/dist/browser/modules/archive/index.browser.js +7 -3
- package/dist/browser/modules/archive/index.d.ts +4 -3
- package/dist/browser/modules/archive/index.js +5 -3
- package/dist/browser/modules/archive/{unzip/stream.base.d.ts → parse.base.d.ts} +2 -36
- package/dist/browser/modules/archive/parse.base.js +644 -0
- package/dist/browser/modules/archive/{unzip/stream.browser.d.ts → parse.browser.d.ts} +1 -1
- package/dist/{esm/modules/archive/unzip/stream.browser.js → browser/modules/archive/parse.browser.js} +110 -371
- package/dist/browser/modules/archive/{unzip/stream.d.ts → parse.d.ts} +2 -2
- package/dist/{esm/modules/archive/unzip/stream.js → browser/modules/archive/parse.js} +5 -6
- package/dist/browser/modules/archive/{compression/streaming-compress.browser.d.ts → streaming-compress.browser.d.ts} +2 -2
- package/dist/browser/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/browser/modules/archive/{compression/streaming-compress.d.ts → streaming-compress.d.ts} +2 -2
- package/dist/browser/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/browser/modules/archive/{zip/stream.d.ts → streaming-zip.d.ts} +5 -28
- package/dist/{esm/modules/archive/zip/stream.js → browser/modules/archive/streaming-zip.js} +48 -192
- package/dist/browser/modules/archive/utils/bytes.js +16 -16
- package/dist/browser/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/browser/modules/archive/utils/timestamps.js +1 -62
- package/dist/browser/modules/archive/utils/zip-extra-fields.d.ts +1 -1
- package/dist/browser/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/browser/modules/archive/utils/zip-extra.d.ts +18 -0
- package/dist/browser/modules/archive/utils/zip-extra.js +68 -0
- package/dist/browser/modules/archive/zip-builder.d.ts +117 -0
- package/dist/browser/modules/archive/zip-builder.js +292 -0
- package/dist/browser/modules/archive/zip-constants.d.ts +18 -0
- package/dist/browser/modules/archive/zip-constants.js +23 -0
- package/dist/{esm/modules/archive/zip → browser/modules/archive}/zip-entry-metadata.js +3 -3
- package/dist/{types/modules/archive/unzip → browser/modules/archive}/zip-parser.d.ts +1 -1
- package/dist/{esm/modules/archive/unzip → browser/modules/archive}/zip-parser.js +24 -38
- package/dist/browser/modules/archive/{zip-spec/zip-records.d.ts → zip-records.d.ts} +0 -20
- package/dist/browser/modules/archive/zip-records.js +84 -0
- package/dist/browser/modules/excel/stream/workbook-reader.browser.js +1 -1
- package/dist/browser/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
- package/dist/browser/modules/excel/stream/workbook-writer.browser.js +1 -1
- package/dist/browser/modules/excel/xlsx/xlsx.browser.js +6 -3
- package/dist/browser/modules/excel/xlsx/xlsx.js +1 -1
- package/dist/browser/modules/stream/streams.browser.d.ts +30 -28
- package/dist/browser/modules/stream/streams.browser.js +710 -830
- package/dist/browser/modules/stream/streams.js +58 -140
- package/dist/cjs/modules/archive/byte-queue.js +129 -0
- package/dist/cjs/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/cjs/modules/archive/{compression/compress.browser.js → compress.browser.js} +11 -3
- package/dist/cjs/modules/archive/{compression/compress.js → compress.js} +1 -1
- package/dist/cjs/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/cjs/modules/archive/defaults.js +4 -7
- package/dist/cjs/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/cjs/modules/archive/index.base.js +19 -9
- package/dist/cjs/modules/archive/index.browser.js +10 -4
- package/dist/cjs/modules/archive/index.js +8 -4
- package/dist/cjs/modules/archive/parse.base.js +666 -0
- package/dist/cjs/modules/archive/{unzip/stream.browser.js → parse.browser.js} +111 -372
- package/dist/cjs/modules/archive/{unzip/stream.js → parse.js} +8 -9
- package/dist/cjs/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/cjs/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/cjs/modules/archive/{zip/stream.js → streaming-zip.js} +50 -194
- package/dist/cjs/modules/archive/utils/bytes.js +16 -16
- package/dist/cjs/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/cjs/modules/archive/utils/timestamps.js +3 -64
- package/dist/cjs/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/cjs/modules/archive/utils/zip-extra.js +74 -0
- package/dist/cjs/modules/archive/zip-builder.js +297 -0
- package/dist/cjs/modules/archive/zip-constants.js +26 -0
- package/dist/cjs/modules/archive/{zip/zip-entry-metadata.js → zip-entry-metadata.js} +5 -5
- package/dist/cjs/modules/archive/{unzip/zip-parser.js → zip-parser.js} +33 -47
- package/dist/cjs/modules/archive/zip-records.js +90 -0
- package/dist/cjs/modules/excel/stream/workbook-reader.browser.js +2 -2
- package/dist/cjs/modules/excel/stream/workbook-writer.browser.js +4 -4
- package/dist/cjs/modules/excel/xlsx/xlsx.browser.js +9 -6
- package/dist/cjs/modules/excel/xlsx/xlsx.js +2 -2
- package/dist/cjs/modules/stream/streams.browser.js +710 -830
- package/dist/cjs/modules/stream/streams.js +58 -140
- package/dist/esm/index.browser.js +0 -12
- package/dist/esm/modules/archive/byte-queue.js +125 -0
- package/dist/esm/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/{browser/modules/archive/compression → esm/modules/archive}/compress.browser.js +11 -3
- package/dist/{browser/modules/archive/compression → esm/modules/archive}/compress.js +1 -1
- package/dist/esm/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/esm/modules/archive/defaults.js +3 -6
- package/dist/esm/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/esm/modules/archive/index.base.js +6 -3
- package/dist/esm/modules/archive/index.browser.js +7 -3
- package/dist/esm/modules/archive/index.js +5 -3
- package/dist/esm/modules/archive/parse.base.js +644 -0
- package/dist/{browser/modules/archive/unzip/stream.browser.js → esm/modules/archive/parse.browser.js} +110 -371
- package/dist/{browser/modules/archive/unzip/stream.js → esm/modules/archive/parse.js} +5 -6
- package/dist/esm/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/esm/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/{browser/modules/archive/zip/stream.js → esm/modules/archive/streaming-zip.js} +48 -192
- package/dist/esm/modules/archive/utils/bytes.js +16 -16
- package/dist/esm/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/esm/modules/archive/utils/timestamps.js +1 -62
- package/dist/esm/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/esm/modules/archive/utils/zip-extra.js +68 -0
- package/dist/esm/modules/archive/zip-builder.js +292 -0
- package/dist/esm/modules/archive/zip-constants.js +23 -0
- package/dist/{browser/modules/archive/zip → esm/modules/archive}/zip-entry-metadata.js +3 -3
- package/dist/{browser/modules/archive/unzip → esm/modules/archive}/zip-parser.js +24 -38
- package/dist/esm/modules/archive/zip-records.js +84 -0
- package/dist/esm/modules/excel/stream/workbook-reader.browser.js +1 -1
- package/dist/esm/modules/excel/stream/workbook-writer.browser.js +1 -1
- package/dist/esm/modules/excel/xlsx/xlsx.browser.js +6 -3
- package/dist/esm/modules/excel/xlsx/xlsx.js +1 -1
- package/dist/esm/modules/stream/streams.browser.js +710 -830
- package/dist/esm/modules/stream/streams.js +58 -140
- package/dist/iife/THIRD_PARTY_NOTICES.md +31 -0
- package/dist/iife/excelts.iife.js +4425 -6215
- package/dist/iife/excelts.iife.js.map +1 -1
- package/dist/iife/excelts.iife.min.js +31 -103
- package/dist/types/index.browser.d.ts +0 -1
- package/dist/types/modules/archive/byte-queue.d.ts +18 -0
- package/dist/types/modules/archive/{compression/compress.browser.d.ts → compress.browser.d.ts} +8 -2
- package/dist/types/modules/archive/defaults.d.ts +0 -1
- package/dist/types/modules/archive/index.base.d.ts +4 -4
- package/dist/types/modules/archive/index.browser.d.ts +4 -3
- package/dist/types/modules/archive/index.d.ts +4 -3
- package/dist/types/modules/archive/{unzip/stream.base.d.ts → parse.base.d.ts} +4 -38
- package/dist/types/modules/archive/{unzip/stream.browser.d.ts → parse.browser.d.ts} +2 -2
- package/dist/types/modules/archive/{unzip/stream.d.ts → parse.d.ts} +3 -3
- package/dist/types/modules/archive/{compression/streaming-compress.browser.d.ts → streaming-compress.browser.d.ts} +1 -1
- package/dist/types/modules/archive/{zip/stream.d.ts → streaming-zip.d.ts} +6 -29
- package/dist/types/modules/archive/utils/zip-extra-fields.d.ts +1 -1
- package/dist/types/modules/archive/utils/zip-extra.d.ts +18 -0
- package/dist/types/modules/archive/zip-builder.d.ts +117 -0
- package/dist/types/modules/archive/zip-constants.d.ts +18 -0
- package/dist/types/modules/archive/{zip/zip-entry-metadata.d.ts → zip-entry-metadata.d.ts} +1 -1
- package/dist/{browser/modules/archive/unzip → types/modules/archive}/zip-parser.d.ts +1 -1
- package/dist/types/modules/archive/{zip-spec/zip-records.d.ts → zip-records.d.ts} +0 -20
- package/dist/types/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
- package/dist/types/modules/stream/streams.browser.d.ts +30 -28
- package/package.json +1 -5
- package/dist/browser/modules/archive/internal/byte-queue.d.ts +0 -33
- package/dist/browser/modules/archive/internal/byte-queue.js +0 -407
- package/dist/browser/modules/archive/io/archive-sink.d.ts +0 -9
- package/dist/browser/modules/archive/io/archive-sink.js +0 -77
- package/dist/browser/modules/archive/io/archive-source.d.ts +0 -8
- package/dist/browser/modules/archive/io/archive-source.js +0 -107
- package/dist/browser/modules/archive/unzip/index.d.ts +0 -40
- package/dist/browser/modules/archive/unzip/index.js +0 -164
- package/dist/browser/modules/archive/unzip/stream.base.js +0 -1022
- package/dist/browser/modules/archive/utils/async-queue.d.ts +0 -7
- package/dist/browser/modules/archive/utils/async-queue.js +0 -103
- package/dist/browser/modules/archive/utils/compressibility.d.ts +0 -10
- package/dist/browser/modules/archive/utils/compressibility.js +0 -57
- package/dist/browser/modules/archive/utils/pattern-scanner.d.ts +0 -21
- package/dist/browser/modules/archive/utils/pattern-scanner.js +0 -27
- package/dist/browser/modules/archive/zip/index.d.ts +0 -42
- package/dist/browser/modules/archive/zip/index.js +0 -157
- package/dist/browser/modules/archive/zip/zip-bytes.d.ts +0 -73
- package/dist/browser/modules/archive/zip/zip-bytes.js +0 -239
- package/dist/browser/modules/archive/zip-spec/zip-records.js +0 -126
- package/dist/cjs/modules/archive/internal/byte-queue.js +0 -411
- package/dist/cjs/modules/archive/io/archive-sink.js +0 -82
- package/dist/cjs/modules/archive/io/archive-source.js +0 -114
- package/dist/cjs/modules/archive/unzip/index.js +0 -170
- package/dist/cjs/modules/archive/unzip/stream.base.js +0 -1044
- package/dist/cjs/modules/archive/utils/async-queue.js +0 -106
- package/dist/cjs/modules/archive/utils/compressibility.js +0 -60
- package/dist/cjs/modules/archive/utils/pattern-scanner.js +0 -31
- package/dist/cjs/modules/archive/zip/index.js +0 -162
- package/dist/cjs/modules/archive/zip/zip-bytes.js +0 -242
- package/dist/cjs/modules/archive/zip-spec/zip-records.js +0 -136
- package/dist/esm/modules/archive/internal/byte-queue.js +0 -407
- package/dist/esm/modules/archive/io/archive-sink.js +0 -77
- package/dist/esm/modules/archive/io/archive-source.js +0 -107
- package/dist/esm/modules/archive/unzip/index.js +0 -164
- package/dist/esm/modules/archive/unzip/stream.base.js +0 -1022
- package/dist/esm/modules/archive/utils/async-queue.js +0 -103
- package/dist/esm/modules/archive/utils/compressibility.js +0 -57
- package/dist/esm/modules/archive/utils/pattern-scanner.js +0 -27
- package/dist/esm/modules/archive/zip/index.js +0 -157
- package/dist/esm/modules/archive/zip/zip-bytes.js +0 -239
- package/dist/esm/modules/archive/zip-spec/zip-records.js +0 -126
- package/dist/types/modules/archive/internal/byte-queue.d.ts +0 -33
- package/dist/types/modules/archive/io/archive-sink.d.ts +0 -9
- package/dist/types/modules/archive/io/archive-source.d.ts +0 -8
- package/dist/types/modules/archive/unzip/index.d.ts +0 -40
- package/dist/types/modules/archive/utils/async-queue.d.ts +0 -7
- package/dist/types/modules/archive/utils/compressibility.d.ts +0 -10
- package/dist/types/modules/archive/utils/pattern-scanner.d.ts +0 -21
- package/dist/types/modules/archive/zip/index.d.ts +0 -42
- package/dist/types/modules/archive/zip/zip-bytes.d.ts +0 -73
- /package/dist/browser/modules/archive/{compression/compress.base.d.ts → compress.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.base.d.ts → crc32.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/browser/modules/archive/{compression/deflate-fallback.d.ts → deflate-fallback.d.ts} +0 -0
- /package/dist/browser/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/browser/modules/archive/{compression/streaming-compress.base.d.ts → streaming-compress.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/browser/modules/archive/{zip-spec/zip-entry-info.d.ts → zip-entry-info.d.ts} +0 -0
- /package/dist/browser/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/browser/modules/archive/{zip/zip-entry-metadata.d.ts → zip-entry-metadata.d.ts} +0 -0
- /package/dist/cjs/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/cjs/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/cjs/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/cjs/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/cjs/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/esm/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/esm/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/esm/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/esm/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/esm/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/types/modules/archive/{compression/compress.base.d.ts → compress.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/compress.d.ts → compress.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.base.d.ts → crc32.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.browser.d.ts → crc32.browser.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.d.ts → crc32.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/deflate-fallback.d.ts → deflate-fallback.d.ts} +0 -0
- /package/dist/types/modules/archive/{unzip/extract.d.ts → extract.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/streaming-compress.base.d.ts → streaming-compress.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/streaming-compress.d.ts → streaming-compress.d.ts} +0 -0
- /package/dist/types/modules/archive/{zip-spec/zip-entry-info.d.ts → zip-entry-info.d.ts} +0 -0
|
@@ -27,7 +27,6 @@ export declare class Readable<T = Uint8Array> extends EventEmitter {
|
|
|
27
27
|
private _bufferSize;
|
|
28
28
|
private _reading;
|
|
29
29
|
private _ended;
|
|
30
|
-
private _endEmitted;
|
|
31
30
|
private _destroyed;
|
|
32
31
|
private _errored;
|
|
33
32
|
private _closed;
|
|
@@ -72,7 +71,6 @@ export declare class Readable<T = Uint8Array> extends EventEmitter {
|
|
|
72
71
|
* Push data to the stream (when using controllable stream)
|
|
73
72
|
*/
|
|
74
73
|
push(chunk: T | null): boolean;
|
|
75
|
-
private _emitEndOnce;
|
|
76
74
|
/**
|
|
77
75
|
* Put a chunk back at the front of the buffer
|
|
78
76
|
* Note: unshift is allowed even after end, as it's used to put back already read data
|
|
@@ -160,8 +158,7 @@ export declare class Readable<T = Uint8Array> extends EventEmitter {
|
|
|
160
158
|
private _startReading;
|
|
161
159
|
/**
|
|
162
160
|
* Async iterator support
|
|
163
|
-
* Uses
|
|
164
|
-
* This matches Node's behavior more closely (iterator drives flowing mode).
|
|
161
|
+
* Uses Web Stream reader for non-push mode, event-based for push mode
|
|
165
162
|
*/
|
|
166
163
|
[Symbol.asyncIterator](): AsyncIterableIterator<T>;
|
|
167
164
|
/**
|
|
@@ -288,12 +285,10 @@ export declare class Writable<T = Uint8Array> extends EventEmitter {
|
|
|
288
285
|
private _closed;
|
|
289
286
|
private _pendingWrites;
|
|
290
287
|
private _writableLength;
|
|
291
|
-
private _needDrain;
|
|
292
288
|
private _corked;
|
|
293
289
|
private _corkedChunks;
|
|
294
290
|
private _defaultEncoding;
|
|
295
291
|
private _aborted;
|
|
296
|
-
private _ownsStream;
|
|
297
292
|
private _writeFunc?;
|
|
298
293
|
private _finalFunc?;
|
|
299
294
|
readonly objectMode: boolean;
|
|
@@ -403,46 +398,53 @@ export declare function normalizeWritable<T = Uint8Array>(stream: WritableLike |
|
|
|
403
398
|
* A wrapper around Web TransformStream that provides Node.js-like API
|
|
404
399
|
*/
|
|
405
400
|
export declare class Transform<TInput = Uint8Array, TOutput = Uint8Array> extends EventEmitter {
|
|
401
|
+
private _stream;
|
|
406
402
|
/** @internal - for pipe() support */
|
|
407
403
|
readonly _readable: Readable<TOutput>;
|
|
408
404
|
/** @internal - for pipe() support */
|
|
409
405
|
readonly _writable: Writable<TInput>;
|
|
410
406
|
readonly objectMode: boolean;
|
|
411
|
-
private _destroyed;
|
|
412
407
|
private _ended;
|
|
408
|
+
private _destroyed;
|
|
413
409
|
private _errored;
|
|
414
|
-
private
|
|
415
|
-
private
|
|
416
|
-
private
|
|
417
|
-
private
|
|
418
|
-
private _transformImpl;
|
|
419
|
-
private _flushImpl;
|
|
410
|
+
private _pushBuffer;
|
|
411
|
+
private _transformController;
|
|
412
|
+
private _pendingEndWrites;
|
|
413
|
+
private _endPending;
|
|
420
414
|
/**
|
|
421
|
-
* Push data to the readable side (Node.js compatibility)
|
|
422
|
-
*
|
|
415
|
+
* Push data to the readable side (Node.js compatibility)
|
|
416
|
+
* Can be called from within transform callback
|
|
423
417
|
*/
|
|
424
418
|
push(chunk: TOutput | null): boolean;
|
|
425
419
|
constructor(options?: TransformStreamOptions & {
|
|
426
420
|
transform?: ((chunk: TInput) => TOutput | Promise<TOutput>) | ((this: Transform<TInput, TOutput>, chunk: TInput, encoding: string, callback: (error?: Error | null, data?: TOutput) => void) => void);
|
|
427
421
|
flush?: (() => TOutput | void | Promise<TOutput | void>) | ((this: Transform<TInput, TOutput>, callback: (error?: Error | null, data?: TOutput) => void) => void);
|
|
428
422
|
});
|
|
429
|
-
private
|
|
430
|
-
private _scheduleEnd;
|
|
431
|
-
private _emitErrorOnce;
|
|
432
|
-
private _hasSubclassTransform;
|
|
433
|
-
private _hasSubclassFlush;
|
|
434
|
-
private _runTransform;
|
|
435
|
-
private _runFlush;
|
|
423
|
+
private _dataForwardingSetup;
|
|
436
424
|
/**
|
|
437
|
-
* Override on
|
|
438
|
-
* Avoids starting flowing mode unless requested.
|
|
425
|
+
* Override on to start flowing when data listener is added
|
|
439
426
|
*/
|
|
440
427
|
on(event: string | symbol, listener: (...args: any[]) => void): this;
|
|
428
|
+
/** @internal - whether we have a data event consumer */
|
|
429
|
+
private _hasDataConsumer;
|
|
441
430
|
/**
|
|
442
|
-
* Write to the
|
|
431
|
+
* Write data to the transform stream
|
|
432
|
+
* Note: Automatically starts consuming readable if no consumer to allow
|
|
433
|
+
* transform function to execute (Web Streams backpressure compatibility)
|
|
443
434
|
*/
|
|
444
435
|
write(chunk: TInput, callback?: (error?: Error | null) => void): boolean;
|
|
445
436
|
write(chunk: TInput, encoding?: string, callback?: (error?: Error | null) => void): boolean;
|
|
437
|
+
/** @internal - whether we're auto-consuming the readable */
|
|
438
|
+
private _readableConsuming;
|
|
439
|
+
/** @internal - buffer for auto-consumed data */
|
|
440
|
+
private _autoConsumedBuffer;
|
|
441
|
+
private _autoConsumedBufferIndex;
|
|
442
|
+
/** @internal - whether auto-consume has ended */
|
|
443
|
+
private _autoConsumeEnded;
|
|
444
|
+
/** @internal - promise that resolves when auto-consume finishes */
|
|
445
|
+
private _autoConsumePromise;
|
|
446
|
+
/** @internal - auto-consume readable to allow transform to execute */
|
|
447
|
+
private _startAutoConsume;
|
|
446
448
|
/**
|
|
447
449
|
* End the transform stream
|
|
448
450
|
* Delays closing to allow writes during data events to complete
|
|
@@ -454,8 +456,10 @@ export declare class Transform<TInput = Uint8Array, TOutput = Uint8Array> extend
|
|
|
454
456
|
* Read from the transform stream
|
|
455
457
|
*/
|
|
456
458
|
read(size?: number): TOutput | null;
|
|
459
|
+
/** @internal - list of piped destinations for forwarding auto-consumed data */
|
|
460
|
+
private _pipeDestinations;
|
|
457
461
|
/**
|
|
458
|
-
* Pipe
|
|
462
|
+
* Pipe to another stream (writable, transform, or duplex)
|
|
459
463
|
*/
|
|
460
464
|
pipe<W extends Writable<TOutput> | Transform<TOutput, any> | Duplex<any, TOutput>>(destination: W): W;
|
|
461
465
|
/**
|
|
@@ -550,7 +554,6 @@ export declare class Duplex<TRead = Uint8Array, TWrite = Uint8Array> extends Eve
|
|
|
550
554
|
writable: WritableStream<W>;
|
|
551
555
|
};
|
|
552
556
|
private _dataForwardingSetup;
|
|
553
|
-
private _sideForwardingCleanup;
|
|
554
557
|
constructor(options?: DuplexStreamOptions & {
|
|
555
558
|
allowHalfOpen?: boolean;
|
|
556
559
|
objectMode?: boolean;
|
|
@@ -558,7 +561,6 @@ export declare class Duplex<TRead = Uint8Array, TWrite = Uint8Array> extends Eve
|
|
|
558
561
|
write?: (this: Duplex<TRead, TWrite>, chunk: TWrite, encoding: string, callback: (error?: Error | null) => void) => void;
|
|
559
562
|
final?: (this: Duplex<TRead, TWrite>, callback: (error?: Error | null) => void) => void;
|
|
560
563
|
});
|
|
561
|
-
private _setupSideForwarding;
|
|
562
564
|
/**
|
|
563
565
|
* Override on() to set up data forwarding lazily
|
|
564
566
|
*/
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@cj-tech-master/excelts",
|
|
3
|
-
"version": "4.2.1
|
|
3
|
+
"version": "4.2.1",
|
|
4
4
|
"description": "TypeScript Excel Workbook Manager - Read and Write xlsx and csv Files.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"publishConfig": {
|
|
@@ -44,10 +44,6 @@
|
|
|
44
44
|
"default": "./dist/cjs/index.js"
|
|
45
45
|
}
|
|
46
46
|
},
|
|
47
|
-
"./browser": {
|
|
48
|
-
"types": "./dist/browser/index.browser.d.ts",
|
|
49
|
-
"default": "./dist/browser/index.browser.js"
|
|
50
|
-
},
|
|
51
47
|
"./package.json": "./package.json"
|
|
52
48
|
},
|
|
53
49
|
"files": [
|
|
@@ -1,33 +0,0 @@
|
|
|
1
|
-
export declare class ByteQueue {
|
|
2
|
-
private static readonly EMPTY;
|
|
3
|
-
private _chunks;
|
|
4
|
-
private _headOffset;
|
|
5
|
-
private _length;
|
|
6
|
-
private _cachedView;
|
|
7
|
-
private _cachedLength;
|
|
8
|
-
constructor(initial?: Uint8Array);
|
|
9
|
-
get length(): number;
|
|
10
|
-
isEmpty(): boolean;
|
|
11
|
-
view(): Uint8Array;
|
|
12
|
-
reset(data?: Uint8Array): void;
|
|
13
|
-
append(chunk: Uint8Array): void;
|
|
14
|
-
read(length: number): Uint8Array;
|
|
15
|
-
/**
|
|
16
|
-
* Return a list of chunk views totaling `length` bytes without consuming.
|
|
17
|
-
*
|
|
18
|
-
* This avoids materializing a contiguous buffer for streaming write paths.
|
|
19
|
-
*/
|
|
20
|
-
peekChunks(length: number): Uint8Array[];
|
|
21
|
-
discard(length: number): void;
|
|
22
|
-
/**
|
|
23
|
-
* Find the first index of `pattern` within the queue.
|
|
24
|
-
*
|
|
25
|
-
* This avoids materializing a contiguous `view()` for common small patterns
|
|
26
|
-
* (ZIP signatures are typically 2-4 bytes).
|
|
27
|
-
*/
|
|
28
|
-
indexOfPattern(pattern: Uint8Array, startIndex?: number): number;
|
|
29
|
-
/** Peek a little-endian uint32 at `offset` without consuming bytes. Returns null if not enough bytes. */
|
|
30
|
-
peekUint32LE(offset: number): number | null;
|
|
31
|
-
/** Peek a single byte at `offset` without consuming bytes. */
|
|
32
|
-
peekByte(offset: number): number;
|
|
33
|
-
}
|
|
@@ -1,407 +0,0 @@
|
|
|
1
|
-
import { indexOfUint8ArrayPattern } from "../utils/bytes.js";
|
|
2
|
-
export class ByteQueue {
|
|
3
|
-
constructor(initial) {
|
|
4
|
-
// Store data as immutable chunks to avoid copying on append.
|
|
5
|
-
this._chunks = [];
|
|
6
|
-
this._headOffset = 0;
|
|
7
|
-
this._length = 0;
|
|
8
|
-
// Lazily materialized contiguous view (used only by callers that require a single buffer).
|
|
9
|
-
this._cachedView = null;
|
|
10
|
-
this._cachedLength = 0;
|
|
11
|
-
if (initial && initial.length > 0) {
|
|
12
|
-
this.reset(initial);
|
|
13
|
-
}
|
|
14
|
-
}
|
|
15
|
-
get length() {
|
|
16
|
-
return this._length;
|
|
17
|
-
}
|
|
18
|
-
isEmpty() {
|
|
19
|
-
return this.length === 0;
|
|
20
|
-
}
|
|
21
|
-
view() {
|
|
22
|
-
if (this._length === 0) {
|
|
23
|
-
return ByteQueue.EMPTY;
|
|
24
|
-
}
|
|
25
|
-
// Fast path: single chunk.
|
|
26
|
-
if (this._chunks.length === 1) {
|
|
27
|
-
const c = this._chunks[0];
|
|
28
|
-
return c.subarray(this._headOffset, this._headOffset + this._length);
|
|
29
|
-
}
|
|
30
|
-
if (this._cachedView && this._cachedLength === this._length) {
|
|
31
|
-
return this._cachedView;
|
|
32
|
-
}
|
|
33
|
-
const out = new Uint8Array(this._length);
|
|
34
|
-
let offset = 0;
|
|
35
|
-
for (let i = 0; i < this._chunks.length; i++) {
|
|
36
|
-
const c = this._chunks[i];
|
|
37
|
-
const start = i === 0 ? this._headOffset : 0;
|
|
38
|
-
const end = i === this._chunks.length - 1 ? start + (this._length - offset) : c.length;
|
|
39
|
-
out.set(c.subarray(start, end), offset);
|
|
40
|
-
offset += end - start;
|
|
41
|
-
if (offset >= out.length) {
|
|
42
|
-
break;
|
|
43
|
-
}
|
|
44
|
-
}
|
|
45
|
-
this._cachedView = out;
|
|
46
|
-
this._cachedLength = this._length;
|
|
47
|
-
return out;
|
|
48
|
-
}
|
|
49
|
-
reset(data) {
|
|
50
|
-
this._cachedView = null;
|
|
51
|
-
this._cachedLength = 0;
|
|
52
|
-
this._chunks = [];
|
|
53
|
-
this._headOffset = 0;
|
|
54
|
-
this._length = 0;
|
|
55
|
-
if (!data || data.length === 0) {
|
|
56
|
-
return;
|
|
57
|
-
}
|
|
58
|
-
// Keep a private copy to ensure future writes cannot mutate the source.
|
|
59
|
-
const copy = new Uint8Array(data.length);
|
|
60
|
-
copy.set(data);
|
|
61
|
-
this._chunks = [copy];
|
|
62
|
-
this._headOffset = 0;
|
|
63
|
-
this._length = copy.length;
|
|
64
|
-
}
|
|
65
|
-
append(chunk) {
|
|
66
|
-
if (chunk.length === 0) {
|
|
67
|
-
return;
|
|
68
|
-
}
|
|
69
|
-
this._cachedView = null;
|
|
70
|
-
this._cachedLength = 0;
|
|
71
|
-
this._chunks.push(chunk);
|
|
72
|
-
this._length += chunk.length;
|
|
73
|
-
}
|
|
74
|
-
read(length) {
|
|
75
|
-
if (length <= 0) {
|
|
76
|
-
return new Uint8Array(0);
|
|
77
|
-
}
|
|
78
|
-
if (length > this._length) {
|
|
79
|
-
throw new RangeError("ByteQueue: read beyond available data");
|
|
80
|
-
}
|
|
81
|
-
this._cachedView = null;
|
|
82
|
-
this._cachedLength = 0;
|
|
83
|
-
if (this._chunks.length === 1) {
|
|
84
|
-
const c = this._chunks[0];
|
|
85
|
-
const start = this._headOffset;
|
|
86
|
-
const end = start + length;
|
|
87
|
-
const out = c.subarray(start, end);
|
|
88
|
-
this._headOffset = end;
|
|
89
|
-
this._length -= length;
|
|
90
|
-
if (this._length === 0) {
|
|
91
|
-
this._chunks = [];
|
|
92
|
-
this._headOffset = 0;
|
|
93
|
-
}
|
|
94
|
-
else if (this._headOffset >= c.length) {
|
|
95
|
-
this._chunks.shift();
|
|
96
|
-
this._headOffset = 0;
|
|
97
|
-
}
|
|
98
|
-
return out;
|
|
99
|
-
}
|
|
100
|
-
// Slow path: spans multiple chunks, copy into a single output buffer.
|
|
101
|
-
const out = new Uint8Array(length);
|
|
102
|
-
let outOffset = 0;
|
|
103
|
-
let remaining = length;
|
|
104
|
-
while (remaining > 0) {
|
|
105
|
-
const c = this._chunks[0];
|
|
106
|
-
const start = this._headOffset;
|
|
107
|
-
const available = c.length - start;
|
|
108
|
-
const toCopy = Math.min(available, remaining);
|
|
109
|
-
out.set(c.subarray(start, start + toCopy), outOffset);
|
|
110
|
-
outOffset += toCopy;
|
|
111
|
-
remaining -= toCopy;
|
|
112
|
-
this._headOffset += toCopy;
|
|
113
|
-
this._length -= toCopy;
|
|
114
|
-
if (this._headOffset >= c.length) {
|
|
115
|
-
this._chunks.shift();
|
|
116
|
-
this._headOffset = 0;
|
|
117
|
-
}
|
|
118
|
-
}
|
|
119
|
-
if (this._length === 0) {
|
|
120
|
-
this._chunks = [];
|
|
121
|
-
this._headOffset = 0;
|
|
122
|
-
}
|
|
123
|
-
return out;
|
|
124
|
-
}
|
|
125
|
-
/**
|
|
126
|
-
* Return a list of chunk views totaling `length` bytes without consuming.
|
|
127
|
-
*
|
|
128
|
-
* This avoids materializing a contiguous buffer for streaming write paths.
|
|
129
|
-
*/
|
|
130
|
-
peekChunks(length) {
|
|
131
|
-
if (length <= 0) {
|
|
132
|
-
return [];
|
|
133
|
-
}
|
|
134
|
-
if (length > this._length) {
|
|
135
|
-
throw new RangeError("ByteQueue: peek beyond available data");
|
|
136
|
-
}
|
|
137
|
-
// Fast path: single chunk.
|
|
138
|
-
if (this._chunks.length === 1) {
|
|
139
|
-
const c = this._chunks[0];
|
|
140
|
-
const start = this._headOffset;
|
|
141
|
-
return [c.subarray(start, start + length)];
|
|
142
|
-
}
|
|
143
|
-
const parts = [];
|
|
144
|
-
let remaining = length;
|
|
145
|
-
for (let i = 0; i < this._chunks.length && remaining > 0; i++) {
|
|
146
|
-
const c = this._chunks[i];
|
|
147
|
-
const start = i === 0 ? this._headOffset : 0;
|
|
148
|
-
const avail = c.length - start;
|
|
149
|
-
if (avail <= 0) {
|
|
150
|
-
continue;
|
|
151
|
-
}
|
|
152
|
-
const toTake = Math.min(avail, remaining);
|
|
153
|
-
parts.push(c.subarray(start, start + toTake));
|
|
154
|
-
remaining -= toTake;
|
|
155
|
-
}
|
|
156
|
-
return parts;
|
|
157
|
-
}
|
|
158
|
-
discard(length) {
|
|
159
|
-
if (length <= 0) {
|
|
160
|
-
return;
|
|
161
|
-
}
|
|
162
|
-
if (length >= this._length) {
|
|
163
|
-
this._chunks = [];
|
|
164
|
-
this._headOffset = 0;
|
|
165
|
-
this._length = 0;
|
|
166
|
-
this._cachedView = null;
|
|
167
|
-
this._cachedLength = 0;
|
|
168
|
-
return;
|
|
169
|
-
}
|
|
170
|
-
this._cachedView = null;
|
|
171
|
-
this._cachedLength = 0;
|
|
172
|
-
let remaining = length;
|
|
173
|
-
while (remaining > 0) {
|
|
174
|
-
const c = this._chunks[0];
|
|
175
|
-
const start = this._headOffset;
|
|
176
|
-
const available = c.length - start;
|
|
177
|
-
const toDrop = Math.min(available, remaining);
|
|
178
|
-
this._headOffset += toDrop;
|
|
179
|
-
this._length -= toDrop;
|
|
180
|
-
remaining -= toDrop;
|
|
181
|
-
if (this._headOffset >= c.length) {
|
|
182
|
-
this._chunks.shift();
|
|
183
|
-
this._headOffset = 0;
|
|
184
|
-
}
|
|
185
|
-
}
|
|
186
|
-
if (this._length === 0) {
|
|
187
|
-
this._chunks = [];
|
|
188
|
-
this._headOffset = 0;
|
|
189
|
-
}
|
|
190
|
-
}
|
|
191
|
-
/**
 * Find the first index of `pattern` within the queue.
 *
 * This avoids materializing a contiguous `view()` for common small patterns
 * (ZIP signatures are typically 2-4 bytes).
 *
 * @param pattern - Byte pattern to search for (array-like of byte values).
 * @param startIndex - Logical queue offset to begin searching from
 *   (negative values are clamped to 0).
 * @returns Logical queue index of the first match, or -1 if not found.
 */
indexOfPattern(pattern, startIndex = 0) {
    const patLen = pattern.length;
    // Empty pattern matches trivially at position 0.
    if (patLen === 0) {
        return 0;
    }
    const len = this._length;
    if (patLen > len) {
        return -1;
    }
    // `| 0` coerces to int32 — also truncates non-integer/huge inputs.
    let start = startIndex | 0;
    if (start < 0) {
        start = 0;
    }
    // No room left for a full match.
    if (start > len - patLen) {
        return -1;
    }
    // Fast path: single chunk.
    if (this._chunks.length === 1) {
        const c = this._chunks[0];
        const base = this._headOffset;
        // View over just the live bytes, so returned indices are already
        // queue-relative.
        const view = c.subarray(base, base + len);
        // Delegate to native indexOf checks for 1..4 bytes.
        if (patLen === 1) {
            return view.indexOf(pattern[0], start);
        }
        return indexOfUint8ArrayPattern(view, pattern, start);
    }
    // Multi-chunk: optimize only for very common small patterns.
    if (patLen > 4) {
        // Rare: materialize view.
        const v = this.view();
        return indexOfUint8ArrayPattern(v, pattern, start);
    }
    // Pre-extract up to four pattern bytes; unused slots stay 0 and are
    // never compared because of the patLen checks below.
    const b0 = pattern[0];
    const b1 = patLen >= 2 ? pattern[1] : 0;
    const b2 = patLen >= 3 ? pattern[2] : 0;
    const b3 = patLen >= 4 ? pattern[3] : 0;
    const chunks = this._chunks;
    // Read the byte at chunk-local `absoluteIndex`, spilling into following
    // chunks when the index runs past the current chunk. Returns null past
    // the end of all chunks.
    const peekByteAcrossChunks = (chunkIndex, absoluteIndex) => {
        let ci = chunkIndex;
        let idx = absoluteIndex;
        while (ci < chunks.length) {
            const c = chunks[ci];
            if (idx < c.length) {
                return c[idx] | 0;
            }
            idx -= c.length;
            ci++;
        }
        return null;
    };
    // `globalBase` tracks the queue-relative offset of the current chunk's
    // first live byte.
    let globalBase = 0;
    for (let ci = 0; ci < chunks.length; ci++) {
        const c = chunks[ci];
        // Only chunk 0 can have consumed bytes at its front.
        const chunkOffset = ci === 0 ? this._headOffset : 0;
        const chunkLen = c.length - chunkOffset;
        if (chunkLen <= 0) {
            continue;
        }
        const chunkStartGlobal = globalBase;
        const chunkEndGlobal = chunkStartGlobal + chunkLen;
        // Compute local start for this chunk.
        const localStart = start <= chunkStartGlobal
            ? chunkOffset
            : start >= chunkEndGlobal
                ? c.length
                : chunkOffset + (start - chunkStartGlobal);
        // `start` lies entirely past this chunk — skip it.
        if (localStart > c.length - 1) {
            globalBase += chunkLen;
            continue;
        }
        const lastLocal = c.length - 1;
        // Use native indexOf to hop between candidate first-byte positions.
        let i = c.indexOf(b0, localStart);
        while (i !== -1 && i <= lastLocal) {
            // Translate the chunk-local hit back to a queue-relative index.
            const globalPos = chunkStartGlobal + (i - chunkOffset);
            // Too close to the end of the queue for a full match; since
            // later candidates are only further right, stop entirely.
            if (globalPos > len - patLen) {
                return -1;
            }
            if (patLen === 1) {
                return globalPos;
            }
            // Fast path: match stays fully inside the current chunk.
            // Avoid calling peekByte() which walks the chunk list per byte.
            const staysInChunk = i + patLen <= c.length;
            if (staysInChunk) {
                if (c[i + 1] !== b1) {
                    i = c.indexOf(b0, i + 1);
                    continue;
                }
                if (patLen === 2) {
                    return globalPos;
                }
                if (c[i + 2] !== b2) {
                    i = c.indexOf(b0, i + 1);
                    continue;
                }
                if (patLen === 3) {
                    return globalPos;
                }
                if (c[i + 3] !== b3) {
                    i = c.indexOf(b0, i + 1);
                    continue;
                }
                return globalPos;
            }
            // Slow path: pattern spans chunks.
            const b1v = peekByteAcrossChunks(ci, i + 1);
            if (b1v === null || b1v !== b1) {
                i = c.indexOf(b0, i + 1);
                continue;
            }
            if (patLen === 2) {
                return globalPos;
            }
            const b2v = peekByteAcrossChunks(ci, i + 2);
            if (b2v === null || b2v !== b2) {
                i = c.indexOf(b0, i + 1);
                continue;
            }
            if (patLen === 3) {
                return globalPos;
            }
            const b3v = peekByteAcrossChunks(ci, i + 3);
            if (b3v === null || b3v !== b3) {
                i = c.indexOf(b0, i + 1);
                continue;
            }
            return globalPos;
        }
        globalBase += chunkLen;
    }
    return -1;
}
|
|
330
|
-
/**
 * Peek a little-endian uint32 at `offset` without consuming bytes.
 * Returns null if not enough bytes.
 *
 * @param offset - Logical queue offset of the first (lowest) byte.
 * @returns Unsigned 32-bit value, or null when fewer than 4 bytes remain
 *   at `offset` (or `offset` is negative).
 */
peekUint32LE(offset) {
    const off = offset | 0;
    if (off < 0 || off + 4 > this._length) {
        return null;
    }
    // Try to read contiguously from a single chunk to avoid 4x chunk-walk.
    const chunks = this._chunks;
    // `remaining` counts how many live bytes still to skip before `off`.
    let remaining = off;
    for (let i = 0; i < chunks.length; i++) {
        const c = chunks[i];
        // Only chunk 0 can have consumed bytes at its front.
        const start = i === 0 ? this._headOffset : 0;
        const avail = c.length - start;
        if (remaining < avail) {
            const idx = start + remaining;
            // All four bytes fit in this chunk: read them directly.
            if (idx + 4 <= c.length) {
                const b0 = c[idx] | 0;
                const b1 = c[idx + 1] | 0;
                const b2 = c[idx + 2] | 0;
                const b3 = c[idx + 3] | 0;
                // `>>> 0` forces an unsigned result (b3's high bit would
                // otherwise make the value negative).
                return (b0 | (b1 << 8) | (b2 << 16) | (b3 << 24)) >>> 0;
            }
            // Cross-chunk read (rare): walk forward across chunks once.
            const b0 = c[idx] | 0;
            let b1 = 0;
            let b2 = 0;
            let b3 = 0;
            let ci = i;
            let pos = idx + 1;
            // Collect the remaining three bytes, advancing to the next
            // chunk (which always starts at index 0) whenever `pos` runs
            // off the end of the current one.
            for (let k = 1; k < 4; k++) {
                while (ci < chunks.length) {
                    const cc = chunks[ci];
                    if (pos < cc.length) {
                        const v = cc[pos] | 0;
                        if (k === 1) {
                            b1 = v;
                        }
                        else if (k === 2) {
                            b2 = v;
                        }
                        else {
                            b3 = v;
                        }
                        pos++;
                        break;
                    }
                    ci++;
                    pos = 0;
                }
            }
            return (b0 | (b1 << 8) | (b2 << 16) | (b3 << 24)) >>> 0;
        }
        remaining -= avail;
    }
    // Should be unreachable due to bounds check above.
    return null;
}
|
|
387
|
-
/** Peek a single byte at `offset` without consuming bytes. */
|
|
388
|
-
peekByte(offset) {
|
|
389
|
-
const off = offset | 0;
|
|
390
|
-
if (off < 0 || off >= this._length) {
|
|
391
|
-
throw new RangeError("ByteQueue: peek beyond available data");
|
|
392
|
-
}
|
|
393
|
-
let remaining = off;
|
|
394
|
-
for (let i = 0; i < this._chunks.length; i++) {
|
|
395
|
-
const c = this._chunks[i];
|
|
396
|
-
const start = i === 0 ? this._headOffset : 0;
|
|
397
|
-
const avail = c.length - start;
|
|
398
|
-
if (remaining < avail) {
|
|
399
|
-
return c[start + remaining] | 0;
|
|
400
|
-
}
|
|
401
|
-
remaining -= avail;
|
|
402
|
-
}
|
|
403
|
-
// Should be unreachable.
|
|
404
|
-
throw new RangeError("ByteQueue: peek beyond available data");
|
|
405
|
-
}
|
|
406
|
-
}
|
|
407
|
-
// Shared zero-length buffer exposed as a static property (assigned after
// the class body in this compiled output).
ByteQueue.EMPTY = new Uint8Array(0);
|
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
/**
 * Destination accepted by archive writers: either a Web `WritableStream`
 * or a Node-style writable duck type.
 */
export type ArchiveSink = WritableStream<Uint8Array> | {
    /** Write one chunk; a `false` return signals backpressure (Node semantics). */
    write(chunk: Uint8Array): any;
    /** Optional Node-style end-of-stream signal. */
    end?(cb?: any): any;
    /** Optional event subscription (e.g. 'drain', 'finish', 'close', 'error'). */
    on?(event: string, listener: (...args: any[]) => void): any;
    /** Optional one-shot event subscription. */
    once?(event: string, listener: (...args: any[]) => void): any;
};
/** True when `value` looks like a Web WritableStream (duck-typed via `getWriter`). */
export declare function isWritableStream(value: unknown): value is WritableStream<Uint8Array>;
/** Drain `iterable` into `sink`, honoring backpressure; resolves once flushed. */
export declare function pipeIterableToSink(iterable: AsyncIterable<Uint8Array>, sink: ArchiveSink): Promise<void>;
/** Concatenate every chunk of `iterable` into one contiguous Uint8Array. */
export declare function collect(iterable: AsyncIterable<Uint8Array>): Promise<Uint8Array>;
|
|
@@ -1,77 +0,0 @@
|
|
|
1
|
-
/**
 * Duck-typed check for a Web WritableStream: any non-null object exposing a
 * `getWriter` function is treated as one.
 */
export function isWritableStream(value) {
    if (!value || typeof value !== "object") {
        return false;
    }
    return typeof value.getWriter === "function";
}
|
|
4
|
-
/**
 * Resolve when `event` fires on `emitter`, reject on its 'error' event.
 * Listeners are detached on settlement via whichever removal API the
 * emitter exposes (`off` and/or `removeListener`).
 *
 * NOTE(review): registration uses `emitter.on?.(...)`; if the emitter lacks
 * `on`, the promise never settles — callers guard on the emitter shape.
 */
function once(emitter, event) {
    return new Promise((resolve, reject) => {
        // Function declarations hoist, so detach/handlers may reference
        // each other freely.
        function detach() {
            emitter.off?.("error", handleError);
            emitter.off?.(event, handleDone);
            emitter.removeListener?.("error", handleError);
            emitter.removeListener?.(event, handleDone);
        }
        function handleError(err) {
            detach();
            reject(err instanceof Error ? err : new Error(String(err)));
        }
        function handleDone() {
            detach();
            resolve();
        }
        emitter.on?.("error", handleError);
        emitter.on?.(event, handleDone);
    });
}
|
|
24
|
-
/**
 * Pipe an async iterable of byte chunks into an ArchiveSink.
 *
 * For a Web WritableStream, writes through a writer (awaiting each write so
 * backpressure is respected) and closes on success. For a Node-style sink,
 * honors `write() === false` backpressure by awaiting 'drain', then calls
 * `end()` and waits for 'finish'/'close' when the sink supports events.
 *
 * @param iterable - Source of Uint8Array chunks.
 * @param sink - WritableStream or Node-style writable duck type.
 * @returns Resolves once all chunks are flushed; rejects with the first
 *   iteration/write error.
 */
export async function pipeIterableToSink(iterable, sink) {
    if (isWritableStream(sink)) {
        const writer = sink.getWriter();
        try {
            for await (const chunk of iterable) {
                await writer.write(chunk);
            }
            await writer.close();
        }
        catch (err) {
            // Fix: previously an iteration/write failure only released the
            // lock, leaving the stream neither closed nor errored. Abort so
            // the stream (and any queued writes) observes the failure.
            try {
                await writer.abort(err);
            }
            catch {
                // Ignore — the stream may already be errored.
            }
            throw err;
        }
        finally {
            try {
                writer.releaseLock();
            }
            catch {
                // Ignore
            }
        }
        return;
    }
    // Node-style Writable
    for await (const chunk of iterable) {
        const ok = sink.write(chunk);
        // `false` means the internal buffer is full — wait for 'drain'.
        if (ok === false && typeof sink.once === "function") {
            await once(sink, "drain");
        }
    }
    if (typeof sink.end === "function") {
        sink.end();
    }
    // Wait for the sink to flush; 'close' covers sinks that never emit
    // 'finish' (e.g. destroyed streams).
    if (typeof sink.once === "function") {
        await Promise.race([once(sink, "finish"), once(sink, "close")]);
    }
}
|
|
57
|
-
/**
 * Gather every chunk of an async iterable into a single Uint8Array.
 *
 * With exactly one chunk the chunk itself is returned (no copy); otherwise
 * a fresh buffer of the total size is filled in order.
 */
export async function collect(iterable) {
    const parts = [];
    let size = 0;
    for await (const part of iterable) {
        parts.push(part);
        size += part.length;
    }
    if (parts.length === 0) {
        return new Uint8Array(0);
    }
    if (parts.length === 1) {
        // Single chunk: hand it back directly rather than copying.
        return parts[0];
    }
    const merged = new Uint8Array(size);
    let cursor = 0;
    for (const part of parts) {
        merged.set(part, cursor);
        cursor += part.length;
    }
    return merged;
}
|
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
/**
 * Input accepted by the archive reader: raw bytes, text, a Blob, or any
 * (async-)iterable / ReadableStream of chunks.
 */
export type ArchiveSource = Uint8Array | ArrayBuffer | string | Blob | AsyncIterable<unknown> | ReadableStream<unknown> | {
    [Symbol.asyncIterator](): AsyncIterator<unknown>;
};
/** True when `value` looks like a Web ReadableStream (duck-typed — confirm exact check in the implementation). */
export declare function isReadableStream(value: unknown): value is ReadableStream<unknown>;
/** True when `value` implements the async-iteration protocol. */
export declare function isAsyncIterable(value: unknown): value is AsyncIterable<unknown>;
/** Synchronously normalize bytes/text to a Uint8Array (strings presumably UTF-8 encoded — verify against impl). */
export declare function toUint8ArraySync(source: Uint8Array | ArrayBuffer | string): Uint8Array;
/** Normalize bytes/text/Blob to a Uint8Array, awaiting Blob reads. */
export declare function toUint8Array(source: Uint8Array | ArrayBuffer | string | Blob): Promise<Uint8Array>;
/** Adapt any supported source into an async iterable of Uint8Array chunks. */
export declare function toAsyncIterable(source: ArchiveSource): AsyncIterable<Uint8Array>;
|