7z-iterator 1.1.2 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.d.cts +12 -4
- package/dist/cjs/FileEntry.d.ts +12 -4
- package/dist/cjs/FileEntry.js +52 -24
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/SevenZipIterator.d.cts +25 -2
- package/dist/cjs/SevenZipIterator.d.ts +25 -2
- package/dist/cjs/SevenZipIterator.js +68 -21
- package/dist/cjs/SevenZipIterator.js.map +1 -1
- package/dist/cjs/index.d.cts +1 -2
- package/dist/cjs/index.d.ts +1 -2
- package/dist/cjs/index.js +19 -3
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/lib/streamToSource.d.cts +8 -11
- package/dist/cjs/lib/streamToSource.d.ts +8 -11
- package/dist/cjs/lib/streamToSource.js +21 -67
- package/dist/cjs/lib/streamToSource.js.map +1 -1
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +13 -0
- package/dist/cjs/lzma/index.d.ts +13 -0
- package/dist/cjs/lzma/index.js +63 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
- package/dist/cjs/lzma/stream/transforms.js +149 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +110 -0
- package/dist/cjs/lzma/types.d.ts +110 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/nextEntry.js +24 -26
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.js +69 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.js +574 -203
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
- package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
- package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.js +2 -15
- package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
- package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.js +29 -10
- package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/cjs/types.d.cts +2 -16
- package/dist/cjs/types.d.ts +2 -16
- package/dist/cjs/types.js.map +1 -1
- package/dist/esm/FileEntry.d.ts +12 -4
- package/dist/esm/FileEntry.js +52 -26
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/SevenZipIterator.d.ts +25 -2
- package/dist/esm/SevenZipIterator.js +69 -22
- package/dist/esm/SevenZipIterator.js.map +1 -1
- package/dist/esm/index.d.ts +1 -2
- package/dist/esm/index.js +2 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/streamToSource.d.ts +8 -11
- package/dist/esm/lib/streamToSource.js +22 -68
- package/dist/esm/lib/streamToSource.js.map +1 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +13 -0
- package/dist/esm/lzma/index.js +15 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +38 -0
- package/dist/esm/lzma/stream/transforms.js +150 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +110 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/nextEntry.js +24 -26
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/esm/sevenz/ArchiveSource.js +70 -1
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/esm/sevenz/SevenZipParser.js +414 -198
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/BZip2.js +2 -1
- package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Bcj.js +106 -6
- package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
- package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/esm/sevenz/codecs/Copy.js +1 -9
- package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
- package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/esm/sevenz/codecs/Deflate.js +9 -7
- package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Delta.js +33 -8
- package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/esm/sevenz/codecs/Lzma.js +17 -24
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/esm/types.d.ts +2 -16
- package/dist/esm/types.js.map +1 -1
- package/package.json +3 -3
- package/assets/lzma-purejs/LICENSE +0 -11
- package/assets/lzma-purejs/index.js +0 -19
- package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
- package/assets/lzma-purejs/lib/LZ.js +0 -6
- package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
- package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
- package/assets/lzma-purejs/lib/LZMA.js +0 -6
- package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
- package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
- package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
- package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
- package/assets/lzma-purejs/lib/Stream.js +0 -41
- package/assets/lzma-purejs/lib/Util.js +0 -114
- package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
- package/assets/lzma-purejs/package-lock.json +0 -13
- package/assets/lzma-purejs/package.json +0 -8
@@ -0,0 +1,101 @@
+/**
+ * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams
+ *
+ * For multi-file solid archives, the folder is decompressed as a single stream.
+ * This class splits that stream into individual file streams based on known file boundaries.
+ *
+ * Features:
+ * - Lazy stream creation (streams created on first access)
+ * - Backpressure propagation (returns false when downstream is full)
+ * - Running CRC verification per file
+ * - Automatic cleanup of completed streams
+ */
+import type Stream from 'stream';
+export interface FolderStreamSplitterOptions {
+    /** Sizes of each file in the folder (in order) */
+    fileSizes: number[];
+    /** Whether to verify CRC for each file */
+    verifyCrc?: boolean;
+    /** Expected CRCs for each file (parallel to fileSizes) */
+    expectedCrcs?: (number | undefined)[];
+}
+/**
+ * Splits a decompressed folder stream into individual file streams.
+ *
+ * Usage:
+ * ```
+ * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });
+ *
+ * decompressStream.on('data', (chunk) => {
+ *   if (!splitter.write(chunk)) {
+ *     decompressStream.pause();
+ *     splitter.onDrain(() => decompressStream.resume());
+ *   }
+ * });
+ * decompressStream.on('end', () => splitter.end());
+ *
+ * // Get stream for file at index 1 (created lazily)
+ * const fileStream = splitter.getFileStream(1);
+ * ```
+ */
+export declare class FolderStreamSplitter {
+    private fileBoundaries;
+    private fileStreams;
+    private fileCrcs;
+    private currentFileIndex;
+    private bytesWritten;
+    private currentFileEnd;
+    private verifyCrc;
+    private expectedCrcs;
+    private finished;
+    private error;
+    private drainCallbacks;
+    private _needsDrain;
+    constructor(options: FolderStreamSplitterOptions);
+    /**
+     * Write decompressed data chunk. Data is routed to appropriate file stream(s).
+     * Returns false if backpressure should be applied (downstream is full).
+     */
+    write(chunk: Buffer): boolean;
+    /**
+     * Ensure stream exists for file index (lazy creation)
+     */
+    private ensureFileStream;
+    /**
+     * Complete current file and move to next
+     */
+    private finishCurrentFile;
+    /**
+     * Signal end of decompressed data
+     */
+    end(): void;
+    /**
+     * Emit error to all pending file streams
+     */
+    private emitError;
+    /**
+     * Get the stream for a specific file by index.
+     * Stream is created lazily on first access.
+     */
+    getFileStream(fileIndex: number): Stream.PassThrough;
+    /**
+     * Register callback for when backpressure clears
+     */
+    onDrain(callback: () => void): void;
+    /**
+     * Notify all drain callbacks
+     */
+    private notifyDrain;
+    /**
+     * Check if a specific file's stream has been fully written
+     */
+    isFileComplete(fileIndex: number): boolean;
+    /**
+     * Get total number of files in this folder
+     */
+    get fileCount(): number;
+    /**
+     * Check if splitter has encountered an error
+     */
+    getError(): Error | null;
+}

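The declaration above can be exercised roughly as follows. This TypeScript sketch is not part of the package diff; it relies only on the API surface shown in this hunk, and the decoder stream, import path, and sink logic are illustrative assumptions.

import type { Readable } from 'stream';
import { FolderStreamSplitter } from './FolderStreamSplitter.js'; // assumed relative import

// Feed a decompressed folder stream into the splitter and consume each file in order.
function splitFolder(decoded: Readable, fileSizes: number[], expectedCrcs: number[]): void {
  const splitter = new FolderStreamSplitter({ fileSizes, expectedCrcs });

  // Route decompressed bytes into the splitter, honoring backpressure.
  decoded.on('data', (chunk: Buffer) => {
    if (!splitter.write(chunk)) {
      decoded.pause();
      splitter.onDrain(() => decoded.resume());
    }
  });
  decoded.on('end', () => splitter.end());

  // Per-file streams must be requested in order; here they are requested up front.
  for (let i = 0; i < splitter.fileCount; i++) {
    const file = splitter.getFileStream(i);
    file.on('data', (bytes: Buffer) => {
      // deliver `bytes` to whatever sink owns file i (placeholder)
    });
    file.on('error', (err: Error) => console.error(`file ${i} failed:`, err));
  }
}

Requesting every stream up front is the simplest wiring, though it forgoes the lazy creation the class is designed around.
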
@@ -0,0 +1,207 @@
+/**
+ * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams
+ *
+ * For multi-file solid archives, the folder is decompressed as a single stream.
+ * This class splits that stream into individual file streams based on known file boundaries.
+ *
+ * Features:
+ * - Lazy stream creation (streams created on first access)
+ * - Backpressure propagation (returns false when downstream is full)
+ * - Running CRC verification per file
+ * - Automatic cleanup of completed streams
+ */ import { crc32, PassThrough } from 'extract-base-iterator';
+/**
+ * Splits a decompressed folder stream into individual file streams.
+ *
+ * Usage:
+ * ```
+ * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });
+ *
+ * decompressStream.on('data', (chunk) => {
+ *   if (!splitter.write(chunk)) {
+ *     decompressStream.pause();
+ *     splitter.onDrain(() => decompressStream.resume());
+ *   }
+ * });
+ * decompressStream.on('end', () => splitter.end());
+ *
+ * // Get stream for file at index 1 (created lazily)
+ * const fileStream = splitter.getFileStream(1);
+ * ```
+ */ export class FolderStreamSplitter {
+    /**
+     * Write decompressed data chunk. Data is routed to appropriate file stream(s).
+     * Returns false if backpressure should be applied (downstream is full).
+     */ write(chunk) {
+        if (this.finished || this.error) return true;
+        let offset = 0;
+        let canContinue = true;
+        while(offset < chunk.length && this.currentFileIndex < this.fileStreams.length){
+            const remaining = chunk.length - offset;
+            const neededForFile = this.currentFileEnd - this.bytesWritten;
+            const toWrite = Math.min(remaining, neededForFile);
+            if (toWrite > 0) {
+                const fileChunk = chunk.slice(offset, offset + toWrite);
+                // Ensure stream exists (lazy creation)
+                const fileStream = this.ensureFileStream(this.currentFileIndex);
+                // Update CRC
+                if (this.verifyCrc) {
+                    this.fileCrcs[this.currentFileIndex] = crc32(fileChunk, this.fileCrcs[this.currentFileIndex]);
+                }
+                // Write to file stream, track backpressure
+                if (!fileStream.write(fileChunk)) {
+                    canContinue = false;
+                    this._needsDrain = true;
+                    fileStream.once('drain', ()=>{
+                        this._needsDrain = false;
+                        this.notifyDrain();
+                    });
+                }
+            }
+            this.bytesWritten += toWrite;
+            offset += toWrite;
+            // Check if current file is complete
+            if (this.bytesWritten >= this.currentFileEnd) {
+                this.finishCurrentFile();
+            }
+        }
+        return canContinue;
+    }
+    /**
+     * Ensure stream exists for file index (lazy creation)
+     */ ensureFileStream(fileIndex) {
+        let stream = this.fileStreams[fileIndex];
+        if (!stream) {
+            stream = new PassThrough();
+            this.fileStreams[fileIndex] = stream;
+        }
+        return stream;
+    }
+    /**
+     * Complete current file and move to next
+     */ finishCurrentFile() {
+        const fileStream = this.fileStreams[this.currentFileIndex];
+        // Verify CRC if enabled
+        if (this.verifyCrc) {
+            const expectedCrc = this.expectedCrcs[this.currentFileIndex];
+            if (expectedCrc !== undefined && this.fileCrcs[this.currentFileIndex] !== expectedCrc) {
+                const err = new Error(`CRC mismatch for file ${this.currentFileIndex}: expected ${expectedCrc.toString(16)}, got ${this.fileCrcs[this.currentFileIndex].toString(16)}`);
+                this.emitError(err);
+                return;
+            }
+        }
+        // End this file's stream
+        if (fileStream) {
+            fileStream.end();
+        }
+        // Release reference for GC
+        this.fileStreams[this.currentFileIndex] = null;
+        // Move to next file
+        this.currentFileIndex++;
+        if (this.currentFileIndex < this.fileBoundaries.length - 1) {
+            this.currentFileEnd = this.fileBoundaries[this.currentFileIndex + 1];
+        }
+    }
+    /**
+     * Signal end of decompressed data
+     */ end() {
+        if (this.finished) return;
+        this.finished = true;
+        // End any remaining streams
+        for(let i = this.currentFileIndex; i < this.fileStreams.length; i++){
+            const stream = this.fileStreams[i];
+            if (stream) {
+                stream.end();
+            }
+            this.fileStreams[i] = null;
+        }
+    }
+    /**
+     * Emit error to all pending file streams
+     */ emitError(err) {
+        this.error = err;
+        for(let i = this.currentFileIndex; i < this.fileStreams.length; i++){
+            const stream = this.fileStreams[i];
+            if (stream) {
+                stream.emit('error', err);
+                stream.end();
+            }
+            this.fileStreams[i] = null;
+        }
+    }
+    /**
+     * Get the stream for a specific file by index.
+     * Stream is created lazily on first access.
+     */ getFileStream(fileIndex) {
+        if (fileIndex < 0 || fileIndex >= this.fileBoundaries.length - 1) {
+            throw new Error(`Invalid file index: ${fileIndex}`);
+        }
+        // Check if file already completed
+        if (fileIndex < this.currentFileIndex) {
+            throw new Error(`File ${fileIndex} already completed - streams must be accessed in order`);
+        }
+        return this.ensureFileStream(fileIndex);
+    }
+    /**
+     * Register callback for when backpressure clears
+     */ onDrain(callback) {
+        if (!this._needsDrain) {
+            callback();
+        } else {
+            this.drainCallbacks.push(callback);
+        }
+    }
+    /**
+     * Notify all drain callbacks
+     */ notifyDrain() {
+        const callbacks = this.drainCallbacks;
+        this.drainCallbacks = [];
+        for(let i = 0; i < callbacks.length; i++){
+            callbacks[i]();
+        }
+    }
+    /**
+     * Check if a specific file's stream has been fully written
+     */ isFileComplete(fileIndex) {
+        return fileIndex < this.currentFileIndex;
+    }
+    /**
+     * Get total number of files in this folder
+     */ get fileCount() {
+        return this.fileBoundaries.length - 1;
+    }
+    /**
+     * Check if splitter has encountered an error
+     */ getError() {
+        return this.error;
+    }
+    constructor(options){
+        const fileSizes = options.fileSizes;
+        const verifyCrc = options.verifyCrc !== undefined ? options.verifyCrc : true;
+        const expectedCrcs = options.expectedCrcs || [];
+        this.verifyCrc = verifyCrc;
+        this.expectedCrcs = expectedCrcs;
+        this.currentFileIndex = 0;
+        this.bytesWritten = 0;
+        this.finished = false;
+        this.error = null;
+        this.drainCallbacks = [];
+        this._needsDrain = false;
+        // Calculate cumulative boundaries
+        this.fileBoundaries = [
+            0
+        ];
+        for(let i = 0; i < fileSizes.length; i++){
+            this.fileBoundaries.push(this.fileBoundaries[this.fileBoundaries.length - 1] + fileSizes[i]);
+        }
+        // Initialize streams array (lazy creation - all null initially)
+        this.fileStreams = [];
+        this.fileCrcs = [];
+        for(let i = 0; i < fileSizes.length; i++){
+            this.fileStreams.push(null);
+            this.fileCrcs.push(0);
+        }
+        // Set first file boundary
+        this.currentFileEnd = this.fileBoundaries[1] || 0;
+    }
+}

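A small worked example of the boundary arithmetic in the constructor and write() above; the values are arbitrary and the relative import path is assumed.

import { FolderStreamSplitter } from './FolderStreamSplitter.js';

// fileSizes = [1000, 2000, 500] yields cumulative boundaries [0, 1000, 3000, 3500],
// so fileCount === 3 and the first file ends at byte offset 1000.
const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500], verifyCrc: false });

// A single 1500-byte chunk is split across two files:
//   bytes 0..999     -> file 0 (which is then finished and its stream ended)
//   bytes 1000..1499 -> file 1 (500 of its 2000 bytes written so far)
splitter.write(Buffer.alloc(1500));
console.log(splitter.fileCount);          // 3
console.log(splitter.isFileComplete(0));  // true
console.log(splitter.isFileComplete(1));  // false
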
@@ -0,0 +1 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/FolderStreamSplitter.ts"],"sourcesContent":["/**\n * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams\n *\n * For multi-file solid archives, the folder is decompressed as a single stream.\n * This class splits that stream into individual file streams based on known file boundaries.\n *\n * Features:\n * - Lazy stream creation (streams created on first access)\n * - Backpressure propagation (returns false when downstream is full)\n * - Running CRC verification per file\n * - Automatic cleanup of completed streams\n */\n\nimport { crc32, PassThrough } from 'extract-base-iterator';\nimport type Stream from 'stream';\n\nexport interface FolderStreamSplitterOptions {\n /** Sizes of each file in the folder (in order) */\n fileSizes: number[];\n /** Whether to verify CRC for each file */\n verifyCrc?: boolean;\n /** Expected CRCs for each file (parallel to fileSizes) */\n expectedCrcs?: (number | undefined)[];\n}\n\n/**\n * Splits a decompressed folder stream into individual file streams.\n *\n * Usage:\n * ```\n * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });\n *\n * decompressStream.on('data', (chunk) => {\n * if (!splitter.write(chunk)) {\n * decompressStream.pause();\n * splitter.onDrain(() => decompressStream.resume());\n * }\n * });\n * decompressStream.on('end', () => splitter.end());\n *\n * // Get stream for file at index 1 (created lazily)\n * const fileStream = splitter.getFileStream(1);\n * ```\n */\nexport class FolderStreamSplitter {\n private fileBoundaries: number[]; // Cumulative offsets [0, size1, size1+size2, ...]\n private fileStreams: (Stream.PassThrough | null)[]; // Lazy-created, null after completion\n private fileCrcs: number[]; // Running CRC per file\n private currentFileIndex: number;\n private bytesWritten: number;\n private currentFileEnd: number;\n private verifyCrc: boolean;\n private expectedCrcs: (number | undefined)[];\n private finished: boolean;\n private error: Error | null;\n private drainCallbacks: (() => void)[];\n private _needsDrain: boolean;\n\n constructor(options: FolderStreamSplitterOptions) {\n const fileSizes = options.fileSizes;\n const verifyCrc = options.verifyCrc !== undefined ? options.verifyCrc : true;\n const expectedCrcs = options.expectedCrcs || [];\n\n this.verifyCrc = verifyCrc;\n this.expectedCrcs = expectedCrcs;\n this.currentFileIndex = 0;\n this.bytesWritten = 0;\n this.finished = false;\n this.error = null;\n this.drainCallbacks = [];\n this._needsDrain = false;\n\n // Calculate cumulative boundaries\n this.fileBoundaries = [0];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileBoundaries.push(this.fileBoundaries[this.fileBoundaries.length - 1] + fileSizes[i]);\n }\n\n // Initialize streams array (lazy creation - all null initially)\n this.fileStreams = [];\n this.fileCrcs = [];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileStreams.push(null);\n this.fileCrcs.push(0);\n }\n\n // Set first file boundary\n this.currentFileEnd = this.fileBoundaries[1] || 0;\n }\n\n /**\n * Write decompressed data chunk. 
Data is routed to appropriate file stream(s).\n * Returns false if backpressure should be applied (downstream is full).\n */\n write(chunk: Buffer): boolean {\n if (this.finished || this.error) return true;\n\n let offset = 0;\n let canContinue = true;\n\n while (offset < chunk.length && this.currentFileIndex < this.fileStreams.length) {\n const remaining = chunk.length - offset;\n const neededForFile = this.currentFileEnd - this.bytesWritten;\n const toWrite = Math.min(remaining, neededForFile);\n\n if (toWrite > 0) {\n const fileChunk = chunk.slice(offset, offset + toWrite);\n\n // Ensure stream exists (lazy creation)\n const fileStream = this.ensureFileStream(this.currentFileIndex);\n\n // Update CRC\n if (this.verifyCrc) {\n this.fileCrcs[this.currentFileIndex] = crc32(fileChunk, this.fileCrcs[this.currentFileIndex]);\n }\n\n // Write to file stream, track backpressure\n if (!fileStream.write(fileChunk)) {\n canContinue = false;\n this._needsDrain = true;\n fileStream.once('drain', () => {\n this._needsDrain = false;\n this.notifyDrain();\n });\n }\n }\n\n this.bytesWritten += toWrite;\n offset += toWrite;\n\n // Check if current file is complete\n if (this.bytesWritten >= this.currentFileEnd) {\n this.finishCurrentFile();\n }\n }\n\n return canContinue;\n }\n\n /**\n * Ensure stream exists for file index (lazy creation)\n */\n private ensureFileStream(fileIndex: number): Stream.PassThrough {\n let stream = this.fileStreams[fileIndex];\n if (!stream) {\n stream = new PassThrough();\n this.fileStreams[fileIndex] = stream;\n }\n return stream;\n }\n\n /**\n * Complete current file and move to next\n */\n private finishCurrentFile(): void {\n const fileStream = this.fileStreams[this.currentFileIndex];\n\n // Verify CRC if enabled\n if (this.verifyCrc) {\n const expectedCrc = this.expectedCrcs[this.currentFileIndex];\n if (expectedCrc !== undefined && this.fileCrcs[this.currentFileIndex] !== expectedCrc) {\n const err = new Error(`CRC mismatch for file ${this.currentFileIndex}: expected ${expectedCrc.toString(16)}, got ${this.fileCrcs[this.currentFileIndex].toString(16)}`);\n this.emitError(err);\n return;\n }\n }\n\n // End this file's stream\n if (fileStream) {\n fileStream.end();\n }\n\n // Release reference for GC\n this.fileStreams[this.currentFileIndex] = null;\n\n // Move to next file\n this.currentFileIndex++;\n if (this.currentFileIndex < this.fileBoundaries.length - 1) {\n this.currentFileEnd = this.fileBoundaries[this.currentFileIndex + 1];\n }\n }\n\n /**\n * Signal end of decompressed data\n */\n end(): void {\n if (this.finished) return;\n this.finished = true;\n\n // End any remaining streams\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Emit error to all pending file streams\n */\n private emitError(err: Error): void {\n this.error = err;\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.emit('error', err);\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Get the stream for a specific file by index.\n * Stream is created lazily on first access.\n */\n getFileStream(fileIndex: number): Stream.PassThrough {\n if (fileIndex < 0 || fileIndex >= this.fileBoundaries.length - 1) {\n throw new Error(`Invalid file index: ${fileIndex}`);\n }\n\n // Check if file already completed\n if (fileIndex < this.currentFileIndex) {\n 
throw new Error(`File ${fileIndex} already completed - streams must be accessed in order`);\n }\n\n return this.ensureFileStream(fileIndex);\n }\n\n /**\n * Register callback for when backpressure clears\n */\n onDrain(callback: () => void): void {\n if (!this._needsDrain) {\n callback();\n } else {\n this.drainCallbacks.push(callback);\n }\n }\n\n /**\n * Notify all drain callbacks\n */\n private notifyDrain(): void {\n const callbacks = this.drainCallbacks;\n this.drainCallbacks = [];\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i]();\n }\n }\n\n /**\n * Check if a specific file's stream has been fully written\n */\n isFileComplete(fileIndex: number): boolean {\n return fileIndex < this.currentFileIndex;\n }\n\n /**\n * Get total number of files in this folder\n */\n get fileCount(): number {\n return this.fileBoundaries.length - 1;\n }\n\n /**\n * Check if splitter has encountered an error\n */\n getError(): Error | null {\n return this.error;\n }\n}\n"],"names":["crc32","PassThrough","FolderStreamSplitter","write","chunk","finished","error","offset","canContinue","length","currentFileIndex","fileStreams","remaining","neededForFile","currentFileEnd","bytesWritten","toWrite","Math","min","fileChunk","slice","fileStream","ensureFileStream","verifyCrc","fileCrcs","_needsDrain","once","notifyDrain","finishCurrentFile","fileIndex","stream","expectedCrc","expectedCrcs","undefined","err","Error","toString","emitError","end","fileBoundaries","i","emit","getFileStream","onDrain","callback","drainCallbacks","push","callbacks","isFileComplete","fileCount","getError","options","fileSizes"],"mappings":"AAAA;;;;;;;;;;;CAWC,GAED,SAASA,KAAK,EAAEC,WAAW,QAAQ,wBAAwB;AAY3D;;;;;;;;;;;;;;;;;;CAkBC,GACD,OAAO,MAAMC;IA8CX;;;GAGC,GACDC,MAAMC,KAAa,EAAW;QAC5B,IAAI,IAAI,CAACC,QAAQ,IAAI,IAAI,CAACC,KAAK,EAAE,OAAO;QAExC,IAAIC,SAAS;QACb,IAAIC,cAAc;QAElB,MAAOD,SAASH,MAAMK,MAAM,IAAI,IAAI,CAACC,gBAAgB,GAAG,IAAI,CAACC,WAAW,CAACF,MAAM,CAAE;YAC/E,MAAMG,YAAYR,MAAMK,MAAM,GAAGF;YACjC,MAAMM,gBAAgB,IAAI,CAACC,cAAc,GAAG,IAAI,CAACC,YAAY;YAC7D,MAAMC,UAAUC,KAAKC,GAAG,CAACN,WAAWC;YAEpC,IAAIG,UAAU,GAAG;gBACf,MAAMG,YAAYf,MAAMgB,KAAK,CAACb,QAAQA,SAASS;gBAE/C,uCAAuC;gBACvC,MAAMK,aAAa,IAAI,CAACC,gBAAgB,CAAC,IAAI,CAACZ,gBAAgB;gBAE9D,aAAa;gBACb,IAAI,IAAI,CAACa,SAAS,EAAE;oBAClB,IAAI,CAACC,QAAQ,CAAC,IAAI,CAACd,gBAAgB,CAAC,GAAGV,MAAMmB,WAAW,IAAI,CAACK,QAAQ,CAAC,IAAI,CAACd,gBAAgB,CAAC;gBAC9F;gBAEA,2CAA2C;gBAC3C,IAAI,CAACW,WAAWlB,KAAK,CAACgB,YAAY;oBAChCX,cAAc;oBACd,IAAI,CAACiB,WAAW,GAAG;oBACnBJ,WAAWK,IAAI,CAAC,SAAS;wBACvB,IAAI,CAACD,WAAW,GAAG;wBACnB,IAAI,CAACE,WAAW;oBAClB;gBACF;YACF;YAEA,IAAI,CAACZ,YAAY,IAAIC;YACrBT,UAAUS;YAEV,oCAAoC;YACpC,IAAI,IAAI,CAACD,YAAY,IAAI,IAAI,CAACD,cAAc,EAAE;gBAC5C,IAAI,CAACc,iBAAiB;YACxB;QACF;QAEA,OAAOpB;IACT;IAEA;;GAEC,GACD,AAAQc,iBAAiBO,SAAiB,EAAsB;QAC9D,IAAIC,SAAS,IAAI,CAACnB,WAAW,CAACkB,UAAU;QACxC,IAAI,CAACC,QAAQ;YACXA,SAAS,IAAI7B;YACb,IAAI,CAACU,WAAW,CAACkB,UAAU,GAAGC;QAChC;QACA,OAAOA;IACT;IAEA;;GAEC,GACD,AAAQF,oBAA0B;QAChC,MAAMP,aAAa,IAAI,CAACV,WAAW,CAAC,IAAI,CAACD,gBAAgB,CAAC;QAE1D,wBAAwB;QACxB,IAAI,IAAI,CAACa,SAAS,EAAE;YAClB,MAAMQ,cAAc,IAAI,CAACC,YAAY,CAAC,IAAI,CAACtB,gBAAgB,CAAC;YAC5D,IAAIqB,gBAAgBE,aAAa,IAAI,CAACT,QAAQ,CAAC,IAAI,CAACd,gBAAgB,CAAC,KAAKqB,aAAa;gBACrF,MAAMG,MAAM,IAAIC,MAAM,CAAC,sBAAsB,EAAE,IAAI,CAACzB,gBAAgB,CAAC,WAAW,EAAEqB,YAAYK,QAAQ,CAAC,IAAI,MAAM,EAAE,IAAI,CAACZ,QAAQ,CAAC,IAAI,CAACd,gBAAgB,CAAC,CAAC0B,QAAQ,CAAC,KAAK;gBACtK,IAAI,CAACC,SAAS,CAACH;gBACf;YACF;QACF;QAEA,yBAAyB;QACzB,IAAIb,YAAY;YACdA,WAAWiB,GAAG;QAChB;QAEA,2BAA2B;QAC3B,IAAI,CAAC3B,WAAW,CAAC,IAAI,CAACD,gBAAgB,CAAC,GAA
G;QAE1C,oBAAoB;QACpB,IAAI,CAACA,gBAAgB;QACrB,IAAI,IAAI,CAACA,gBAAgB,GAAG,IAAI,CAAC6B,cAAc,CAAC9B,MAAM,GAAG,GAAG;YAC1D,IAAI,CAACK,cAAc,GAAG,IAAI,CAACyB,cAAc,CAAC,IAAI,CAAC7B,gBAAgB,GAAG,EAAE;QACtE;IACF;IAEA;;GAEC,GACD4B,MAAY;QACV,IAAI,IAAI,CAACjC,QAAQ,EAAE;QACnB,IAAI,CAACA,QAAQ,GAAG;QAEhB,4BAA4B;QAC5B,IAAK,IAAImC,IAAI,IAAI,CAAC9B,gBAAgB,EAAE8B,IAAI,IAAI,CAAC7B,WAAW,CAACF,MAAM,EAAE+B,IAAK;YACpE,MAAMV,SAAS,IAAI,CAACnB,WAAW,CAAC6B,EAAE;YAClC,IAAIV,QAAQ;gBACVA,OAAOQ,GAAG;YACZ;YACA,IAAI,CAAC3B,WAAW,CAAC6B,EAAE,GAAG;QACxB;IACF;IAEA;;GAEC,GACD,AAAQH,UAAUH,GAAU,EAAQ;QAClC,IAAI,CAAC5B,KAAK,GAAG4B;QACb,IAAK,IAAIM,IAAI,IAAI,CAAC9B,gBAAgB,EAAE8B,IAAI,IAAI,CAAC7B,WAAW,CAACF,MAAM,EAAE+B,IAAK;YACpE,MAAMV,SAAS,IAAI,CAACnB,WAAW,CAAC6B,EAAE;YAClC,IAAIV,QAAQ;gBACVA,OAAOW,IAAI,CAAC,SAASP;gBACrBJ,OAAOQ,GAAG;YACZ;YACA,IAAI,CAAC3B,WAAW,CAAC6B,EAAE,GAAG;QACxB;IACF;IAEA;;;GAGC,GACDE,cAAcb,SAAiB,EAAsB;QACnD,IAAIA,YAAY,KAAKA,aAAa,IAAI,CAACU,cAAc,CAAC9B,MAAM,GAAG,GAAG;YAChE,MAAM,IAAI0B,MAAM,CAAC,oBAAoB,EAAEN,WAAW;QACpD;QAEA,kCAAkC;QAClC,IAAIA,YAAY,IAAI,CAACnB,gBAAgB,EAAE;YACrC,MAAM,IAAIyB,MAAM,CAAC,KAAK,EAAEN,UAAU,sDAAsD,CAAC;QAC3F;QAEA,OAAO,IAAI,CAACP,gBAAgB,CAACO;IAC/B;IAEA;;GAEC,GACDc,QAAQC,QAAoB,EAAQ;QAClC,IAAI,CAAC,IAAI,CAACnB,WAAW,EAAE;YACrBmB;QACF,OAAO;YACL,IAAI,CAACC,cAAc,CAACC,IAAI,CAACF;QAC3B;IACF;IAEA;;GAEC,GACD,AAAQjB,cAAoB;QAC1B,MAAMoB,YAAY,IAAI,CAACF,cAAc;QACrC,IAAI,CAACA,cAAc,GAAG,EAAE;QACxB,IAAK,IAAIL,IAAI,GAAGA,IAAIO,UAAUtC,MAAM,EAAE+B,IAAK;YACzCO,SAAS,CAACP,EAAE;QACd;IACF;IAEA;;GAEC,GACDQ,eAAenB,SAAiB,EAAW;QACzC,OAAOA,YAAY,IAAI,CAACnB,gBAAgB;IAC1C;IAEA;;GAEC,GACD,IAAIuC,YAAoB;QACtB,OAAO,IAAI,CAACV,cAAc,CAAC9B,MAAM,GAAG;IACtC;IAEA;;GAEC,GACDyC,WAAyB;QACvB,OAAO,IAAI,CAAC5C,KAAK;IACnB;IAtNA,YAAY6C,OAAoC,CAAE;QAChD,MAAMC,YAAYD,QAAQC,SAAS;QACnC,MAAM7B,YAAY4B,QAAQ5B,SAAS,KAAKU,YAAYkB,QAAQ5B,SAAS,GAAG;QACxE,MAAMS,eAAemB,QAAQnB,YAAY,IAAI,EAAE;QAE/C,IAAI,CAACT,SAAS,GAAGA;QACjB,IAAI,CAACS,YAAY,GAAGA;QACpB,IAAI,CAACtB,gBAAgB,GAAG;QACxB,IAAI,CAACK,YAAY,GAAG;QACpB,IAAI,CAACV,QAAQ,GAAG;QAChB,IAAI,CAACC,KAAK,GAAG;QACb,IAAI,CAACuC,cAAc,GAAG,EAAE;QACxB,IAAI,CAACpB,WAAW,GAAG;QAEnB,kCAAkC;QAClC,IAAI,CAACc,cAAc,GAAG;YAAC;SAAE;QACzB,IAAK,IAAIC,IAAI,GAAGA,IAAIY,UAAU3C,MAAM,EAAE+B,IAAK;YACzC,IAAI,CAACD,cAAc,CAACO,IAAI,CAAC,IAAI,CAACP,cAAc,CAAC,IAAI,CAACA,cAAc,CAAC9B,MAAM,GAAG,EAAE,GAAG2C,SAAS,CAACZ,EAAE;QAC7F;QAEA,gEAAgE;QAChE,IAAI,CAAC7B,WAAW,GAAG,EAAE;QACrB,IAAI,CAACa,QAAQ,GAAG,EAAE;QAClB,IAAK,IAAIgB,IAAI,GAAGA,IAAIY,UAAU3C,MAAM,EAAE+B,IAAK;YACzC,IAAI,CAAC7B,WAAW,CAACmC,IAAI,CAAC;YACtB,IAAI,CAACtB,QAAQ,CAACsB,IAAI,CAAC;QACrB;QAEA,0BAA0B;QAC1B,IAAI,CAAChC,cAAc,GAAG,IAAI,CAACyB,cAAc,CAAC,EAAE,IAAI;IAClD;AAyLF"}

@@ -17,7 +17,7 @@
  * - Non-solid: one file per folder
  * - Supports LZMA, LZMA2, COPY, BCJ2, and other codecs
  */
-import Stream from 'stream';
+import type Stream from 'stream';
 import type { ArchiveSource } from './ArchiveSource.js';
 type Readable = Stream.Readable;
 export { type ArchiveSource, BufferSource, FileSource } from './ArchiveSource.js';

@@ -36,6 +36,7 @@ export interface SevenZipEntry {
     _streamIndexInFolder: number;
     _hasStream: boolean;
     _crc?: number;
+    _canStream: boolean;
 }
 /**
  * SevenZipParser - parses 7z archives and provides entry iteration

@@ -50,6 +51,7 @@ export declare class SevenZipParser {
     private decompressedCache;
     private filesPerFolder;
     private extractedPerFolder;
+    private folderSplitters;
     constructor(source: ArchiveSource);
     /**
      * Parse the archive structure

@@ -78,14 +80,21 @@ export declare class SevenZipParser {
      */
     getEntries(): SevenZipEntry[];
     /**
-     * Get a readable stream for an entry's content
+     * Get a readable stream for an entry's content.
+     * Returns immediately - decompression happens when data is read (proper streaming).
+     * Uses true streaming for codecs that support it, buffered for others.
      */
     getEntryStream(entry: SevenZipEntry): Readable;
     /**
-     *
-     *
+     * True streaming: data flows through without buffering entire folder.
+     * Only used for single-file folders with streamable codecs (BZip2, Deflate, LZMA2).
      */
-
+    private _getEntryStreamStreaming;
+    /**
+     * Buffered extraction: decompress entire folder, slice out file.
+     * Used for codecs that don't support incremental streaming (LZMA1, BCJ2).
+     */
+    private _getEntryStreamBuffered;
     /**
      * Check if a folder uses BCJ2 codec
      */

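As documented in the hunk above, getEntryStream() hands back a Readable immediately and decompresses as the consumer reads. A minimal consumer might look like the sketch below; it assumes a parser that has already parsed its source and a caller-supplied output path, since the entry's public name fields are not shown in this hunk.

import { createWriteStream } from 'fs';
import type { SevenZipParser, SevenZipEntry } from './SevenZipParser.js';

// Sketch only: `outPathFor` is a placeholder for whatever maps an entry to a destination.
function extractAll(parser: SevenZipParser, outPathFor: (e: SevenZipEntry) => string): void {
  for (const entry of parser.getEntries()) {
    if (!entry._hasStream) continue; // skip entries without content streams
    // Returns immediately; bytes are decompressed as the stream is read.
    parser.getEntryStream(entry).pipe(createWriteStream(outPathFor(entry)));
  }
}
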
@@ -95,11 +104,6 @@ export declare class SevenZipParser {
      * Only caches when multiple files share a block, releases when last file extracted
      */
     private getDecompressedFolder;
-    /**
-     * Get decompressed data for a folder using streaming (callback-based async)
-     * Uses createDecoder() streams for non-blocking decompression
-     */
-    private getDecompressedFolderAsync;
     /**
      * Decompress a BCJ2 folder with multi-stream handling
      * BCJ2 uses 4 input streams: main, call, jump, range coder

|
@@ -113,4 +117,61 @@ export declare class SevenZipParser {
|
|
|
113
117
|
* Close the parser and release resources
|
|
114
118
|
*/
|
|
115
119
|
close(): void;
|
|
120
|
+
/**
|
|
121
|
+
* Check if a codec supports true streaming decompression.
|
|
122
|
+
*
|
|
123
|
+
* Only codecs that process data incrementally (not buffering entire input) qualify.
|
|
124
|
+
* @param codecId - The codec ID as an array of bytes
|
|
125
|
+
* @returns true if the codec can stream
|
|
126
|
+
*/
|
|
127
|
+
private codecSupportsStreaming;
|
|
128
|
+
/**
|
|
129
|
+
* Check if a folder can be streamed (vs buffered).
|
|
130
|
+
*
|
|
131
|
+
* Streaming is possible when ALL codecs in the chain support streaming.
|
|
132
|
+
* BCJ2 folders are never streamable due to their 4-stream architecture.
|
|
133
|
+
*
|
|
134
|
+
* @param folderIndex - Index of the folder to check
|
|
135
|
+
* @returns true if the folder can be streamed
|
|
136
|
+
*/
|
|
137
|
+
canStreamFolder(folderIndex: number): boolean;
|
|
138
|
+
/**
|
|
139
|
+
* Stream a folder's decompression.
|
|
140
|
+
*
|
|
141
|
+
* Creates a pipeline: packed data → codec decoders → output stream
|
|
142
|
+
*
|
|
143
|
+
* @param folderIndex - Index of folder to decompress
|
|
144
|
+
* @returns Object with output stream and control methods
|
|
145
|
+
*/
|
|
146
|
+
streamFolder(folderIndex: number): {
|
|
147
|
+
output: Readable;
|
|
148
|
+
pause: () => void;
|
|
149
|
+
resume: () => void;
|
|
150
|
+
destroy: (err?: Error) => void;
|
|
151
|
+
};
|
|
152
|
+
/**
|
|
153
|
+
* Get a streaming entry stream (Promise-based API).
|
|
154
|
+
*
|
|
155
|
+
* For streamable folders: Returns a true streaming decompression
|
|
156
|
+
* For non-streamable folders: Falls back to buffered extraction
|
|
157
|
+
*
|
|
158
|
+
* @param entry - The entry to get stream for
|
|
159
|
+
* @returns Promise resolving to readable stream
|
|
160
|
+
*/
|
|
161
|
+
getEntryStreamStreaming(entry: SevenZipEntry): Promise<Readable>;
|
|
162
|
+
/**
|
|
163
|
+
* Direct streaming for single-file folders.
|
|
164
|
+
* Pipes folder decompression directly to output with CRC verification.
|
|
165
|
+
*/
|
|
166
|
+
private getEntryStreamDirect;
|
|
167
|
+
/**
|
|
168
|
+
* Get stream from folder splitter (for multi-file folders).
|
|
169
|
+
* Creates splitter on first access, reuses for subsequent files in same folder.
|
|
170
|
+
*/
|
|
171
|
+
private getEntryStreamFromSplitter;
|
|
172
|
+
/**
|
|
173
|
+
* Get file sizes and CRCs for all files in a folder (in stream order).
|
|
174
|
+
* Used by FolderStreamSplitter to know file boundaries.
|
|
175
|
+
*/
|
|
176
|
+
private getFolderFileInfo;
|
|
116
177
|
}
|
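The new public streaming surface declared in this hunk can be driven roughly as follows. This sketch is written against the signatures above only; the folder index, destination path, and wiring are illustrative, and none of the private helpers are assumed.

import { createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import type { SevenZipParser, SevenZipEntry } from './SevenZipParser.js';

// Promise-based per-entry API: true streaming when the folder's codec chain supports it,
// with a transparent fallback to buffered extraction otherwise.
async function extractStreaming(parser: SevenZipParser, entry: SevenZipEntry, dest: string): Promise<void> {
  const stream = await parser.getEntryStreamStreaming(entry);
  await pipeline(stream, createWriteStream(dest));
}

// Folder-level API: stream a whole folder's decompressed bytes, propagating backpressure.
function dumpFolder(parser: SevenZipParser, folderIndex: number, dest: string): void {
  if (!parser.canStreamFolder(folderIndex)) return; // e.g. BCJ2 folders are never streamable
  const { output, pause, resume, destroy } = parser.streamFolder(folderIndex);
  const sink = createWriteStream(dest);
  output.on('data', (chunk: Buffer) => {
    if (!sink.write(chunk)) {
      pause();                    // push backpressure into the decode pipeline
      sink.once('drain', resume);
    }
  });
  output.on('end', () => sink.end());
  output.on('error', (err: Error) => {
    destroy(err);
    sink.destroy(err);
  });
}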