7z-iterator 1.1.2 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.d.cts +12 -4
- package/dist/cjs/FileEntry.d.ts +12 -4
- package/dist/cjs/FileEntry.js +52 -24
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/SevenZipIterator.d.cts +25 -2
- package/dist/cjs/SevenZipIterator.d.ts +25 -2
- package/dist/cjs/SevenZipIterator.js +68 -21
- package/dist/cjs/SevenZipIterator.js.map +1 -1
- package/dist/cjs/index.d.cts +0 -2
- package/dist/cjs/index.d.ts +0 -2
- package/dist/cjs/index.js +3 -12
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/lib/streamToSource.d.cts +8 -11
- package/dist/cjs/lib/streamToSource.d.ts +8 -11
- package/dist/cjs/lib/streamToSource.js +21 -67
- package/dist/cjs/lib/streamToSource.js.map +1 -1
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +13 -0
- package/dist/cjs/lzma/index.d.ts +13 -0
- package/dist/cjs/lzma/index.js +63 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
- package/dist/cjs/lzma/stream/transforms.js +149 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +110 -0
- package/dist/cjs/lzma/types.d.ts +110 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/nextEntry.js +24 -26
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.js +69 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.js +574 -203
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
- package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
- package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.js +2 -15
- package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
- package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.js +29 -10
- package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/cjs/types.d.cts +2 -16
- package/dist/cjs/types.d.ts +2 -16
- package/dist/cjs/types.js.map +1 -1
- package/dist/esm/FileEntry.d.ts +12 -4
- package/dist/esm/FileEntry.js +52 -26
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/SevenZipIterator.d.ts +25 -2
- package/dist/esm/SevenZipIterator.js +69 -22
- package/dist/esm/SevenZipIterator.js.map +1 -1
- package/dist/esm/index.d.ts +0 -2
- package/dist/esm/index.js +0 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/streamToSource.d.ts +8 -11
- package/dist/esm/lib/streamToSource.js +22 -68
- package/dist/esm/lib/streamToSource.js.map +1 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +13 -0
- package/dist/esm/lzma/index.js +15 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +38 -0
- package/dist/esm/lzma/stream/transforms.js +150 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +110 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/nextEntry.js +24 -26
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/esm/sevenz/ArchiveSource.js +70 -1
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/esm/sevenz/SevenZipParser.js +414 -198
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/BZip2.js +2 -1
- package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Bcj.js +106 -6
- package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
- package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/esm/sevenz/codecs/Copy.js +1 -9
- package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
- package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/esm/sevenz/codecs/Deflate.js +9 -7
- package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Delta.js +33 -8
- package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/esm/sevenz/codecs/Lzma.js +17 -24
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/esm/types.d.ts +2 -16
- package/dist/esm/types.js.map +1 -1
- package/package.json +3 -3
- package/assets/lzma-purejs/LICENSE +0 -11
- package/assets/lzma-purejs/index.js +0 -19
- package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
- package/assets/lzma-purejs/lib/LZ.js +0 -6
- package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
- package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
- package/assets/lzma-purejs/lib/LZMA.js +0 -6
- package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
- package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
- package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
- package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
- package/assets/lzma-purejs/lib/Stream.js +0 -41
- package/assets/lzma-purejs/lib/Util.js +0 -114
- package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
- package/assets/lzma-purejs/package-lock.json +0 -13
- package/assets/lzma-purejs/package.json +0 -8
|
@@ -16,18 +16,9 @@
|
|
|
16
16
|
* - Solid archives: multiple files share one folder (decompress once)
|
|
17
17
|
* - Non-solid: one file per folder
|
|
18
18
|
* - Supports LZMA, LZMA2, COPY, BCJ2, and other codecs
|
|
19
|
-
*/ import { crc32 } from 'extract-base-iterator';
|
|
20
|
-
import oo from 'on-one';
|
|
21
|
-
import Stream from 'stream';
|
|
19
|
+
*/ import { crc32, PassThrough } from 'extract-base-iterator';
|
|
22
20
|
import { decodeBcj2Multi, getCodec, getCodecName, isBcj2Codec, isCodecSupported } from './codecs/index.js';
|
|
23
|
-
|
|
24
|
-
const major = +process.versions.node.split('.')[0];
|
|
25
|
-
let PassThrough;
|
|
26
|
-
if (major > 0) {
|
|
27
|
-
PassThrough = Stream.PassThrough;
|
|
28
|
-
} else {
|
|
29
|
-
PassThrough = require('readable-stream').PassThrough;
|
|
30
|
-
}
|
|
21
|
+
import { FolderStreamSplitter } from './FolderStreamSplitter.js';
|
|
31
22
|
import { createCodedError, ErrorCode, FileAttribute, PropertyId, SIGNATURE_HEADER_SIZE } from './constants.js';
|
|
32
23
|
import { parseEncodedHeader, parseHeaderContent, parseSignatureHeader } from './headers.js';
|
|
33
24
|
import { readNumber } from './NumberCodec.js';
|
|
@@ -299,6 +290,15 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
299
290
|
}
|
|
300
291
|
}
|
|
301
292
|
}
|
|
293
|
+
// Set _canStream for all entries now that we have complete folder info
|
|
294
|
+
// This must be done after all entries are built because canStreamFolder
|
|
295
|
+
// relies on the folder structure being fully parsed
|
|
296
|
+
for(let i = 0; i < this.entries.length; i++){
|
|
297
|
+
const entry = this.entries[i];
|
|
298
|
+
if (entry._hasStream && entry._folderIndex >= 0) {
|
|
299
|
+
entry._canStream = this.canStreamFolder(entry._folderIndex);
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
302
|
}
|
|
303
303
|
/**
|
|
304
304
|
* Create an entry from file info
|
|
@@ -340,7 +340,8 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
340
340
|
_folderIndex: folderIndex,
|
|
341
341
|
_streamIndex: 0,
|
|
342
342
|
_streamIndexInFolder: streamInFolder,
|
|
343
|
-
_hasStream: file.hasStream
|
|
343
|
+
_hasStream: file.hasStream,
|
|
344
|
+
_canStream: false
|
|
344
345
|
};
|
|
345
346
|
}
|
|
346
347
|
/**
|
|
@@ -352,7 +353,9 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
352
353
|
return this.entries;
|
|
353
354
|
}
|
|
354
355
|
/**
|
|
355
|
-
* Get a readable stream for an entry's content
|
|
356
|
+
* Get a readable stream for an entry's content.
|
|
357
|
+
* Returns immediately - decompression happens when data is read (proper streaming).
|
|
358
|
+
* Uses true streaming for codecs that support it, buffered for others.
|
|
356
359
|
*/ getEntryStream(entry) {
|
|
357
360
|
if (!entry._hasStream || entry.type === 'directory') {
|
|
358
361
|
// Return empty stream for directories and empty files
|
|
@@ -376,106 +379,148 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
376
379
|
throw createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC);
|
|
377
380
|
}
|
|
378
381
|
}
|
|
379
|
-
//
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
for(let m = 0; m < entry._streamIndexInFolder; m++){
|
|
386
|
-
// Sum sizes of all streams before this one in the folder
|
|
387
|
-
const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
|
|
388
|
-
fileStart += this.streamsInfo.unpackSizes[prevStreamGlobalIndex];
|
|
389
|
-
}
|
|
390
|
-
const fileSize = entry.size;
|
|
391
|
-
// Create a PassThrough stream with the file data
|
|
392
|
-
const outputStream = new PassThrough();
|
|
393
|
-
// Bounds check to prevent "oob" error on older Node versions
|
|
394
|
-
if (fileStart + fileSize > data.length) {
|
|
395
|
-
throw createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED);
|
|
396
|
-
}
|
|
397
|
-
const fileData = data.slice(fileStart, fileStart + fileSize);
|
|
398
|
-
// Verify CRC if present
|
|
399
|
-
if (entry._crc !== undefined) {
|
|
400
|
-
const actualCRC = crc32(fileData);
|
|
401
|
-
if (actualCRC !== entry._crc) {
|
|
402
|
-
throw createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH);
|
|
403
|
-
}
|
|
404
|
-
}
|
|
405
|
-
outputStream.end(fileData);
|
|
406
|
-
// Track extraction and release cache when all files from this folder are done
|
|
407
|
-
this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
|
|
408
|
-
if (this.extractedPerFolder[folderIdx] >= this.filesPerFolder[folderIdx]) {
|
|
409
|
-
// All files from this folder extracted, release cache
|
|
410
|
-
delete this.decompressedCache[folderIdx];
|
|
382
|
+
// Use true streaming for single-file folders that support it.
|
|
383
|
+
// Multi-file folders use buffered approach because streaming requires
|
|
384
|
+
// accessing files in order, which doesn't work with concurrent extraction.
|
|
385
|
+
const filesInFolder = this.filesPerFolder[entry._folderIndex] || 1;
|
|
386
|
+
if (entry._canStream && filesInFolder === 1) {
|
|
387
|
+
return this._getEntryStreamStreaming(entry);
|
|
411
388
|
}
|
|
412
|
-
return
|
|
389
|
+
return this._getEntryStreamBuffered(entry);
|
|
413
390
|
}
|
|
414
391
|
/**
|
|
415
|
-
*
|
|
416
|
-
*
|
|
417
|
-
*/
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
392
|
+
* True streaming: data flows through without buffering entire folder.
|
|
393
|
+
* Only used for single-file folders with streamable codecs (BZip2, Deflate, LZMA2).
|
|
394
|
+
*/ _getEntryStreamStreaming(entry) {
|
|
395
|
+
let started = false;
|
|
396
|
+
let destroyed = false;
|
|
397
|
+
let folderStream = null;
|
|
398
|
+
const stream = new PassThrough();
|
|
399
|
+
const originalRead = stream._read.bind(stream);
|
|
400
|
+
stream._read = (size)=>{
|
|
401
|
+
if (!started && !destroyed) {
|
|
402
|
+
started = true;
|
|
403
|
+
setTimeout(()=>{
|
|
404
|
+
if (destroyed) return;
|
|
405
|
+
try {
|
|
406
|
+
let crcValue = 0;
|
|
407
|
+
const verifyCrc = entry._crc !== undefined;
|
|
408
|
+
folderStream = this.streamFolder(entry._folderIndex);
|
|
409
|
+
folderStream.output.on('data', (chunk)=>{
|
|
410
|
+
if (destroyed) return;
|
|
411
|
+
if (verifyCrc) {
|
|
412
|
+
crcValue = crc32(chunk, crcValue);
|
|
413
|
+
}
|
|
414
|
+
if (!stream.write(chunk)) {
|
|
415
|
+
folderStream === null || folderStream === void 0 ? void 0 : folderStream.pause();
|
|
416
|
+
stream.once('drain', ()=>folderStream === null || folderStream === void 0 ? void 0 : folderStream.resume());
|
|
417
|
+
}
|
|
418
|
+
});
|
|
419
|
+
folderStream.output.on('end', ()=>{
|
|
420
|
+
if (destroyed) return;
|
|
421
|
+
if (verifyCrc && crcValue !== entry._crc) {
|
|
422
|
+
var _entry__crc;
|
|
423
|
+
stream.destroy(createCodedError(`CRC mismatch for ${entry.path}: expected ${(_entry__crc = entry._crc) === null || _entry__crc === void 0 ? void 0 : _entry__crc.toString(16)}, got ${crcValue.toString(16)}`, ErrorCode.CRC_MISMATCH));
|
|
424
|
+
return;
|
|
425
|
+
}
|
|
426
|
+
stream.end();
|
|
427
|
+
this.extractedPerFolder[entry._folderIndex] = (this.extractedPerFolder[entry._folderIndex] || 0) + 1;
|
|
428
|
+
});
|
|
429
|
+
folderStream.output.on('error', (err)=>{
|
|
430
|
+
if (!destroyed) stream.destroy(err);
|
|
431
|
+
});
|
|
432
|
+
} catch (err) {
|
|
433
|
+
if (!destroyed) {
|
|
434
|
+
stream.destroy(err);
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
}, 0);
|
|
442
438
|
}
|
|
439
|
+
return originalRead(size);
|
|
440
|
+
};
|
|
441
|
+
// Override destroy to clean up folder stream
|
|
442
|
+
// IMPORTANT: Emit error synchronously BEFORE calling original destroy.
|
|
443
|
+
// On older Node, destroy() emits 'finish' and 'end' before 'error',
|
|
444
|
+
// which causes piped streams to complete successfully before the error fires.
|
|
445
|
+
const streamWithDestroy = stream;
|
|
446
|
+
const originalDestroy = typeof streamWithDestroy.destroy === 'function' ? streamWithDestroy.destroy.bind(stream) : null;
|
|
447
|
+
streamWithDestroy.destroy = (err)=>{
|
|
448
|
+
destroyed = true;
|
|
449
|
+
if (err) stream.emit('error', err);
|
|
450
|
+
if (folderStream) folderStream.destroy();
|
|
451
|
+
if (originalDestroy) return originalDestroy();
|
|
452
|
+
return stream;
|
|
453
|
+
};
|
|
454
|
+
return stream;
|
|
455
|
+
}
|
|
456
|
+
/**
|
|
457
|
+
* Buffered extraction: decompress entire folder, slice out file.
|
|
458
|
+
* Used for codecs that don't support incremental streaming (LZMA1, BCJ2).
|
|
459
|
+
*/ _getEntryStreamBuffered(entry) {
|
|
460
|
+
if (!this.streamsInfo) {
|
|
461
|
+
throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
|
|
443
462
|
}
|
|
444
|
-
// Get decompressed data for this folder using async method
|
|
445
|
-
const folderIdx = entry._folderIndex;
|
|
446
463
|
const streamsInfo = this.streamsInfo;
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
464
|
+
const folderIdx = entry._folderIndex;
|
|
465
|
+
let started = false;
|
|
466
|
+
let destroyed = false;
|
|
467
|
+
const stream = new PassThrough();
|
|
468
|
+
const originalRead = stream._read.bind(stream);
|
|
469
|
+
stream._read = (size)=>{
|
|
470
|
+
if (!started && !destroyed) {
|
|
471
|
+
started = true;
|
|
472
|
+
setTimeout(()=>{
|
|
473
|
+
if (destroyed) return;
|
|
474
|
+
try {
|
|
475
|
+
const data = this.getDecompressedFolder(folderIdx);
|
|
476
|
+
let fileStart = 0;
|
|
477
|
+
for(let m = 0; m < entry._streamIndexInFolder; m++){
|
|
478
|
+
const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
|
|
479
|
+
fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
|
|
480
|
+
}
|
|
481
|
+
const fileSize = entry.size;
|
|
482
|
+
if (fileStart + fileSize > data.length) {
|
|
483
|
+
stream.destroy(createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED));
|
|
484
|
+
return;
|
|
485
|
+
}
|
|
486
|
+
const fileData = data.slice(fileStart, fileStart + fileSize);
|
|
487
|
+
if (entry._crc !== undefined) {
|
|
488
|
+
const actualCRC = crc32(fileData);
|
|
489
|
+
if (actualCRC !== entry._crc) {
|
|
490
|
+
stream.destroy(createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH));
|
|
491
|
+
return;
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
|
|
495
|
+
if (this.extractedPerFolder[folderIdx] >= this.filesPerFolder[folderIdx]) {
|
|
496
|
+
delete this.decompressedCache[folderIdx];
|
|
497
|
+
}
|
|
498
|
+
if (!destroyed) {
|
|
499
|
+
stream.push(fileData);
|
|
500
|
+
stream.push(null);
|
|
501
|
+
}
|
|
502
|
+
} catch (err) {
|
|
503
|
+
if (!destroyed) {
|
|
504
|
+
stream.destroy(err);
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
}, 0);
|
|
476
508
|
}
|
|
477
|
-
|
|
478
|
-
}
|
|
509
|
+
return originalRead(size);
|
|
510
|
+
};
|
|
511
|
+
// Override destroy to set destroyed flag
|
|
512
|
+
// IMPORTANT: Emit error synchronously BEFORE calling original destroy.
|
|
513
|
+
// On older Node, destroy() emits 'finish' and 'end' before 'error',
|
|
514
|
+
// which causes piped streams to complete successfully before the error fires.
|
|
515
|
+
const streamWithDestroy = stream;
|
|
516
|
+
const originalDestroy = typeof streamWithDestroy.destroy === 'function' ? streamWithDestroy.destroy.bind(stream) : null;
|
|
517
|
+
streamWithDestroy.destroy = (err)=>{
|
|
518
|
+
destroyed = true;
|
|
519
|
+
if (err) stream.emit('error', err);
|
|
520
|
+
if (originalDestroy) return originalDestroy();
|
|
521
|
+
return stream;
|
|
522
|
+
};
|
|
523
|
+
return stream;
|
|
479
524
|
}
|
|
480
525
|
/**
|
|
481
526
|
* Check if a folder uses BCJ2 codec
|
|
@@ -543,98 +588,6 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
543
588
|
return data2;
|
|
544
589
|
}
|
|
545
590
|
/**
|
|
546
|
-
* Get decompressed data for a folder using streaming (callback-based async)
|
|
547
|
-
* Uses createDecoder() streams for non-blocking decompression
|
|
548
|
-
*/ getDecompressedFolderAsync(folderIndex, callback) {
|
|
549
|
-
const self = this;
|
|
550
|
-
// Check cache first
|
|
551
|
-
if (this.decompressedCache[folderIndex]) return callback(null, this.decompressedCache[folderIndex]);
|
|
552
|
-
if (!this.streamsInfo) {
|
|
553
|
-
callback(createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER));
|
|
554
|
-
return;
|
|
555
|
-
}
|
|
556
|
-
const folder = this.streamsInfo.folders[folderIndex];
|
|
557
|
-
// Check how many files remain in this folder
|
|
558
|
-
const filesInFolder = this.filesPerFolder[folderIndex] || 1;
|
|
559
|
-
const extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
|
|
560
|
-
const remainingFiles = filesInFolder - extractedFromFolder;
|
|
561
|
-
const shouldCache = remainingFiles > 1;
|
|
562
|
-
// BCJ2 requires special handling - use sync version for now
|
|
563
|
-
// TODO: Add async BCJ2 support
|
|
564
|
-
if (this.folderHasBcj2(folder)) {
|
|
565
|
-
try {
|
|
566
|
-
const data = this.decompressBcj2Folder(folderIndex);
|
|
567
|
-
if (shouldCache) {
|
|
568
|
-
this.decompressedCache[folderIndex] = data;
|
|
569
|
-
}
|
|
570
|
-
callback(null, data);
|
|
571
|
-
} catch (err) {
|
|
572
|
-
callback(err);
|
|
573
|
-
}
|
|
574
|
-
return;
|
|
575
|
-
}
|
|
576
|
-
// Calculate packed data position
|
|
577
|
-
let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
|
|
578
|
-
// Find which pack stream this folder uses
|
|
579
|
-
let packStreamIndex = 0;
|
|
580
|
-
for(let j = 0; j < folderIndex; j++){
|
|
581
|
-
packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
|
|
582
|
-
}
|
|
583
|
-
// Calculate position of this pack stream
|
|
584
|
-
for(let k = 0; k < packStreamIndex; k++){
|
|
585
|
-
packPos += this.streamsInfo.packSizes[k];
|
|
586
|
-
}
|
|
587
|
-
const packSize = this.streamsInfo.packSizes[packStreamIndex];
|
|
588
|
-
// Read packed data
|
|
589
|
-
const packedData = this.source.read(packPos, packSize);
|
|
590
|
-
// Create decoder stream chain and decompress
|
|
591
|
-
const coders = folder.coders;
|
|
592
|
-
const unpackSizes = folder.unpackSizes;
|
|
593
|
-
// Helper to decompress through a single codec stream
|
|
594
|
-
function decompressWithStream(input, coderIdx, cb) {
|
|
595
|
-
const coderInfo = coders[coderIdx];
|
|
596
|
-
const codec = getCodec(coderInfo.id);
|
|
597
|
-
const decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
|
|
598
|
-
const chunks = [];
|
|
599
|
-
let errorOccurred = false;
|
|
600
|
-
decoder.on('data', (chunk)=>{
|
|
601
|
-
chunks.push(chunk);
|
|
602
|
-
});
|
|
603
|
-
oo(decoder, [
|
|
604
|
-
'error',
|
|
605
|
-
'end',
|
|
606
|
-
'close',
|
|
607
|
-
'finish'
|
|
608
|
-
], (err)=>{
|
|
609
|
-
if (errorOccurred) return;
|
|
610
|
-
if (err) {
|
|
611
|
-
errorOccurred = true;
|
|
612
|
-
return cb(err);
|
|
613
|
-
}
|
|
614
|
-
cb(null, Buffer.concat(chunks));
|
|
615
|
-
});
|
|
616
|
-
// Write input data to decoder and signal end
|
|
617
|
-
decoder.end(input);
|
|
618
|
-
}
|
|
619
|
-
// Chain decompression through all codecs
|
|
620
|
-
function decompressChain(input, idx) {
|
|
621
|
-
if (idx >= coders.length) {
|
|
622
|
-
// All done - cache and return
|
|
623
|
-
if (shouldCache) {
|
|
624
|
-
self.decompressedCache[folderIndex] = input;
|
|
625
|
-
}
|
|
626
|
-
callback(null, input);
|
|
627
|
-
return;
|
|
628
|
-
}
|
|
629
|
-
decompressWithStream(input, idx, (err, output)=>{
|
|
630
|
-
if (err) return callback(err);
|
|
631
|
-
decompressChain(output, idx + 1);
|
|
632
|
-
});
|
|
633
|
-
}
|
|
634
|
-
// Start the chain
|
|
635
|
-
decompressChain(packedData, 0);
|
|
636
|
-
}
|
|
637
|
-
/**
|
|
638
591
|
* Decompress a BCJ2 folder with multi-stream handling
|
|
639
592
|
* BCJ2 uses 4 input streams: main, call, jump, range coder
|
|
640
593
|
*/ decompressBcj2Folder(folderIndex) {
|
|
@@ -824,6 +777,267 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
824
777
|
this.source.close();
|
|
825
778
|
}
|
|
826
779
|
}
|
|
780
|
+
// ============================================================
|
|
781
|
+
// STREAMING METHODS (Phase 1+)
|
|
782
|
+
// ============================================================
|
|
783
|
+
/**
|
|
784
|
+
* Check if a codec supports true streaming decompression.
|
|
785
|
+
*
|
|
786
|
+
* Only codecs that process data incrementally (not buffering entire input) qualify.
|
|
787
|
+
* @param codecId - The codec ID as an array of bytes
|
|
788
|
+
* @returns true if the codec can stream
|
|
789
|
+
*/ codecSupportsStreaming(codecId) {
|
|
790
|
+
// Convert to string key for comparison
|
|
791
|
+
const key = codecId.map((b)=>b.toString(16).toUpperCase()).join('-');
|
|
792
|
+
// BZip2 - unbzip2-stream processes blocks incrementally
|
|
793
|
+
if (key === '4-2-2') return true;
|
|
794
|
+
// Copy/Store - PassThrough, obviously streams
|
|
795
|
+
if (key === '0') return true;
|
|
796
|
+
// Deflate - now uses zlib.createInflateRaw() which streams
|
|
797
|
+
if (key === '4-1-8') return true;
|
|
798
|
+
// Delta - now uses streaming Transform (Phase 2.5)
|
|
799
|
+
if (key === '3') return true;
|
|
800
|
+
// BCJ x86 - now uses streaming Transform (Phase 3.5)
|
|
801
|
+
if (key === '3-3-1-3') return true;
|
|
802
|
+
// BCJ ARM - now uses streaming Transform (Phase 3.5)
|
|
803
|
+
if (key === '3-3-1-5') return true;
|
|
804
|
+
// LZMA2 - now uses streaming Transform (Phase 5)
|
|
805
|
+
if (key === '21') return true;
|
|
806
|
+
// LZMA - still buffer-based (TODO: Phase 5 continuation)
|
|
807
|
+
// Other BCJ variants (ARM64, ARMT, IA64, PPC, SPARC) - still buffer-based
|
|
808
|
+
// BCJ2 - multi-stream architecture, never streamable
|
|
809
|
+
return false;
|
|
810
|
+
}
|
|
811
|
+
/**
|
|
812
|
+
* Check if a folder can be streamed (vs buffered).
|
|
813
|
+
*
|
|
814
|
+
* Streaming is possible when ALL codecs in the chain support streaming.
|
|
815
|
+
* BCJ2 folders are never streamable due to their 4-stream architecture.
|
|
816
|
+
*
|
|
817
|
+
* @param folderIndex - Index of the folder to check
|
|
818
|
+
* @returns true if the folder can be streamed
|
|
819
|
+
*/ canStreamFolder(folderIndex) {
|
|
820
|
+
if (!this.streamsInfo) return false;
|
|
821
|
+
const folder = this.streamsInfo.folders[folderIndex];
|
|
822
|
+
if (!folder) return false;
|
|
823
|
+
// BCJ2 requires special multi-stream handling - not streamable
|
|
824
|
+
if (this.folderHasBcj2(folder)) {
|
|
825
|
+
return false;
|
|
826
|
+
}
|
|
827
|
+
// Check if ALL codecs in chain support streaming
|
|
828
|
+
for(let i = 0; i < folder.coders.length; i++){
|
|
829
|
+
if (!this.codecSupportsStreaming(folder.coders[i].id)) {
|
|
830
|
+
return false;
|
|
831
|
+
}
|
|
832
|
+
}
|
|
833
|
+
return true;
|
|
834
|
+
}
|
|
835
|
+
/**
|
|
836
|
+
* Stream a folder's decompression.
|
|
837
|
+
*
|
|
838
|
+
* Creates a pipeline: packed data → codec decoders → output stream
|
|
839
|
+
*
|
|
840
|
+
* @param folderIndex - Index of folder to decompress
|
|
841
|
+
* @returns Object with output stream and control methods
|
|
842
|
+
*/ streamFolder(folderIndex) {
|
|
843
|
+
if (!this.streamsInfo) {
|
|
844
|
+
throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
|
|
845
|
+
}
|
|
846
|
+
if (!this.canStreamFolder(folderIndex)) {
|
|
847
|
+
throw createCodedError('Folder does not support streaming', ErrorCode.UNSUPPORTED_CODEC);
|
|
848
|
+
}
|
|
849
|
+
const folder = this.streamsInfo.folders[folderIndex];
|
|
850
|
+
// Calculate packed data position
|
|
851
|
+
let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
|
|
852
|
+
// Find which pack stream this folder uses
|
|
853
|
+
let packStreamIndex = 0;
|
|
854
|
+
for(let j = 0; j < folderIndex; j++){
|
|
855
|
+
packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
|
|
856
|
+
}
|
|
857
|
+
// Calculate position of this pack stream
|
|
858
|
+
for(let k = 0; k < packStreamIndex; k++){
|
|
859
|
+
packPos += this.streamsInfo.packSizes[k];
|
|
860
|
+
}
|
|
861
|
+
const packSize = this.streamsInfo.packSizes[packStreamIndex];
|
|
862
|
+
// Create readable stream from packed data
|
|
863
|
+
const packedStream = this.source.createReadStream(packPos, packSize);
|
|
864
|
+
// Build codec pipeline
|
|
865
|
+
let stream = packedStream;
|
|
866
|
+
const decoders = [];
|
|
867
|
+
for(let i = 0; i < folder.coders.length; i++){
|
|
868
|
+
const coderInfo = folder.coders[i];
|
|
869
|
+
const codec = getCodec(coderInfo.id);
|
|
870
|
+
const unpackSize = folder.unpackSizes[i];
|
|
871
|
+
const decoder = codec.createDecoder(coderInfo.properties, unpackSize);
|
|
872
|
+
decoders.push(decoder);
|
|
873
|
+
stream = stream.pipe(decoder);
|
|
874
|
+
}
|
|
875
|
+
return {
|
|
876
|
+
output: stream,
|
|
877
|
+
pause: ()=>packedStream.pause(),
|
|
878
|
+
resume: ()=>packedStream.resume(),
|
|
879
|
+
destroy: (err)=>{
|
|
880
|
+
// Check for destroy method existence (not available in Node 4 and earlier)
|
|
881
|
+
const ps = packedStream;
|
|
882
|
+
if (typeof ps.destroy === 'function') ps.destroy(err);
|
|
883
|
+
for(let i = 0; i < decoders.length; i++){
|
|
884
|
+
const d = decoders[i];
|
|
885
|
+
if (typeof d.destroy === 'function') d.destroy(err);
|
|
886
|
+
}
|
|
887
|
+
}
|
|
888
|
+
};
|
|
889
|
+
}
|
|
890
|
+
/**
|
|
891
|
+
* Get a streaming entry stream (Promise-based API).
|
|
892
|
+
*
|
|
893
|
+
* For streamable folders: Returns a true streaming decompression
|
|
894
|
+
* For non-streamable folders: Falls back to buffered extraction
|
|
895
|
+
*
|
|
896
|
+
* @param entry - The entry to get stream for
|
|
897
|
+
* @returns Promise resolving to readable stream
|
|
898
|
+
*/ async getEntryStreamStreaming(entry) {
|
|
899
|
+
if (!entry._hasStream || entry.type === 'directory') {
|
|
900
|
+
const emptyStream = new PassThrough();
|
|
901
|
+
emptyStream.end();
|
|
902
|
+
return emptyStream;
|
|
903
|
+
}
|
|
904
|
+
const folderIndex = entry._folderIndex;
|
|
905
|
+
// Fall back to buffered if not streamable
|
|
906
|
+
if (!this.canStreamFolder(folderIndex)) {
|
|
907
|
+
return this.getEntryStream(entry);
|
|
908
|
+
}
|
|
909
|
+
const filesInFolder = this.filesPerFolder[folderIndex] || 1;
|
|
910
|
+
if (filesInFolder === 1) {
|
|
911
|
+
// Single file - direct streaming
|
|
912
|
+
return this.getEntryStreamDirect(entry);
|
|
913
|
+
}
|
|
914
|
+
// Multi-file folders use FolderStreamSplitter (Phase 2)
|
|
915
|
+
return this.getEntryStreamFromSplitter(entry);
|
|
916
|
+
}
|
|
917
|
+
/**
|
|
918
|
+
* Direct streaming for single-file folders.
|
|
919
|
+
* Pipes folder decompression directly to output with CRC verification.
|
|
920
|
+
*/ getEntryStreamDirect(entry) {
|
|
921
|
+
return new Promise((resolve, reject)=>{
|
|
922
|
+
const outputStream = new PassThrough();
|
|
923
|
+
let crcValue = 0;
|
|
924
|
+
const verifyCrc = entry._crc !== undefined;
|
|
925
|
+
try {
|
|
926
|
+
const folderStream = this.streamFolder(entry._folderIndex);
|
|
927
|
+
folderStream.output.on('data', (chunk)=>{
|
|
928
|
+
if (verifyCrc) {
|
|
929
|
+
crcValue = crc32(chunk, crcValue);
|
|
930
|
+
}
|
|
931
|
+
// Handle backpressure
|
|
932
|
+
if (!outputStream.write(chunk)) {
|
|
933
|
+
folderStream.pause();
|
|
934
|
+
outputStream.once('drain', ()=>folderStream.resume());
|
|
935
|
+
}
|
|
936
|
+
});
|
|
937
|
+
folderStream.output.on('end', ()=>{
|
|
938
|
+
// Verify CRC
|
|
939
|
+
if (verifyCrc && crcValue !== entry._crc) {
|
|
940
|
+
var _entry__crc;
|
|
941
|
+
const err = createCodedError(`CRC mismatch for ${entry.path}: expected ${(_entry__crc = entry._crc) === null || _entry__crc === void 0 ? void 0 : _entry__crc.toString(16)}, got ${crcValue.toString(16)}`, ErrorCode.CRC_MISMATCH);
|
|
942
|
+
outputStream.destroy(err);
|
|
943
|
+
return;
|
|
944
|
+
}
|
|
945
|
+
outputStream.end();
|
|
946
|
+
// Track extraction
|
|
947
|
+
this.extractedPerFolder[entry._folderIndex] = (this.extractedPerFolder[entry._folderIndex] || 0) + 1;
|
|
948
|
+
});
|
|
949
|
+
folderStream.output.on('error', (err)=>{
|
|
950
|
+
outputStream.destroy(err);
|
|
951
|
+
});
|
|
952
|
+
resolve(outputStream);
|
|
953
|
+
} catch (err) {
|
|
954
|
+
reject(err);
|
|
955
|
+
}
|
|
956
|
+
});
|
|
957
|
+
}
|
|
958
|
+
/**
 * Get stream from folder splitter (for multi-file folders).
 * Creates splitter on first access, reuses for subsequent files in same folder.
 *
 * The first call for a folder also kicks off decompression of that folder
 * (`streamFolder`) and pumps its output into the splitter; later calls only
 * fetch their per-file stream from the cached splitter.
 *
 * Note: the `splitter === null || splitter === void 0 ? ...` expressions are
 * transpiler output for optional chaining (`splitter?.`); at this point
 * `splitter` is always set, so they behave as plain calls.
 */ getEntryStreamFromSplitter(entry) {
    return new Promise((resolve, reject)=>{
        const folderIndex = entry._folderIndex;
        // Get or create splitter for this folder
        let splitter = this.folderSplitters[folderIndex];
        if (!splitter) {
            // Create new splitter with file sizes and CRCs
            const folderInfo = this.getFolderFileInfo(folderIndex);
            splitter = new FolderStreamSplitter({
                fileSizes: folderInfo.fileSizes,
                verifyCrc: true,
                expectedCrcs: folderInfo.expectedCrcs
            });
            this.folderSplitters[folderIndex] = splitter;
            // Start streaming the folder
            let folderStream;
            try {
                folderStream = this.streamFolder(folderIndex);
            } catch (err) {
                // Undo the cache entry so a retry can rebuild the splitter
                delete this.folderSplitters[folderIndex];
                reject(err);
                return;
            }
            folderStream.output.on('data', (chunk)=>{
                // Handle backpressure from splitter
                if (!(splitter === null || splitter === void 0 ? void 0 : splitter.write(chunk))) {
                    folderStream.pause();
                    splitter === null || splitter === void 0 ? void 0 : splitter.onDrain(()=>{
                        folderStream.resume();
                    });
                }
            });
            folderStream.output.on('end', ()=>{
                // Flush remaining bytes into the per-file streams and drop the cache
                splitter === null || splitter === void 0 ? void 0 : splitter.end();
                delete this.folderSplitters[folderIndex];
            });
            folderStream.output.on('error', (_err)=>{
                // NOTE(review): the decode error is dropped here — consumers of the
                // per-file streams only see an early end(), presumably relying on the
                // splitter's CRC verification to surface corruption. Confirm whether
                // FolderStreamSplitter has an error/destroy path that should be used.
                splitter === null || splitter === void 0 ? void 0 : splitter.end();
                delete this.folderSplitters[folderIndex];
            });
        }
        // Get this entry's stream from splitter
        try {
            const fileStream = splitter.getFileStream(entry._streamIndexInFolder);
            // Track extraction when stream ends
            fileStream.on('end', ()=>{
                this.extractedPerFolder[folderIndex] = (this.extractedPerFolder[folderIndex] || 0) + 1;
            });
            resolve(fileStream);
        } catch (err) {
            reject(err);
        }
    });
}
|
|
1015
|
+
/**
|
|
1016
|
+
* Get file sizes and CRCs for all files in a folder (in stream order).
|
|
1017
|
+
* Used by FolderStreamSplitter to know file boundaries.
|
|
1018
|
+
*/ getFolderFileInfo(folderIndex) {
|
|
1019
|
+
const fileSizes = [];
|
|
1020
|
+
const expectedCrcs = [];
|
|
1021
|
+
// Collect entries in this folder, sorted by stream index
|
|
1022
|
+
const folderEntries = [];
|
|
1023
|
+
for(let i = 0; i < this.entries.length; i++){
|
|
1024
|
+
const e = this.entries[i];
|
|
1025
|
+
if (e._folderIndex === folderIndex && e._hasStream) {
|
|
1026
|
+
folderEntries.push(e);
|
|
1027
|
+
}
|
|
1028
|
+
}
|
|
1029
|
+
// Sort by stream index within folder
|
|
1030
|
+
folderEntries.sort((a, b)=>a._streamIndexInFolder - b._streamIndexInFolder);
|
|
1031
|
+
for(let i = 0; i < folderEntries.length; i++){
|
|
1032
|
+
const entry = folderEntries[i];
|
|
1033
|
+
fileSizes.push(entry.size);
|
|
1034
|
+
expectedCrcs.push(entry._crc);
|
|
1035
|
+
}
|
|
1036
|
+
return {
|
|
1037
|
+
fileSizes: fileSizes,
|
|
1038
|
+
expectedCrcs: expectedCrcs
|
|
1039
|
+
};
|
|
1040
|
+
}
|
|
827
1041
|
constructor(source){
|
|
828
1042
|
this.signature = null;
|
|
829
1043
|
this.streamsInfo = null;
|
|
@@ -836,6 +1050,8 @@ export { BufferSource, FileSource } from './ArchiveSource.js';
|
|
|
836
1050
|
// Track files per folder and how many have been extracted
|
|
837
1051
|
this.filesPerFolder = {};
|
|
838
1052
|
this.extractedPerFolder = {};
|
|
1053
|
+
// Splitter cache for multi-file folder streaming (Phase 2)
|
|
1054
|
+
this.folderSplitters = {};
|
|
839
1055
|
this.source = source;
|
|
840
1056
|
}
|
|
841
1057
|
}
|