7z-iterator 2.0.3 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/dist/cjs/FileEntry.js.map +1 -1
  2. package/dist/cjs/SevenZipIterator.js +13 -8
  3. package/dist/cjs/SevenZipIterator.js.map +1 -1
  4. package/dist/cjs/compat.js.map +1 -1
  5. package/dist/cjs/index.js.map +1 -1
  6. package/dist/cjs/lib/defer.js.map +1 -1
  7. package/dist/cjs/lib/runDecode.d.cts +5 -0
  8. package/dist/cjs/lib/runDecode.d.ts +5 -0
  9. package/dist/cjs/lib/runDecode.js +55 -0
  10. package/dist/cjs/lib/runDecode.js.map +1 -0
  11. package/dist/cjs/lib/streamToSource.js.map +1 -1
  12. package/dist/cjs/nextEntry.js.map +1 -1
  13. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  14. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -1
  15. package/dist/cjs/sevenz/NumberCodec.js.map +1 -1
  16. package/dist/cjs/sevenz/SevenZipParser.d.cts +12 -1
  17. package/dist/cjs/sevenz/SevenZipParser.d.ts +12 -1
  18. package/dist/cjs/sevenz/SevenZipParser.js +325 -217
  19. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  20. package/dist/cjs/sevenz/codecs/Aes.js.map +1 -1
  21. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  22. package/dist/cjs/sevenz/codecs/Bcj2.js.map +1 -1
  23. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  24. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  25. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -1
  26. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -1
  27. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +24 -4
  28. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  29. package/dist/cjs/sevenz/codecs/index.d.cts +2 -1
  30. package/dist/cjs/sevenz/codecs/index.d.ts +2 -1
  31. package/dist/cjs/sevenz/codecs/index.js +28 -16
  32. package/dist/cjs/sevenz/codecs/index.js.map +1 -1
  33. package/dist/cjs/sevenz/codecs/streams.js.map +1 -1
  34. package/dist/cjs/sevenz/constants.js.map +1 -1
  35. package/dist/cjs/sevenz/headers.js.map +1 -1
  36. package/dist/cjs/sevenz/index.d.cts +1 -1
  37. package/dist/cjs/sevenz/index.d.ts +1 -1
  38. package/dist/cjs/sevenz/index.js.map +1 -1
  39. package/dist/cjs/types.js.map +1 -1
  40. package/dist/esm/FileEntry.js.map +1 -1
  41. package/dist/esm/SevenZipIterator.js +13 -8
  42. package/dist/esm/SevenZipIterator.js.map +1 -1
  43. package/dist/esm/compat.js.map +1 -1
  44. package/dist/esm/index.js.map +1 -1
  45. package/dist/esm/lib/defer.js.map +1 -1
  46. package/dist/esm/lib/runDecode.d.ts +5 -0
  47. package/dist/esm/lib/runDecode.js +29 -0
  48. package/dist/esm/lib/runDecode.js.map +1 -0
  49. package/dist/esm/lib/streamToSource.js.map +1 -1
  50. package/dist/esm/nextEntry.js.map +1 -1
  51. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  52. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -1
  53. package/dist/esm/sevenz/NumberCodec.js.map +1 -1
  54. package/dist/esm/sevenz/SevenZipParser.d.ts +12 -1
  55. package/dist/esm/sevenz/SevenZipParser.js +308 -218
  56. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  57. package/dist/esm/sevenz/codecs/Aes.js.map +1 -1
  58. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  59. package/dist/esm/sevenz/codecs/Bcj2.js.map +1 -1
  60. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  61. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  62. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -1
  63. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +19 -4
  64. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  65. package/dist/esm/sevenz/codecs/index.d.ts +2 -1
  66. package/dist/esm/sevenz/codecs/index.js +28 -16
  67. package/dist/esm/sevenz/codecs/index.js.map +1 -1
  68. package/dist/esm/sevenz/codecs/streams.js.map +1 -1
  69. package/dist/esm/sevenz/constants.js.map +1 -1
  70. package/dist/esm/sevenz/headers.js.map +1 -1
  71. package/dist/esm/sevenz/index.d.ts +1 -1
  72. package/dist/esm/sevenz/index.js.map +1 -1
  73. package/dist/esm/types.js.map +1 -1
  74. package/package.json +2 -2
@@ -37,6 +37,7 @@ _export(exports, {
  return SevenZipParser;
  }
  });
+ var _calloncefn = /*#__PURE__*/ _interop_require_default(require("call-once-fn"));
  var _extractbaseiterator = require("extract-base-iterator");
  var _deferts = require("../lib/defer.js");
  var _indexts = require("./codecs/index.js");
@@ -79,6 +80,18 @@ function _class_call_check(instance, Constructor) {
  throw new TypeError("Cannot call a class as a function");
  }
  }
+ function _instanceof(left, right) {
+ if (right != null && typeof Symbol !== "undefined" && right[Symbol.hasInstance]) {
+ return !!right[Symbol.hasInstance](left);
+ } else {
+ return left instanceof right;
+ }
+ }
+ function _interop_require_default(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
  function _ts_generator(thisArg, body) {
  var f, y, t, _ = {
  label: 0,
@@ -195,129 +208,194 @@ var SevenZipParser = /*#__PURE__*/ function() {
  this.extractedPerFolder = {};
  // Splitter cache for multi-file folder streaming (Phase 2)
  this.folderSplitters = {};
+ this.pendingFolders = {};
  this.source = source;
  }
  var _proto = SevenZipParser.prototype;
+ _proto.decodeWithCodec = function decodeWithCodec(codec, input, properties, unpackSize, callback) {
+ var done = (0, _calloncefn.default)(callback);
+ try {
+ codec.decode(input, properties, unpackSize, function(err, result) {
+ if (err) return done(err);
+ if (!result) return done((0, _constantsts.createCodedError)('Decoder returned no data', _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+ done(null, result);
+ });
+ } catch (err) {
+ done(err);
+ }
+ };
  /**
  * Parse the archive structure
  * Must be called before iterating entries
- */ _proto.parse = function parse() {
- if (this.parsed) return;
- // Read signature header
- var sigBuf = this.source.read(0, _constantsts.SIGNATURE_HEADER_SIZE);
- if (sigBuf.length < _constantsts.SIGNATURE_HEADER_SIZE) {
- throw (0, _constantsts.createCodedError)('Archive too small', _constantsts.ErrorCode.TRUNCATED_ARCHIVE);
- }
- this.signature = (0, _headersts.parseSignatureHeader)(sigBuf);
- // Read encoded header
- var headerOffset = _constantsts.SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
- var headerBuf = this.source.read(headerOffset, this.signature.nextHeaderSize);
- if (headerBuf.length < this.signature.nextHeaderSize) {
- throw (0, _constantsts.createCodedError)('Truncated header', _constantsts.ErrorCode.TRUNCATED_ARCHIVE);
- }
- // Parse encoded header (may need decompression)
+ */ _proto.parse = function parse(callback) {
+ var _this = this;
+ if (this.parsed) {
+ if (typeof callback === 'function') {
+ callback(null);
+ return;
+ }
+ if (typeof Promise === 'undefined') {
+ return;
+ }
+ return Promise.resolve();
+ }
+ var executor = function(done) {
+ _this.parseInternal(done);
+ };
+ if (typeof callback === 'function') {
+ executor(callback);
+ return;
+ }
+ if (typeof Promise === 'undefined') {
+ throw new Error('Promises are not available in this runtime. Please provide a callback to parse().');
+ }
+ return new Promise(function(resolve, reject) {
+ executor(function(err) {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ };
+ _proto.parseInternal = function parseInternal(callback) {
+ var _this = this;
+ if (this.parsed) {
+ callback(null);
+ return;
+ }
+ var signature;
+ var headerBuf;
+ try {
+ var sigBuf = this.source.read(0, _constantsts.SIGNATURE_HEADER_SIZE);
+ if (sigBuf.length < _constantsts.SIGNATURE_HEADER_SIZE) {
+ callback((0, _constantsts.createCodedError)('Archive too small', _constantsts.ErrorCode.TRUNCATED_ARCHIVE));
+ return;
+ }
+ signature = (0, _headersts.parseSignatureHeader)(sigBuf);
+ this.signature = signature;
+ var headerOffset = _constantsts.SIGNATURE_HEADER_SIZE + signature.nextHeaderOffset;
+ headerBuf = this.source.read(headerOffset, signature.nextHeaderSize);
+ if (headerBuf.length < signature.nextHeaderSize) {
+ callback((0, _constantsts.createCodedError)('Truncated header', _constantsts.ErrorCode.TRUNCATED_ARCHIVE));
+ return;
+ }
+ } catch (err) {
+ callback(err);
+ return;
+ }
+ var finalize = function() {
+ try {
+ _this.buildEntries();
+ _this.parsed = true;
+ callback(null);
+ } catch (err) {
+ callback(err);
+ }
+ };
  try {
- var headerResult = (0, _headersts.parseEncodedHeader)(headerBuf, this.signature.nextHeaderCRC);
+ var _ref;
+ var _this_signature;
+ var headerResult = (0, _headersts.parseEncodedHeader)(headerBuf, (_ref = (_this_signature = this.signature) === null || _this_signature === void 0 ? void 0 : _this_signature.nextHeaderCRC) !== null && _ref !== void 0 ? _ref : 0);
  this.streamsInfo = headerResult.streamsInfo || null;
  this.filesInfo = headerResult.filesInfo;
+ finalize();
  } catch (err) {
  var codedErr = err;
  if (codedErr && codedErr.code === _constantsts.ErrorCode.COMPRESSED_HEADER) {
- // Header is compressed - need to decompress first
- this.handleCompressedHeader(headerBuf);
+ this.handleCompressedHeader(headerBuf, function(headerErr) {
+ if (headerErr) {
+ callback(headerErr);
+ return;
+ }
+ finalize();
+ });
  } else {
- throw err;
+ callback(err);
  }
  }
- // Build entries list
- this.buildEntries();
- this.parsed = true;
  };
  /**
  * Handle compressed header (kEncodedHeader)
- */ _proto.handleCompressedHeader = function handleCompressedHeader(headerBuf) {
+ */ _proto.handleCompressedHeader = function handleCompressedHeader(headerBuf, callback) {
+ var _this = this;
  // Parse the encoded header info to get decompression parameters
  var offset = 1; // Skip kEncodedHeader byte
- // Should have StreamsInfo for the header itself
  var propertyId = headerBuf[offset++];
  if (propertyId !== _constantsts.PropertyId.kMainStreamsInfo && propertyId !== _constantsts.PropertyId.kPackInfo) {
- throw (0, _constantsts.createCodedError)('Expected StreamsInfo in encoded header', _constantsts.ErrorCode.CORRUPT_HEADER);
- }
- // For now, we parse the streams info from the encoded header block
- // This tells us how to decompress the actual header
- // Read pack info from the encoded header structure
- var packInfoResult = this.parseEncodedHeaderStreams(headerBuf, 1);
- // Calculate compressed header position
- // For simple archives: header is at SIGNATURE_HEADER_SIZE + packPos
- // For BCJ2/complex archives: header may be at the END of pack data area
- // The pack data area ends at nextHeaderOffset (where encoded header starts)
- var compressedStart = _constantsts.SIGNATURE_HEADER_SIZE + packInfoResult.packPos;
- var compressedData = this.source.read(compressedStart, packInfoResult.packSize);
- // Decompress using the specified codec
- var codec = (0, _indexts.getCodec)(packInfoResult.codecId);
- var decompressedHeader = null;
- // Try decompressing from the calculated position first
+ callback((0, _constantsts.createCodedError)('Expected StreamsInfo in encoded header', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
+ var packInfoResult;
  try {
- decompressedHeader = codec.decode(compressedData, packInfoResult.properties, packInfoResult.unpackSize);
- // Verify CRC if present
- if (packInfoResult.unpackCRC !== undefined) {
- var actualCRC = (0, _extractbaseiterator.crc32)(decompressedHeader);
- if (actualCRC !== packInfoResult.unpackCRC) {
- decompressedHeader = null; // CRC mismatch, need to search
- }
- }
- } catch (unused) {
- decompressedHeader = null; // Decompression failed, need to search
+ packInfoResult = this.parseEncodedHeaderStreams(headerBuf, 1);
+ } catch (err) {
+ callback(err);
+ return;
  }
- // If initial decompression failed, search for the correct position as a fallback
- // This handles edge cases where packPos doesn't point directly to header pack data
- if (decompressedHeader === null && this.signature) {
+ var codec = (0, _indexts.getCodec)(packInfoResult.codecId);
+ var candidates = [];
+ var compressedStart = _constantsts.SIGNATURE_HEADER_SIZE + packInfoResult.packPos;
+ candidates.push(this.source.read(compressedStart, packInfoResult.packSize));
+ if (this.signature) {
  var packAreaEnd = _constantsts.SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
  var searchStart = packAreaEnd - packInfoResult.packSize;
  var searchEnd = Math.max(_constantsts.SIGNATURE_HEADER_SIZE, compressedStart - 100000);
- // Scan for LZMA data starting with 0x00 (range coder init)
- // Try each candidate and validate with CRC
  var scanChunkSize = 4096;
- searchLoop: for(var chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
+ for(var chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
  var chunk = this.source.read(chunkStart, scanChunkSize + packInfoResult.packSize);
- for(var i = 0; i < Math.min(chunk.length, scanChunkSize); i++){
+ var limit = Math.min(chunk.length, scanChunkSize);
+ for(var i = 0; i < limit; i++){
  if (chunk[i] === 0x00) {
- var candidateData = chunk.subarray(i, i + packInfoResult.packSize);
- if (candidateData.length === packInfoResult.packSize) {
- try {
- var candidateDecompressed = codec.decode(candidateData, packInfoResult.properties, packInfoResult.unpackSize);
- if (packInfoResult.unpackCRC !== undefined) {
- var candCRC = (0, _extractbaseiterator.crc32)(candidateDecompressed);
- if (candCRC === packInfoResult.unpackCRC) {
- decompressedHeader = candidateDecompressed;
- break searchLoop;
- }
- } else {
- decompressedHeader = candidateDecompressed;
- break searchLoop;
- }
- } catch (unused) {
- // Decompression failed, continue searching
+ var end = i + packInfoResult.packSize;
+ if (end <= chunk.length) {
+ var candidateData = chunk.slice(i, end);
+ if (candidateData.length === packInfoResult.packSize) {
+ candidates.push(candidateData);
  }
  }
  }
  }
  }
  }
- if (decompressedHeader === null) {
- throw (0, _constantsts.createCodedError)('Failed to decompress header - could not find valid LZMA data', _constantsts.ErrorCode.CORRUPT_HEADER);
- }
- // Now parse the decompressed header
- // It should start with kHeader
+ var tryCandidate = function(index) {
+ if (index >= candidates.length) {
+ callback((0, _constantsts.createCodedError)('Failed to decompress header - could not find valid LZMA data', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
+ _this.decodeWithCodec(codec, candidates[index], packInfoResult.properties, packInfoResult.unpackSize, function(err, decompressed) {
+ if (err || !decompressed) {
+ tryCandidate(index + 1);
+ return;
+ }
+ if (packInfoResult.unpackCRC !== undefined) {
+ var actualCRC = (0, _extractbaseiterator.crc32)(decompressed);
+ if (actualCRC !== packInfoResult.unpackCRC) {
+ tryCandidate(index + 1);
+ return;
+ }
+ }
+ _this.parseDecompressedHeader(decompressed, callback);
+ });
+ };
+ tryCandidate(0);
+ };
+ _proto.parseDecompressedHeader = function parseDecompressedHeader(decompressedHeader, callback) {
  var decompOffset = 0;
  var headerId = decompressedHeader[decompOffset++];
  if (headerId !== _constantsts.PropertyId.kHeader) {
- throw (0, _constantsts.createCodedError)('Expected kHeader in decompressed header', _constantsts.ErrorCode.CORRUPT_HEADER);
+ callback((0, _constantsts.createCodedError)('Expected kHeader in decompressed header', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
+ try {
+ var result = (0, _headersts.parseHeaderContent)(decompressedHeader, decompOffset);
+ this.streamsInfo = result.streamsInfo || null;
+ this.filesInfo = result.filesInfo;
+ callback(null);
+ } catch (err) {
+ callback(err);
  }
- // Parse the decompressed header using shared function from headers.ts
- var result = (0, _headersts.parseHeaderContent)(decompressedHeader, decompOffset);
- this.streamsInfo = result.streamsInfo || null;
- this.filesInfo = result.filesInfo;
  };
  /**
  * Parse streams info from encoded header block
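
Note on the decodeWithCodec helper introduced above: it wraps the caller's callback with call-once-fn so that a codec which both throws synchronously and later invokes its callback cannot settle the same operation twice. A minimal sketch of that call-once guard, for illustration only (the published code uses the call-once-fn package, not this helper):

function callOnce(fn) {
  var called = false;
  return function () {
    if (called) return; // any second invocation is silently ignored
    called = true;
    return fn.apply(this, arguments);
  };
}
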
@@ -519,7 +597,7 @@ var SevenZipParser = /*#__PURE__*/ function() {
  * Get the list of entries
  */ _proto.getEntries = function getEntries() {
  if (!this.parsed) {
- this.parse();
+ throw new Error('SevenZipParser has not been parsed yet. Call parse(callback) before accessing entries.');
  }
  return this.entries;
  };
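
Because getEntries() no longer parses implicitly, callers must complete parse() first. A hedged usage sketch, assuming `parser` is an already constructed SevenZipParser instance and `handleError` is a placeholder (neither appears in this diff):

// Callback style - works without Promise support:
parser.parse(function (err) {
  if (err) return handleError(err);
  var entries = parser.getEntries(); // safe only after parse() has completed
  console.log(entries.length + ' entries');
});

// Promise style - used when no callback is passed and Promise is available:
parser.parse().then(function () {
  console.log(parser.getEntries().length + ' entries');
});
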
@@ -646,39 +724,43 @@ var SevenZipParser = /*#__PURE__*/ function() {
  started = true;
  (0, _deferts.defer)(function() {
  if (destroyed) return;
- try {
- var data = _this.getDecompressedFolder(folderIdx);
- var fileStart = 0;
- for(var m = 0; m < entry._streamIndexInFolder; m++){
- var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
- fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
- }
- var fileSize = entry.size;
- if (fileStart + fileSize > data.length) {
- stream.destroy((0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+ _this.getDecompressedFolder(folderIdx, function(err, data) {
+ if (destroyed) return;
+ if (err || !data) {
+ stream.destroy(err || (0, _constantsts.createCodedError)('Unable to decompress folder', _constantsts.ErrorCode.DECOMPRESSION_FAILED));
  return;
  }
- var fileData = data.slice(fileStart, fileStart + fileSize);
- if (entry._crc !== undefined) {
- var actualCRC = (0, _extractbaseiterator.crc32)(fileData);
- if (actualCRC !== entry._crc) {
- stream.destroy((0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(entry._crc.toString(16), ", got ").concat(actualCRC.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH));
+ try {
+ var fileStart = 0;
+ for(var m = 0; m < entry._streamIndexInFolder; m++){
+ var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
+ fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
+ }
+ var fileSize = entry.size;
+ if (fileStart + fileSize > data.length) {
+ stream.destroy((0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED));
  return;
  }
+ var fileData = data.slice(fileStart, fileStart + fileSize);
+ if (entry._crc !== undefined) {
+ var actualCRC = (0, _extractbaseiterator.crc32)(fileData);
+ if (actualCRC !== entry._crc) {
+ stream.destroy((0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(entry._crc.toString(16), ", got ").concat(actualCRC.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH));
+ return;
+ }
+ }
+ _this.extractedPerFolder[folderIdx] = (_this.extractedPerFolder[folderIdx] || 0) + 1;
+ if (_this.extractedPerFolder[folderIdx] >= _this.filesPerFolder[folderIdx]) {
+ delete _this.decompressedCache[folderIdx];
+ }
+ if (!destroyed) {
+ stream.push(fileData);
+ stream.push(null);
+ }
+ } catch (decodeErr) {
+ stream.destroy(decodeErr);
  }
- _this.extractedPerFolder[folderIdx] = (_this.extractedPerFolder[folderIdx] || 0) + 1;
- if (_this.extractedPerFolder[folderIdx] >= _this.filesPerFolder[folderIdx]) {
- delete _this.decompressedCache[folderIdx];
- }
- if (!destroyed) {
- stream.push(fileData);
- stream.push(null);
- }
- } catch (err) {
- if (!destroyed) {
- stream.destroy(err);
- }
- }
+ });
  });
  }
  return originalRead(size);
@@ -710,97 +792,140 @@ var SevenZipParser = /*#__PURE__*/ function() {
  /**
  * Get decompressed data for a folder, with smart caching for solid archives
  * Only caches when multiple files share a block, releases when last file extracted
- */ _proto.getDecompressedFolder = function getDecompressedFolder(folderIndex) {
- // Check cache first
+ */ _proto.getDecompressedFolder = function getDecompressedFolder(folderIndex, callback) {
+ var _this = this;
  if (this.decompressedCache[folderIndex]) {
- return this.decompressedCache[folderIndex];
+ callback(null, this.decompressedCache[folderIndex]);
+ return;
+ }
+ if (this.pendingFolders[folderIndex]) {
+ this.pendingFolders[folderIndex].push(callback);
+ return;
  }
  if (!this.streamsInfo) {
- throw (0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER);
+ callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
  }
- var folder = this.streamsInfo.folders[folderIndex];
- // Check how many files remain in this folder
+ this.pendingFolders[folderIndex] = [
+ callback
+ ];
+ this.decodeFolderData(folderIndex, function(err, data) {
+ var waiters = _this.pendingFolders[folderIndex] || [];
+ delete _this.pendingFolders[folderIndex];
+ if (err || !data) {
+ for(var i = 0; i < waiters.length; i++){
+ waiters[i](err || (0, _constantsts.createCodedError)('Decoder returned no data', _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+ }
+ return;
+ }
+ if (_this.shouldCacheFolder(folderIndex)) {
+ _this.decompressedCache[folderIndex] = data;
+ }
+ for(var i1 = 0; i1 < waiters.length; i1++){
+ waiters[i1](null, data);
+ }
+ });
+ };
+ _proto.shouldCacheFolder = function shouldCacheFolder(folderIndex) {
  var filesInFolder = this.filesPerFolder[folderIndex] || 1;
  var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
- var remainingFiles = filesInFolder - extractedFromFolder;
- // Only cache if more than 1 file remains (including the current one being extracted)
- var shouldCache = remainingFiles > 1;
- // Check if this folder uses BCJ2 (requires special multi-stream handling)
+ return filesInFolder - extractedFromFolder > 1;
+ };
+ _proto.decodeFolderData = function decodeFolderData(folderIndex, callback) {
+ if (!this.streamsInfo) {
+ callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
+ var folder = this.streamsInfo.folders[folderIndex];
+ if (!folder) {
+ callback((0, _constantsts.createCodedError)('Invalid folder index', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
  if (this.folderHasBcj2(folder)) {
- var data = this.decompressBcj2Folder(folderIndex);
- if (shouldCache) {
- this.decompressedCache[folderIndex] = data;
- }
- return data;
+ this.decompressBcj2Folder(folderIndex, callback);
+ return;
+ }
+ var packDataResult = this.readPackedData(folderIndex);
+ if (_instanceof(packDataResult, Error)) {
+ callback(packDataResult);
+ return;
+ }
+ this.decodeFolderCoders(folder, packDataResult, 0, callback);
+ };
+ _proto.readPackedData = function readPackedData(folderIndex) {
+ if (!this.streamsInfo) {
+ return (0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER);
+ }
+ var folder = this.streamsInfo.folders[folderIndex];
+ if (!folder) {
+ return (0, _constantsts.createCodedError)('Invalid folder index', _constantsts.ErrorCode.CORRUPT_HEADER);
  }
- // Calculate packed data position
- // Use Math.max to prevent 32-bit signed overflow
  var signedHeaderSize = _constantsts.SIGNATURE_HEADER_SIZE;
  var signedPackPos = this.streamsInfo.packPos;
  var packPos = Math.max(signedHeaderSize, 0) + Math.max(signedPackPos, 0);
- // Find which pack stream this folder uses
  var packStreamIndex = 0;
  for(var j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
- // Calculate position of this pack stream - PREVENT OVERFLOW
  for(var k = 0; k < packStreamIndex; k++){
  var size = this.streamsInfo.packSizes[k];
  if (packPos + size < packPos) {
- throw (0, _constantsts.createCodedError)("Pack position overflow at index ".concat(k), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ return (0, _constantsts.createCodedError)("Pack position overflow at index ".concat(k), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
  }
  packPos += size;
  }
  var packSize = this.streamsInfo.packSizes[packStreamIndex];
- // Validate pack size to prevent overflow
- // Upper bound is Number.MAX_SAFE_INTEGER (2^53-1 = 9PB) - safe for all realistic archives
  if (packSize < 0 || packSize > Number.MAX_SAFE_INTEGER) {
- throw (0, _constantsts.createCodedError)("Invalid pack size: ".concat(packSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ return (0, _constantsts.createCodedError)("Invalid pack size: ".concat(packSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
  }
  if (packPos < 0 || packPos > Number.MAX_SAFE_INTEGER) {
- throw (0, _constantsts.createCodedError)("Invalid pack position: ".concat(packPos), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
- }
- // Read packed data
- var packedData = this.source.read(packPos, packSize);
- // Decompress through codec chain
- var data2 = packedData;
- for(var l = 0; l < folder.coders.length; l++){
- var coderInfo = folder.coders[l];
- var codec = (0, _indexts.getCodec)(coderInfo.id);
- // Get unpack size for this coder (needed by LZMA)
- var unpackSize = folder.unpackSizes[l];
- // Validate unpack size to prevent overflow
- if (unpackSize < 0 || unpackSize > Number.MAX_SAFE_INTEGER) {
- throw (0, _constantsts.createCodedError)("Invalid unpack size: ".concat(unpackSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
- }
- data2 = codec.decode(data2, coderInfo.properties, unpackSize);
+ return (0, _constantsts.createCodedError)("Invalid pack position: ".concat(packPos), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
  }
- // Cache only if more files remain in this folder
- if (shouldCache) {
- this.decompressedCache[folderIndex] = data2;
+ return this.source.read(packPos, packSize);
+ };
+ _proto.decodeFolderCoders = function decodeFolderCoders(folder, input, index, callback) {
+ var _this = this;
+ if (index >= folder.coders.length) {
+ callback(null, input);
+ return;
+ }
+ var coderInfo = folder.coders[index];
+ var codec = (0, _indexts.getCodec)(coderInfo.id);
+ var unpackSize = folder.unpackSizes[index];
+ if (unpackSize < 0 || unpackSize > Number.MAX_SAFE_INTEGER) {
+ callback((0, _constantsts.createCodedError)("Invalid unpack size: ".concat(unpackSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE));
+ return;
  }
- return data2;
+ this.decodeWithCodec(codec, input, coderInfo.properties, unpackSize, function(err, output) {
+ if (err || !output) {
+ callback(err || (0, _constantsts.createCodedError)('Decoder returned no data', _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+ return;
+ }
+ _this.decodeFolderCoders(folder, output, index + 1, callback);
+ });
  };
  /**
  * Decompress a BCJ2 folder with multi-stream handling
  * BCJ2 uses 4 input streams: main, call, jump, range coder
- */ _proto.decompressBcj2Folder = function decompressBcj2Folder(folderIndex) {
+ */ _proto.decompressBcj2Folder = function decompressBcj2Folder(folderIndex, callback) {
+ var _this = this;
  if (!this.streamsInfo) {
- throw (0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER);
+ callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
  }
  var folder = this.streamsInfo.folders[folderIndex];
- // Calculate starting pack position
+ if (!folder) {
+ callback((0, _constantsts.createCodedError)('Invalid folder index', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
+ }
  var packPos = _constantsts.SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
- // Find which pack stream index this folder starts at
  var packStreamIndex = 0;
  for(var j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
- // Calculate position
  for(var k = 0; k < packStreamIndex; k++){
  packPos += this.streamsInfo.packSizes[k];
  }
- // Read all pack streams for this folder
  var numPackStreams = folder.packedStreams.length;
  var packStreams = [];
  var currentPos = packPos;
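
getDecompressedFolder above now coalesces concurrent requests for the same folder: the first caller starts decoding, later callers are queued in pendingFolders[folderIndex], and every waiter is answered once decoding settles. A generic sketch of this waiter-coalescing pattern; cache, pending, getValue, and loadValue are illustrative names, not part of the package API:

var cache = {};
var pending = {};

function getValue(key, loadValue, callback) {
  if (cache[key]) return callback(null, cache[key]); // already decoded
  if (pending[key]) return pending[key].push(callback); // join the in-flight request
  pending[key] = [callback];
  loadValue(key, function (err, value) {
    var waiters = pending[key] || [];
    delete pending[key];
    if (!err && value) cache[key] = value;
    for (var i = 0; i < waiters.length; i++) waiters[i](err, value);
  });
}
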
@@ -809,16 +934,7 @@ var SevenZipParser = /*#__PURE__*/ function() {
  packStreams.push(this.source.read(currentPos, size));
  currentPos += size;
  }
- // Build a map of coder outputs
- // For BCJ2, typical structure is:
- // Coder 0: LZMA2 (main stream) - 1 in, 1 out
- // Coder 1: LZMA (call stream) - 1 in, 1 out
- // Coder 2: LZMA (jump stream) - 1 in, 1 out
- // Coder 3: BCJ2 - 4 in, 1 out
- // Pack streams map to: coder inputs not bound to other coder outputs
- // First, decompress each non-BCJ2 coder
  var coderOutputs = {};
- // Find the BCJ2 coder
  var bcj2CoderIndex = -1;
  for(var c = 0; c < folder.coders.length; c++){
  if ((0, _indexts.isBcj2Codec)(folder.coders[c].id)) {
@@ -827,56 +943,50 @@ var SevenZipParser = /*#__PURE__*/ function() {
  }
  }
  if (bcj2CoderIndex === -1) {
- throw (0, _constantsts.createCodedError)('BCJ2 coder not found in folder', _constantsts.ErrorCode.CORRUPT_HEADER);
+ callback((0, _constantsts.createCodedError)('BCJ2 coder not found in folder', _constantsts.ErrorCode.CORRUPT_HEADER));
+ return;
  }
- // Build input stream index -> pack stream mapping
- // folder.packedStreams tells us which input indices are unbound and their order
  var inputToPackStream = {};
  for(var pi = 0; pi < folder.packedStreams.length; pi++){
  inputToPackStream[folder.packedStreams[pi]] = pi;
  }
- // Build output stream index -> coder mapping
- var outputToCoder = {};
- var totalOutputs = 0;
- for(var co = 0; co < folder.coders.length; co++){
- var numOut = folder.coders[co].numOutStreams;
- for(var outp = 0; outp < numOut; outp++){
- outputToCoder[totalOutputs + outp] = co;
- }
- totalOutputs += numOut;
- }
- // Decompress non-BCJ2 coders (LZMA, LZMA2)
- // We need to process in dependency order
- var processed = {};
  var processOrder = this.getCoderProcessOrder(folder, bcj2CoderIndex);
- for(var po = 0; po < processOrder.length; po++){
- var coderIdx = processOrder[po];
- if (coderIdx === bcj2CoderIndex) continue;
+ var processNext = function(orderIndex) {
+ if (orderIndex >= processOrder.length) {
+ _this.finishBcj2Decode(folder, bcj2CoderIndex, coderOutputs, inputToPackStream, packStreams, callback);
+ return;
+ }
+ var coderIdx = processOrder[orderIndex];
+ if (coderIdx === bcj2CoderIndex) {
+ processNext(orderIndex + 1);
+ return;
+ }
  var coder = folder.coders[coderIdx];
  var codec = (0, _indexts.getCodec)(coder.id);
- // Find input for this coder
  var coderInputStart = 0;
  for(var ci2 = 0; ci2 < coderIdx; ci2++){
  coderInputStart += folder.coders[ci2].numInStreams;
  }
- // Get input data (from pack stream)
  var inputIdx = coderInputStart;
  var packStreamIdx = inputToPackStream[inputIdx];
  var inputData = packStreams[packStreamIdx];
- // Decompress
  var unpackSize = folder.unpackSizes[coderIdx];
- var outputData = codec.decode(inputData, coder.properties, unpackSize);
- // Store in coder outputs
- var coderOutputStart = 0;
- for(var co2 = 0; co2 < coderIdx; co2++){
- coderOutputStart += folder.coders[co2].numOutStreams;
- }
- coderOutputs[coderOutputStart] = outputData;
- processed[coderIdx] = true;
- }
- // Now process BCJ2
- // BCJ2 has 4 inputs, need to map them correctly
- // Standard order: main(LZMA2 output), call(LZMA output), jump(LZMA output), range(raw pack)
+ _this.decodeWithCodec(codec, inputData, coder.properties, unpackSize, function(err, outputData) {
+ if (err || !outputData) {
+ callback(err || (0, _constantsts.createCodedError)('Decoder returned no data', _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+ return;
+ }
+ var coderOutputStart = 0;
+ for(var co2 = 0; co2 < coderIdx; co2++){
+ coderOutputStart += folder.coders[co2].numOutStreams;
+ }
+ coderOutputs[coderOutputStart] = outputData;
+ processNext(orderIndex + 1);
+ });
+ };
+ processNext(0);
+ };
+ _proto.finishBcj2Decode = function finishBcj2Decode(folder, bcj2CoderIndex, coderOutputs, inputToPackStream, packStreams, callback) {
  var bcj2InputStart = 0;
  for(var ci3 = 0; ci3 < bcj2CoderIndex; ci3++){
  bcj2InputStart += folder.coders[ci3].numInStreams;
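
The processNext recursion above replaces the previous synchronous for-loop so that each coder's asynchronous decode finishes before the next one starts, and the first error short-circuits the chain. Reduced to its essentials, the pattern is plain sequential callback iteration (items and processItem are placeholders):

function processAll(items, processItem, callback) {
  function next(index) {
    if (index >= items.length) return callback(null); // every item processed
    processItem(items[index], function (err) {
      if (err) return callback(err); // stop on the first error
      next(index + 1); // otherwise continue with the next item
    });
  }
  next(0);
}
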
@@ -884,7 +994,6 @@ var SevenZipParser = /*#__PURE__*/ function() {
  var bcj2Inputs = [];
  for(var bi = 0; bi < 4; bi++){
  var globalIdx = bcj2InputStart + bi;
- // Check if this input is bound to a coder output
  var boundOutput = -1;
  for(var bp2 = 0; bp2 < folder.bindPairs.length; bp2++){
  if (folder.bindPairs[bp2].inIndex === globalIdx) {
@@ -893,29 +1002,28 @@ var SevenZipParser = /*#__PURE__*/ function() {
  }
  }
  if (boundOutput >= 0) {
- // Get from coder outputs
  bcj2Inputs.push(coderOutputs[boundOutput]);
  } else {
- // Get from pack streams
  var psIdx = inputToPackStream[globalIdx];
  bcj2Inputs.push(packStreams[psIdx]);
  }
  }
- // Get BCJ2 unpack size
  var bcj2OutputStart = 0;
  for(var co3 = 0; co3 < bcj2CoderIndex; co3++){
  bcj2OutputStart += folder.coders[co3].numOutStreams;
  }
  var bcj2UnpackSize = folder.unpackSizes[bcj2OutputStart];
- // Memory optimization: Clear intermediate buffers to help GC
- // These are no longer needed after bcj2Inputs is built
- for(var key in coderOutputs){
- delete coderOutputs[key];
- }
- // Clear packStreams array (allows GC to free compressed data)
- packStreams.length = 0;
- // Decode BCJ2
- return (0, _indexts.decodeBcj2Multi)(bcj2Inputs, undefined, bcj2UnpackSize);
+ try {
+ var result = (0, _indexts.decodeBcj2Multi)(bcj2Inputs, undefined, bcj2UnpackSize);
+ callback(null, result);
+ } catch (err) {
+ callback(err);
+ } finally{
+ for(var key in coderOutputs){
+ delete coderOutputs[key];
+ }
+ packStreams.length = 0;
+ }
  };
  /**
  * Get processing order for coders (dependency order)