7z-iterator 1.1.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188) hide show
  1. package/dist/cjs/FileEntry.d.cts +12 -4
  2. package/dist/cjs/FileEntry.d.ts +12 -4
  3. package/dist/cjs/FileEntry.js +52 -24
  4. package/dist/cjs/FileEntry.js.map +1 -1
  5. package/dist/cjs/SevenZipIterator.d.cts +25 -2
  6. package/dist/cjs/SevenZipIterator.d.ts +25 -2
  7. package/dist/cjs/SevenZipIterator.js +68 -21
  8. package/dist/cjs/SevenZipIterator.js.map +1 -1
  9. package/dist/cjs/compat.js +1 -8
  10. package/dist/cjs/compat.js.map +1 -1
  11. package/dist/cjs/index.d.cts +0 -2
  12. package/dist/cjs/index.d.ts +0 -2
  13. package/dist/cjs/index.js +3 -12
  14. package/dist/cjs/index.js.map +1 -1
  15. package/dist/cjs/lib/streamToSource.d.cts +8 -11
  16. package/dist/cjs/lib/streamToSource.d.ts +8 -11
  17. package/dist/cjs/lib/streamToSource.js +21 -67
  18. package/dist/cjs/lib/streamToSource.js.map +1 -1
  19. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  20. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  21. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  22. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  23. package/dist/cjs/lzma/index.d.cts +13 -0
  24. package/dist/cjs/lzma/index.d.ts +13 -0
  25. package/dist/cjs/lzma/index.js +63 -0
  26. package/dist/cjs/lzma/index.js.map +1 -0
  27. package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
  28. package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
  29. package/dist/cjs/lzma/stream/transforms.js +149 -0
  30. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  31. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
  32. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
  33. package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
  34. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  35. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
  36. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
  37. package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
  38. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  39. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  40. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  41. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  42. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  43. package/dist/cjs/lzma/types.d.cts +110 -0
  44. package/dist/cjs/lzma/types.d.ts +110 -0
  45. package/dist/cjs/lzma/types.js +264 -0
  46. package/dist/cjs/lzma/types.js.map +1 -0
  47. package/dist/cjs/nextEntry.js +24 -26
  48. package/dist/cjs/nextEntry.js.map +1 -1
  49. package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
  50. package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
  51. package/dist/cjs/sevenz/ArchiveSource.js +69 -0
  52. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  53. package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
  54. package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
  55. package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
  56. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
  57. package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
  58. package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
  59. package/dist/cjs/sevenz/SevenZipParser.js +574 -203
  60. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  61. package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
  62. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  63. package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
  64. package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
  65. package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
  66. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  67. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
  68. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
  69. package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
  70. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  71. package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
  72. package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
  73. package/dist/cjs/sevenz/codecs/Copy.js +2 -15
  74. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  75. package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
  76. package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
  77. package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
  78. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  79. package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
  80. package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
  81. package/dist/cjs/sevenz/codecs/Delta.js +29 -10
  82. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  83. package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
  84. package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
  85. package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
  86. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  87. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
  88. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
  89. package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
  90. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  91. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
  92. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  93. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
  94. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  95. package/dist/cjs/types.d.cts +2 -16
  96. package/dist/cjs/types.d.ts +2 -16
  97. package/dist/cjs/types.js.map +1 -1
  98. package/dist/esm/FileEntry.d.ts +12 -4
  99. package/dist/esm/FileEntry.js +52 -26
  100. package/dist/esm/FileEntry.js.map +1 -1
  101. package/dist/esm/SevenZipIterator.d.ts +25 -2
  102. package/dist/esm/SevenZipIterator.js +69 -22
  103. package/dist/esm/SevenZipIterator.js.map +1 -1
  104. package/dist/esm/compat.js +1 -8
  105. package/dist/esm/compat.js.map +1 -1
  106. package/dist/esm/index.d.ts +0 -2
  107. package/dist/esm/index.js +0 -1
  108. package/dist/esm/index.js.map +1 -1
  109. package/dist/esm/lib/streamToSource.d.ts +8 -11
  110. package/dist/esm/lib/streamToSource.js +22 -68
  111. package/dist/esm/lib/streamToSource.js.map +1 -1
  112. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  113. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  114. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  115. package/dist/esm/lzma/index.d.ts +13 -0
  116. package/dist/esm/lzma/index.js +15 -0
  117. package/dist/esm/lzma/index.js.map +1 -0
  118. package/dist/esm/lzma/stream/transforms.d.ts +38 -0
  119. package/dist/esm/lzma/stream/transforms.js +150 -0
  120. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  121. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
  122. package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
  123. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  124. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
  125. package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
  126. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  127. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  128. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  129. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  130. package/dist/esm/lzma/types.d.ts +110 -0
  131. package/dist/esm/lzma/types.js +154 -0
  132. package/dist/esm/lzma/types.js.map +1 -0
  133. package/dist/esm/nextEntry.js +24 -26
  134. package/dist/esm/nextEntry.js.map +1 -1
  135. package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
  136. package/dist/esm/sevenz/ArchiveSource.js +70 -1
  137. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  138. package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
  139. package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
  140. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
  141. package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
  142. package/dist/esm/sevenz/SevenZipParser.js +414 -198
  143. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  144. package/dist/esm/sevenz/codecs/BZip2.js +2 -1
  145. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  146. package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
  147. package/dist/esm/sevenz/codecs/Bcj.js +106 -6
  148. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  149. package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
  150. package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
  151. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  152. package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
  153. package/dist/esm/sevenz/codecs/Copy.js +1 -9
  154. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  155. package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
  156. package/dist/esm/sevenz/codecs/Deflate.js +9 -7
  157. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  158. package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
  159. package/dist/esm/sevenz/codecs/Delta.js +33 -8
  160. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  161. package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
  162. package/dist/esm/sevenz/codecs/Lzma.js +17 -24
  163. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  164. package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
  165. package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
  166. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  167. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  168. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
  169. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  170. package/dist/esm/types.d.ts +2 -16
  171. package/dist/esm/types.js.map +1 -1
  172. package/package.json +3 -3
  173. package/assets/lzma-purejs/LICENSE +0 -11
  174. package/assets/lzma-purejs/index.js +0 -19
  175. package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
  176. package/assets/lzma-purejs/lib/LZ.js +0 -6
  177. package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
  178. package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
  179. package/assets/lzma-purejs/lib/LZMA.js +0 -6
  180. package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
  181. package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
  182. package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
  183. package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
  184. package/assets/lzma-purejs/lib/Stream.js +0 -41
  185. package/assets/lzma-purejs/lib/Util.js +0 -114
  186. package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
  187. package/assets/lzma-purejs/package-lock.json +0 -13
  188. package/assets/lzma-purejs/package.json +0 -8
@@ -38,30 +38,144 @@ _export(exports, {
38
38
  }
39
39
  });
40
40
  var _extractbaseiterator = require("extract-base-iterator");
41
- var _onone = /*#__PURE__*/ _interop_require_default(require("on-one"));
42
- var _stream = /*#__PURE__*/ _interop_require_default(require("stream"));
43
41
  var _indexts = require("./codecs/index.js");
42
+ var _FolderStreamSplitterts = require("./FolderStreamSplitter.js");
44
43
  var _constantsts = require("./constants.js");
45
44
  var _headersts = require("./headers.js");
46
45
  var _NumberCodects = require("./NumberCodec.js");
47
46
  var _ArchiveSourcets = require("./ArchiveSource.js");
47
+ function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
48
+ try {
49
+ var info = gen[key](arg);
50
+ var value = info.value;
51
+ } catch (error) {
52
+ reject(error);
53
+ return;
54
+ }
55
+ if (info.done) {
56
+ resolve(value);
57
+ } else {
58
+ Promise.resolve(value).then(_next, _throw);
59
+ }
60
+ }
61
+ function _async_to_generator(fn) {
62
+ return function() {
63
+ var self = this, args = arguments;
64
+ return new Promise(function(resolve, reject) {
65
+ var gen = fn.apply(self, args);
66
+ function _next(value) {
67
+ asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
68
+ }
69
+ function _throw(err) {
70
+ asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
71
+ }
72
+ _next(undefined);
73
+ });
74
+ };
75
+ }
48
76
  function _class_call_check(instance, Constructor) {
49
77
  if (!(instance instanceof Constructor)) {
50
78
  throw new TypeError("Cannot call a class as a function");
51
79
  }
52
80
  }
53
- function _interop_require_default(obj) {
54
- return obj && obj.__esModule ? obj : {
55
- default: obj
56
- };
57
- }
58
- // Use native streams when available, readable-stream only for Node 0.x
59
- var major = +process.versions.node.split('.')[0];
60
- var PassThrough;
61
- if (major > 0) {
62
- PassThrough = _stream.default.PassThrough;
63
- } else {
64
- PassThrough = require('readable-stream').PassThrough;
81
+ function _ts_generator(thisArg, body) {
82
+ var f, y, t, _ = {
83
+ label: 0,
84
+ sent: function() {
85
+ if (t[0] & 1) throw t[1];
86
+ return t[1];
87
+ },
88
+ trys: [],
89
+ ops: []
90
+ }, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype), d = Object.defineProperty;
91
+ return d(g, "next", {
92
+ value: verb(0)
93
+ }), d(g, "throw", {
94
+ value: verb(1)
95
+ }), d(g, "return", {
96
+ value: verb(2)
97
+ }), typeof Symbol === "function" && d(g, Symbol.iterator, {
98
+ value: function() {
99
+ return this;
100
+ }
101
+ }), g;
102
+ function verb(n) {
103
+ return function(v) {
104
+ return step([
105
+ n,
106
+ v
107
+ ]);
108
+ };
109
+ }
110
+ function step(op) {
111
+ if (f) throw new TypeError("Generator is already executing.");
112
+ while(g && (g = 0, op[0] && (_ = 0)), _)try {
113
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
114
+ if (y = 0, t) op = [
115
+ op[0] & 2,
116
+ t.value
117
+ ];
118
+ switch(op[0]){
119
+ case 0:
120
+ case 1:
121
+ t = op;
122
+ break;
123
+ case 4:
124
+ _.label++;
125
+ return {
126
+ value: op[1],
127
+ done: false
128
+ };
129
+ case 5:
130
+ _.label++;
131
+ y = op[1];
132
+ op = [
133
+ 0
134
+ ];
135
+ continue;
136
+ case 7:
137
+ op = _.ops.pop();
138
+ _.trys.pop();
139
+ continue;
140
+ default:
141
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
142
+ _ = 0;
143
+ continue;
144
+ }
145
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
146
+ _.label = op[1];
147
+ break;
148
+ }
149
+ if (op[0] === 6 && _.label < t[1]) {
150
+ _.label = t[1];
151
+ t = op;
152
+ break;
153
+ }
154
+ if (t && _.label < t[2]) {
155
+ _.label = t[2];
156
+ _.ops.push(op);
157
+ break;
158
+ }
159
+ if (t[2]) _.ops.pop();
160
+ _.trys.pop();
161
+ continue;
162
+ }
163
+ op = body.call(thisArg, _);
164
+ } catch (e) {
165
+ op = [
166
+ 6,
167
+ e
168
+ ];
169
+ y = 0;
170
+ } finally{
171
+ f = t = 0;
172
+ }
173
+ if (op[0] & 5) throw op[1];
174
+ return {
175
+ value: op[0] ? op[1] : void 0,
176
+ done: true
177
+ };
178
+ }
65
179
  }
66
180
  var SevenZipParser = /*#__PURE__*/ function() {
67
181
  "use strict";
@@ -78,6 +192,8 @@ var SevenZipParser = /*#__PURE__*/ function() {
78
192
  // Track files per folder and how many have been extracted
79
193
  this.filesPerFolder = {};
80
194
  this.extractedPerFolder = {};
195
+ // Splitter cache for multi-file folder streaming (Phase 2)
196
+ this.folderSplitters = {};
81
197
  this.source = source;
82
198
  }
83
199
  var _proto = SevenZipParser.prototype;
@@ -344,6 +460,15 @@ var SevenZipParser = /*#__PURE__*/ function() {
344
460
  }
345
461
  }
346
462
  }
463
+ // Set _canStream for all entries now that we have complete folder info
464
+ // This must be done after all entries are built because canStreamFolder
465
+ // relies on the folder structure being fully parsed
466
+ for(var i1 = 0; i1 < this.entries.length; i1++){
467
+ var entry1 = this.entries[i1];
468
+ if (entry1._hasStream && entry1._folderIndex >= 0) {
469
+ entry1._canStream = this.canStreamFolder(entry1._folderIndex);
470
+ }
471
+ }
347
472
  };
348
473
  /**
349
474
  * Create an entry from file info
@@ -385,7 +510,8 @@ var SevenZipParser = /*#__PURE__*/ function() {
385
510
  _folderIndex: folderIndex,
386
511
  _streamIndex: 0,
387
512
  _streamIndexInFolder: streamInFolder,
388
- _hasStream: file.hasStream
513
+ _hasStream: file.hasStream,
514
+ _canStream: false
389
515
  };
390
516
  };
391
517
  /**
@@ -397,11 +523,13 @@ var SevenZipParser = /*#__PURE__*/ function() {
397
523
  return this.entries;
398
524
  };
399
525
  /**
400
- * Get a readable stream for an entry's content
526
+ * Get a readable stream for an entry's content.
527
+ * Returns immediately - decompression happens when data is read (proper streaming).
528
+ * Uses true streaming for codecs that support it, buffered for others.
401
529
  */ _proto.getEntryStream = function getEntryStream(entry) {
402
530
  if (!entry._hasStream || entry.type === 'directory') {
403
531
  // Return empty stream for directories and empty files
404
- var emptyStream = new PassThrough();
532
+ var emptyStream = new _extractbaseiterator.PassThrough();
405
533
  emptyStream.end();
406
534
  return emptyStream;
407
535
  }
@@ -421,107 +549,152 @@ var SevenZipParser = /*#__PURE__*/ function() {
421
549
  throw (0, _constantsts.createCodedError)("Unsupported codec: ".concat(codecName), _constantsts.ErrorCode.UNSUPPORTED_CODEC);
422
550
  }
423
551
  }
424
- // Get decompressed data for this folder (with smart caching)
425
- var folderIdx = entry._folderIndex;
426
- var data = this.getDecompressedFolder(folderIdx);
427
- // Calculate file offset within the decompressed block
428
- // For solid archives, multiple files are concatenated in the block
429
- var fileStart = 0;
430
- for(var m = 0; m < entry._streamIndexInFolder; m++){
431
- // Sum sizes of all streams before this one in the folder
432
- var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
433
- fileStart += this.streamsInfo.unpackSizes[prevStreamGlobalIndex];
434
- }
435
- var fileSize = entry.size;
436
- // Create a PassThrough stream with the file data
437
- var outputStream = new PassThrough();
438
- // Bounds check to prevent "oob" error on older Node versions
439
- if (fileStart + fileSize > data.length) {
440
- throw (0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED);
441
- }
442
- var fileData = data.slice(fileStart, fileStart + fileSize);
443
- // Verify CRC if present
444
- if (entry._crc !== undefined) {
445
- var actualCRC = (0, _extractbaseiterator.crc32)(fileData);
446
- if (actualCRC !== entry._crc) {
447
- throw (0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(entry._crc.toString(16), ", got ").concat(actualCRC.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH);
448
- }
449
- }
450
- outputStream.end(fileData);
451
- // Track extraction and release cache when all files from this folder are done
452
- this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
453
- if (this.extractedPerFolder[folderIdx] >= this.filesPerFolder[folderIdx]) {
454
- // All files from this folder extracted, release cache
455
- delete this.decompressedCache[folderIdx];
456
- }
457
- return outputStream;
552
+ // Use true streaming for single-file folders that support it.
553
+ // Multi-file folders use buffered approach because streaming requires
554
+ // accessing files in order, which doesn't work with concurrent extraction.
555
+ var filesInFolder = this.filesPerFolder[entry._folderIndex] || 1;
556
+ if (entry._canStream && filesInFolder === 1) {
557
+ return this._getEntryStreamStreaming(entry);
558
+ }
559
+ return this._getEntryStreamBuffered(entry);
458
560
  };
459
561
  /**
460
- * Get a readable stream for an entry's content (callback-based async version)
461
- * Uses streaming decompression for non-blocking I/O
462
- */ _proto.getEntryStreamAsync = function getEntryStreamAsync(entry, callback) {
562
+ * True streaming: data flows through without buffering entire folder.
563
+ * Only used for single-file folders with streamable codecs (BZip2, Deflate, LZMA2).
564
+ */ _proto._getEntryStreamStreaming = function _getEntryStreamStreaming(entry) {
463
565
  var _this = this;
464
- if (!entry._hasStream || entry.type === 'directory') {
465
- // Return empty stream for directories and empty files
466
- var emptyStream = new PassThrough();
467
- emptyStream.end();
468
- callback(null, emptyStream);
469
- return;
470
- }
471
- if (!this.streamsInfo) {
472
- callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
473
- return;
474
- }
475
- // Get folder info
476
- var folder = this.streamsInfo.folders[entry._folderIndex];
477
- if (!folder) {
478
- callback((0, _constantsts.createCodedError)('Invalid folder index', _constantsts.ErrorCode.CORRUPT_HEADER));
479
- return;
480
- }
481
- // Check codec support
482
- for(var i = 0; i < folder.coders.length; i++){
483
- var coder = folder.coders[i];
484
- if (!(0, _indexts.isCodecSupported)(coder.id)) {
485
- var codecName = (0, _indexts.getCodecName)(coder.id);
486
- callback((0, _constantsts.createCodedError)("Unsupported codec: ".concat(codecName), _constantsts.ErrorCode.UNSUPPORTED_CODEC));
487
- return;
566
+ var started = false;
567
+ var destroyed = false;
568
+ var folderStream = null;
569
+ var stream = new _extractbaseiterator.PassThrough();
570
+ var originalRead = stream._read.bind(stream);
571
+ stream._read = function(size) {
572
+ if (!started && !destroyed) {
573
+ started = true;
574
+ setTimeout(function() {
575
+ if (destroyed) return;
576
+ try {
577
+ var crcValue = 0;
578
+ var verifyCrc = entry._crc !== undefined;
579
+ folderStream = _this.streamFolder(entry._folderIndex);
580
+ folderStream.output.on('data', function(chunk) {
581
+ if (destroyed) return;
582
+ if (verifyCrc) {
583
+ crcValue = (0, _extractbaseiterator.crc32)(chunk, crcValue);
584
+ }
585
+ if (!stream.write(chunk)) {
586
+ folderStream === null || folderStream === void 0 ? void 0 : folderStream.pause();
587
+ stream.once('drain', function() {
588
+ return folderStream === null || folderStream === void 0 ? void 0 : folderStream.resume();
589
+ });
590
+ }
591
+ });
592
+ folderStream.output.on('end', function() {
593
+ if (destroyed) return;
594
+ if (verifyCrc && crcValue !== entry._crc) {
595
+ var _entry__crc;
596
+ stream.destroy((0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat((_entry__crc = entry._crc) === null || _entry__crc === void 0 ? void 0 : _entry__crc.toString(16), ", got ").concat(crcValue.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH));
597
+ return;
598
+ }
599
+ stream.end();
600
+ _this.extractedPerFolder[entry._folderIndex] = (_this.extractedPerFolder[entry._folderIndex] || 0) + 1;
601
+ });
602
+ folderStream.output.on('error', function(err) {
603
+ if (!destroyed) stream.destroy(err);
604
+ });
605
+ } catch (err) {
606
+ if (!destroyed) {
607
+ stream.destroy(err);
608
+ }
609
+ }
610
+ }, 0);
488
611
  }
612
+ return originalRead(size);
613
+ };
614
+ // Override destroy to clean up folder stream
615
+ // IMPORTANT: Emit error synchronously BEFORE calling original destroy.
616
+ // On older Node, destroy() emits 'finish' and 'end' before 'error',
617
+ // which causes piped streams to complete successfully before the error fires.
618
+ var streamWithDestroy = stream;
619
+ var originalDestroy = typeof streamWithDestroy.destroy === 'function' ? streamWithDestroy.destroy.bind(stream) : null;
620
+ streamWithDestroy.destroy = function(err) {
621
+ destroyed = true;
622
+ if (err) stream.emit('error', err);
623
+ if (folderStream) folderStream.destroy();
624
+ if (originalDestroy) return originalDestroy();
625
+ return stream;
626
+ };
627
+ return stream;
628
+ };
629
+ /**
630
+ * Buffered extraction: decompress entire folder, slice out file.
631
+ * Used for codecs that don't support incremental streaming (LZMA1, BCJ2).
632
+ */ _proto._getEntryStreamBuffered = function _getEntryStreamBuffered(entry) {
633
+ var _this = this;
634
+ if (!this.streamsInfo) {
635
+ throw (0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER);
489
636
  }
490
- // Get decompressed data for this folder using async method
491
- var folderIdx = entry._folderIndex;
492
637
  var streamsInfo = this.streamsInfo;
493
- this.getDecompressedFolderAsync(folderIdx, function(err, data) {
494
- if (err) return callback(err);
495
- if (!data) return callback(new Error('No data returned from decompression'));
496
- // Calculate file offset within the decompressed block
497
- var fileStart = 0;
498
- for(var m = 0; m < entry._streamIndexInFolder; m++){
499
- var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
500
- fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
501
- }
502
- var fileSize = entry.size;
503
- // Bounds check
504
- if (fileStart + fileSize > data.length) {
505
- return callback((0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED));
506
- }
507
- // Create a PassThrough stream with the file data
508
- var outputStream = new PassThrough();
509
- var fileData = data.slice(fileStart, fileStart + fileSize);
510
- // Verify CRC if present
511
- if (entry._crc !== undefined) {
512
- var actualCRC = (0, _extractbaseiterator.crc32)(fileData);
513
- if (actualCRC !== entry._crc) {
514
- return callback((0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(entry._crc.toString(16), ", got ").concat(actualCRC.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH));
515
- }
516
- }
517
- outputStream.end(fileData);
518
- // Track extraction and release cache when all files from this folder are done
519
- _this.extractedPerFolder[folderIdx] = (_this.extractedPerFolder[folderIdx] || 0) + 1;
520
- if (_this.extractedPerFolder[folderIdx] >= _this.filesPerFolder[folderIdx]) {
521
- delete _this.decompressedCache[folderIdx];
638
+ var folderIdx = entry._folderIndex;
639
+ var started = false;
640
+ var destroyed = false;
641
+ var stream = new _extractbaseiterator.PassThrough();
642
+ var originalRead = stream._read.bind(stream);
643
+ stream._read = function(size) {
644
+ if (!started && !destroyed) {
645
+ started = true;
646
+ setTimeout(function() {
647
+ if (destroyed) return;
648
+ try {
649
+ var data = _this.getDecompressedFolder(folderIdx);
650
+ var fileStart = 0;
651
+ for(var m = 0; m < entry._streamIndexInFolder; m++){
652
+ var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
653
+ fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
654
+ }
655
+ var fileSize = entry.size;
656
+ if (fileStart + fileSize > data.length) {
657
+ stream.destroy((0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED));
658
+ return;
659
+ }
660
+ var fileData = data.slice(fileStart, fileStart + fileSize);
661
+ if (entry._crc !== undefined) {
662
+ var actualCRC = (0, _extractbaseiterator.crc32)(fileData);
663
+ if (actualCRC !== entry._crc) {
664
+ stream.destroy((0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(entry._crc.toString(16), ", got ").concat(actualCRC.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH));
665
+ return;
666
+ }
667
+ }
668
+ _this.extractedPerFolder[folderIdx] = (_this.extractedPerFolder[folderIdx] || 0) + 1;
669
+ if (_this.extractedPerFolder[folderIdx] >= _this.filesPerFolder[folderIdx]) {
670
+ delete _this.decompressedCache[folderIdx];
671
+ }
672
+ if (!destroyed) {
673
+ stream.push(fileData);
674
+ stream.push(null);
675
+ }
676
+ } catch (err) {
677
+ if (!destroyed) {
678
+ stream.destroy(err);
679
+ }
680
+ }
681
+ }, 0);
522
682
  }
523
- callback(null, outputStream);
524
- });
683
+ return originalRead(size);
684
+ };
685
+ // Override destroy to set destroyed flag
686
+ // IMPORTANT: Emit error synchronously BEFORE calling original destroy.
687
+ // On older Node, destroy() emits 'finish' and 'end' before 'error',
688
+ // which causes piped streams to complete successfully before the error fires.
689
+ var streamWithDestroy = stream;
690
+ var originalDestroy = typeof streamWithDestroy.destroy === 'function' ? streamWithDestroy.destroy.bind(stream) : null;
691
+ streamWithDestroy.destroy = function(err) {
692
+ destroyed = true;
693
+ if (err) stream.emit('error', err);
694
+ if (originalDestroy) return originalDestroy();
695
+ return stream;
696
+ };
697
+ return stream;
525
698
  };
526
699
  /**
527
700
  * Check if a folder uses BCJ2 codec
@@ -589,98 +762,6 @@ var SevenZipParser = /*#__PURE__*/ function() {
589
762
  return data2;
590
763
  };
591
764
  /**
592
- * Get decompressed data for a folder using streaming (callback-based async)
593
- * Uses createDecoder() streams for non-blocking decompression
594
- */ _proto.getDecompressedFolderAsync = function getDecompressedFolderAsync(folderIndex, callback) {
595
- var self = this;
596
- // Check cache first
597
- if (this.decompressedCache[folderIndex]) return callback(null, this.decompressedCache[folderIndex]);
598
- if (!this.streamsInfo) {
599
- callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
600
- return;
601
- }
602
- var folder = this.streamsInfo.folders[folderIndex];
603
- // Check how many files remain in this folder
604
- var filesInFolder = this.filesPerFolder[folderIndex] || 1;
605
- var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
606
- var remainingFiles = filesInFolder - extractedFromFolder;
607
- var shouldCache = remainingFiles > 1;
608
- // BCJ2 requires special handling - use sync version for now
609
- // TODO: Add async BCJ2 support
610
- if (this.folderHasBcj2(folder)) {
611
- try {
612
- var data = this.decompressBcj2Folder(folderIndex);
613
- if (shouldCache) {
614
- this.decompressedCache[folderIndex] = data;
615
- }
616
- callback(null, data);
617
- } catch (err) {
618
- callback(err);
619
- }
620
- return;
621
- }
622
- // Calculate packed data position
623
- var packPos = _constantsts.SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
624
- // Find which pack stream this folder uses
625
- var packStreamIndex = 0;
626
- for(var j = 0; j < folderIndex; j++){
627
- packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
628
- }
629
- // Calculate position of this pack stream
630
- for(var k = 0; k < packStreamIndex; k++){
631
- packPos += this.streamsInfo.packSizes[k];
632
- }
633
- var packSize = this.streamsInfo.packSizes[packStreamIndex];
634
- // Read packed data
635
- var packedData = this.source.read(packPos, packSize);
636
- // Create decoder stream chain and decompress
637
- var coders = folder.coders;
638
- var unpackSizes = folder.unpackSizes;
639
- // Helper to decompress through a single codec stream
640
- function decompressWithStream(input, coderIdx, cb) {
641
- var coderInfo = coders[coderIdx];
642
- var codec = (0, _indexts.getCodec)(coderInfo.id);
643
- var decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
644
- var chunks = [];
645
- var errorOccurred = false;
646
- decoder.on('data', function(chunk) {
647
- chunks.push(chunk);
648
- });
649
- (0, _onone.default)(decoder, [
650
- 'error',
651
- 'end',
652
- 'close',
653
- 'finish'
654
- ], function(err) {
655
- if (errorOccurred) return;
656
- if (err) {
657
- errorOccurred = true;
658
- return cb(err);
659
- }
660
- cb(null, Buffer.concat(chunks));
661
- });
662
- // Write input data to decoder and signal end
663
- decoder.end(input);
664
- }
665
- // Chain decompression through all codecs
666
- function decompressChain(input, idx) {
667
- if (idx >= coders.length) {
668
- // All done - cache and return
669
- if (shouldCache) {
670
- self.decompressedCache[folderIndex] = input;
671
- }
672
- callback(null, input);
673
- return;
674
- }
675
- decompressWithStream(input, idx, function(err, output) {
676
- if (err) return callback(err);
677
- decompressChain(output, idx + 1);
678
- });
679
- }
680
- // Start the chain
681
- decompressChain(packedData, 0);
682
- };
683
- /**
684
765
  * Decompress a BCJ2 folder with multi-stream handling
685
766
  * BCJ2 uses 4 input streams: main, call, jump, range coder
686
767
  */ _proto.decompressBcj2Folder = function decompressBcj2Folder(folderIndex) {
@@ -870,6 +951,296 @@ var SevenZipParser = /*#__PURE__*/ function() {
870
951
  this.source.close();
871
952
  }
872
953
  };
954
// ============================================================
// STREAMING METHODS (Phase 1+)
// ============================================================
/**
 * Check if a codec supports true streaming decompression.
 *
 * Only codecs that process data incrementally (not buffering entire input) qualify.
 * @param codecId - The codec ID as an array of bytes
 * @returns true if the codec can stream
 */ _proto.codecSupportsStreaming = function codecSupportsStreaming(codecId) {
    // Build a dash-joined uppercase-hex key (e.g. [3,3,1,3] -> '3-3-1-3')
    // from the raw codec-id bytes so it can be matched against the whitelist.
    var parts = [];
    for(var i = 0; i < codecId.length; i++){
        parts.push(codecId[i].toString(16).toUpperCase());
    }
    var key = parts.join('-');
    // Whitelist of codecs whose decoders process data incrementally:
    //   '0'       Copy/Store - PassThrough, obviously streams
    //   '3'       Delta - streaming Transform (Phase 2.5)
    //   '4-1-8'   Deflate - zlib.createInflateRaw() streams
    //   '4-2-2'   BZip2 - unbzip2-stream processes blocks incrementally
    //   '3-3-1-3' BCJ x86 - streaming Transform (Phase 3.5)
    //   '3-3-1-5' BCJ ARM - streaming Transform (Phase 3.5)
    //   '21'      LZMA2 - streaming Transform (Phase 5)
    // Still buffer-based: LZMA (TODO: Phase 5 continuation) and the other
    // BCJ variants (ARM64, ARMT, IA64, PPC, SPARC). BCJ2 is never
    // streamable due to its multi-stream architecture.
    var streamableKeys = [
        '0',
        '3',
        '4-1-8',
        '4-2-2',
        '3-3-1-3',
        '3-3-1-5',
        '21'
    ];
    return streamableKeys.indexOf(key) !== -1;
};
987
/**
 * Check if a folder can be streamed (vs buffered).
 *
 * Streaming is possible when ALL codecs in the chain support streaming.
 * BCJ2 folders are never streamable due to their 4-stream architecture.
 *
 * @param folderIndex - Index of the folder to check
 * @returns true if the folder can be streamed
 */ _proto.canStreamFolder = function canStreamFolder(folderIndex) {
    // No parsed streams info means nothing can be streamed.
    if (!this.streamsInfo) return false;
    var folder = this.streamsInfo.folders[folderIndex];
    if (!folder) return false;
    // BCJ2's 4-input layout requires special multi-stream handling and can
    // never be fed through a single linear pipeline.
    if (this.folderHasBcj2(folder)) return false;
    // Streamable only when every codec in the chain streams.
    var self = this;
    return folder.coders.every(function(coder) {
        return self.codecSupportsStreaming(coder.id);
    });
};
1011
/**
 * Stream a folder's decompression.
 *
 * Creates a pipeline: packed data → codec decoders → output stream
 *
 * @param folderIndex - Index of folder to decompress
 * @returns Object with output stream and control methods (pause/resume/destroy)
 * @throws CORRUPT_HEADER when no streams info is available
 * @throws UNSUPPORTED_CODEC when the folder cannot be streamed
 */ _proto.streamFolder = function streamFolder(folderIndex) {
    if (!this.streamsInfo) {
        throw (0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER);
    }
    if (!this.canStreamFolder(folderIndex)) {
        throw (0, _constantsts.createCodedError)('Folder does not support streaming', _constantsts.ErrorCode.UNSUPPORTED_CODEC);
    }
    var info = this.streamsInfo;
    var folder = info.folders[folderIndex];
    // Locate this folder's first pack stream: skip the packed streams that
    // belong to every earlier folder...
    var firstPackIndex = 0;
    for(var f = 0; f < folderIndex; f++){
        firstPackIndex += info.folders[f].packedStreams.length;
    }
    // ...then accumulate the byte offsets of all preceding pack streams,
    // starting just past the signature header.
    var packOffset = _constantsts.SIGNATURE_HEADER_SIZE + info.packPos;
    for(var p = 0; p < firstPackIndex; p++){
        packOffset += info.packSizes[p];
    }
    var packLength = info.packSizes[firstPackIndex];
    // The raw packed bytes feed the head of the decoder chain.
    var packedStream = this.source.createReadStream(packOffset, packLength);
    // Pipe through each codec decoder in order; the last decoder's output
    // is the folder's decompressed stream.
    var decoders = [];
    var tail = packedStream;
    for(var c = 0; c < folder.coders.length; c++){
        var coder = folder.coders[c];
        var decoder = (0, _indexts.getCodec)(coder.id).createDecoder(coder.properties, folder.unpackSizes[c]);
        decoders.push(decoder);
        tail = tail.pipe(decoder);
    }
    return {
        output: tail,
        // Backpressure is applied at the source so the whole chain stalls.
        pause: function() {
            return packedStream.pause();
        },
        resume: function() {
            return packedStream.resume();
        },
        destroy: function(err) {
            // destroy() is absent on streams in Node 4 and earlier, so guard
            // every call on method existence.
            var ps = packedStream;
            if (typeof ps.destroy === 'function') ps.destroy(err);
            for(var d = 0; d < decoders.length; d++){
                var dec = decoders[d];
                if (typeof dec.destroy === 'function') dec.destroy(err);
            }
        }
    };
};
1070
/**
 * Get a streaming entry stream (Promise-based API).
 *
 * For streamable folders: Returns a true streaming decompression
 * For non-streamable folders: Falls back to buffered extraction
 *
 * Transpiled async function: each `return [2, value]` below is the
 * generator-helper encoding of `return value` (opcode 2), so the Promise
 * produced by _async_to_generator resolves with that value.
 *
 * @param entry - The entry to get stream for
 * @returns Promise resolving to readable stream
 */ _proto.getEntryStreamStreaming = function getEntryStreamStreaming(entry) {
    return _async_to_generator(function() {
        var emptyStream, folderIndex, filesInFolder;
        return _ts_generator(this, function(_state) {
            // Directories and entries without a data stream resolve to an
            // already-ended empty stream.
            if (!entry._hasStream || entry.type === 'directory') {
                emptyStream = new _extractbaseiterator.PassThrough();
                emptyStream.end();
                return [
                    2,
                    emptyStream
                ];
            }
            folderIndex = entry._folderIndex;
            // Fall back to buffered if not streamable
            if (!this.canStreamFolder(folderIndex)) {
                return [
                    2,
                    this.getEntryStream(entry)
                ];
            }
            // Treat a missing per-folder count as a single-file folder.
            filesInFolder = this.filesPerFolder[folderIndex] || 1;
            if (filesInFolder === 1) {
                // Single file - direct streaming
                return [
                    2,
                    this.getEntryStreamDirect(entry)
                ];
            }
            // Multi-file folders use FolderStreamSplitter (Phase 2)
            return [
                2,
                this.getEntryStreamFromSplitter(entry)
            ];
        });
    }).call(this);
};
1114
/**
 * Direct streaming for single-file folders.
 * Pipes folder decompression directly to output with CRC verification.
 *
 * @param entry - The entry to stream (its folder must contain only this file)
 * @returns Promise resolving to a PassThrough carrying the decompressed data;
 *          rejects only if the folder pipeline cannot be created
 */ _proto.getEntryStreamDirect = function getEntryStreamDirect(entry) {
    var self = this;
    return new Promise(function(resolve, reject) {
        var out = new _extractbaseiterator.PassThrough();
        var runningCrc = 0;
        // Only verify when the archive recorded a CRC for this entry.
        var checkCrc = entry._crc !== undefined;
        var folderStream;
        try {
            folderStream = self.streamFolder(entry._folderIndex);
        } catch (err) {
            reject(err);
            return;
        }
        var source = folderStream.output;
        source.on('data', function(chunk) {
            if (checkCrc) {
                runningCrc = (0, _extractbaseiterator.crc32)(chunk, runningCrc);
            }
            // Respect downstream backpressure: stall the packed source until
            // the output drains.
            if (!out.write(chunk)) {
                folderStream.pause();
                out.once('drain', function() {
                    folderStream.resume();
                });
            }
        });
        source.on('end', function() {
            // Verify CRC before signalling a clean end.
            if (checkCrc && runningCrc !== entry._crc) {
                var expectedHex = entry._crc == null ? void 0 : entry._crc.toString(16);
                var crcErr = (0, _constantsts.createCodedError)("CRC mismatch for ".concat(entry.path, ": expected ").concat(expectedHex, ", got ").concat(runningCrc.toString(16)), _constantsts.ErrorCode.CRC_MISMATCH);
                out.destroy(crcErr);
                return;
            }
            out.end();
            // Track extraction progress for this folder.
            self.extractedPerFolder[entry._folderIndex] = (self.extractedPerFolder[entry._folderIndex] || 0) + 1;
        });
        source.on('error', function(err) {
            out.destroy(err);
        });
        resolve(out);
    });
};
1158
/**
 * Get stream from folder splitter (for multi-file folders).
 * Creates splitter on first access, reuses for subsequent files in same folder.
 *
 * The first caller for a folder also kicks off the folder's decompression
 * pipeline and wires it into the splitter; later callers only request their
 * slice from the already-registered splitter.
 *
 * @param entry - The entry whose per-file stream is requested
 * @returns Promise resolving to the entry's readable stream
 */ _proto.getEntryStreamFromSplitter = function getEntryStreamFromSplitter(entry) {
    var _this = this;
    return new Promise(function(resolve, reject) {
        var folderIndex = entry._folderIndex;
        // Get or create splitter for this folder
        var splitter = _this.folderSplitters[folderIndex];
        if (!splitter) {
            // Create new splitter with file sizes and CRCs
            var folderInfo = _this.getFolderFileInfo(folderIndex);
            splitter = new _FolderStreamSplitterts.FolderStreamSplitter({
                fileSizes: folderInfo.fileSizes,
                verifyCrc: true,
                expectedCrcs: folderInfo.expectedCrcs
            });
            _this.folderSplitters[folderIndex] = splitter;
            // Start streaming the folder
            var folderStream;
            try {
                folderStream = _this.streamFolder(folderIndex);
            } catch (err) {
                // Unregister the half-initialized splitter so a retry can
                // start from scratch.
                delete _this.folderSplitters[folderIndex];
                reject(err);
                return;
            }
            folderStream.output.on('data', function(chunk) {
                // Handle backpressure from splitter
                if (!(splitter === null || splitter === void 0 ? void 0 : splitter.write(chunk))) {
                    folderStream.pause();
                    splitter === null || splitter === void 0 ? void 0 : splitter.onDrain(function() {
                        folderStream.resume();
                    });
                }
            });
            folderStream.output.on('end', function() {
                splitter === null || splitter === void 0 ? void 0 : splitter.end();
                delete _this.folderSplitters[folderIndex];
            });
            folderStream.output.on('error', function(_err) {
                // NOTE(review): the decompression error itself is dropped here —
                // the splitter is merely ended, presumably so its own
                // short-data/CRC checks surface a failure downstream. Confirm
                // that consumers cannot see a silent truncation.
                splitter === null || splitter === void 0 ? void 0 : splitter.end();
                delete _this.folderSplitters[folderIndex];
            });
        }
        // Get this entry's stream from splitter
        try {
            var fileStream = splitter.getFileStream(entry._streamIndexInFolder);
            // Track extraction when stream ends
            fileStream.on('end', function() {
                _this.extractedPerFolder[folderIndex] = (_this.extractedPerFolder[folderIndex] || 0) + 1;
            });
            resolve(fileStream);
        } catch (err) {
            reject(err);
        }
    });
};
1216
/**
 * Get file sizes and CRCs for all files in a folder (in stream order).
 * Used by FolderStreamSplitter to know file boundaries.
 *
 * @param folderIndex - Index of the folder to describe
 * @returns Object with parallel arrays: fileSizes and expectedCrcs,
 *          both ordered by each entry's stream index within the folder
 */ _proto.getFolderFileInfo = function getFolderFileInfo(folderIndex) {
    // Entries that carry data in this folder, ordered by their position
    // inside the folder's decompressed stream. filter() returns a fresh
    // array, so sorting it in place never touches this.entries.
    var ordered = this.entries.filter(function(e) {
        return e._folderIndex === folderIndex && e._hasStream;
    }).sort(function(a, b) {
        return a._streamIndexInFolder - b._streamIndexInFolder;
    });
    return {
        fileSizes: ordered.map(function(e) {
            return e.size;
        }),
        expectedCrcs: ordered.map(function(e) {
            return e._crc;
        })
    };
};
873
1244
  return SevenZipParser;
874
1245
  }();
875
1246
  /**