7z-iterator 1.1.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (184)
  1. package/dist/cjs/FileEntry.d.cts +12 -4
  2. package/dist/cjs/FileEntry.d.ts +12 -4
  3. package/dist/cjs/FileEntry.js +52 -24
  4. package/dist/cjs/FileEntry.js.map +1 -1
  5. package/dist/cjs/SevenZipIterator.d.cts +25 -2
  6. package/dist/cjs/SevenZipIterator.d.ts +25 -2
  7. package/dist/cjs/SevenZipIterator.js +68 -21
  8. package/dist/cjs/SevenZipIterator.js.map +1 -1
  9. package/dist/cjs/index.d.cts +0 -2
  10. package/dist/cjs/index.d.ts +0 -2
  11. package/dist/cjs/index.js +3 -12
  12. package/dist/cjs/index.js.map +1 -1
  13. package/dist/cjs/lib/streamToSource.d.cts +8 -11
  14. package/dist/cjs/lib/streamToSource.d.ts +8 -11
  15. package/dist/cjs/lib/streamToSource.js +21 -67
  16. package/dist/cjs/lib/streamToSource.js.map +1 -1
  17. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  18. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  19. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  20. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  21. package/dist/cjs/lzma/index.d.cts +13 -0
  22. package/dist/cjs/lzma/index.d.ts +13 -0
  23. package/dist/cjs/lzma/index.js +63 -0
  24. package/dist/cjs/lzma/index.js.map +1 -0
  25. package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
  26. package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
  27. package/dist/cjs/lzma/stream/transforms.js +149 -0
  28. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  29. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
  30. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
  31. package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
  32. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  33. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
  34. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
  35. package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
  36. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  37. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  38. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  39. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  40. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  41. package/dist/cjs/lzma/types.d.cts +110 -0
  42. package/dist/cjs/lzma/types.d.ts +110 -0
  43. package/dist/cjs/lzma/types.js +264 -0
  44. package/dist/cjs/lzma/types.js.map +1 -0
  45. package/dist/cjs/nextEntry.js +24 -26
  46. package/dist/cjs/nextEntry.js.map +1 -1
  47. package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
  48. package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
  49. package/dist/cjs/sevenz/ArchiveSource.js +69 -0
  50. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  51. package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
  52. package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
  53. package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
  54. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
  55. package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
  56. package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
  57. package/dist/cjs/sevenz/SevenZipParser.js +574 -203
  58. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  59. package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
  60. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  61. package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
  62. package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
  63. package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
  64. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  65. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
  66. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
  67. package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
  68. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  69. package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
  70. package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
  71. package/dist/cjs/sevenz/codecs/Copy.js +2 -15
  72. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  73. package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
  74. package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
  75. package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
  76. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  77. package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
  78. package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
  79. package/dist/cjs/sevenz/codecs/Delta.js +29 -10
  80. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  81. package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
  82. package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
  83. package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
  84. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  85. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
  86. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
  87. package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
  88. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  89. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
  90. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  91. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
  92. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  93. package/dist/cjs/types.d.cts +2 -16
  94. package/dist/cjs/types.d.ts +2 -16
  95. package/dist/cjs/types.js.map +1 -1
  96. package/dist/esm/FileEntry.d.ts +12 -4
  97. package/dist/esm/FileEntry.js +52 -26
  98. package/dist/esm/FileEntry.js.map +1 -1
  99. package/dist/esm/SevenZipIterator.d.ts +25 -2
  100. package/dist/esm/SevenZipIterator.js +69 -22
  101. package/dist/esm/SevenZipIterator.js.map +1 -1
  102. package/dist/esm/index.d.ts +0 -2
  103. package/dist/esm/index.js +0 -1
  104. package/dist/esm/index.js.map +1 -1
  105. package/dist/esm/lib/streamToSource.d.ts +8 -11
  106. package/dist/esm/lib/streamToSource.js +22 -68
  107. package/dist/esm/lib/streamToSource.js.map +1 -1
  108. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  109. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  110. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  111. package/dist/esm/lzma/index.d.ts +13 -0
  112. package/dist/esm/lzma/index.js +15 -0
  113. package/dist/esm/lzma/index.js.map +1 -0
  114. package/dist/esm/lzma/stream/transforms.d.ts +38 -0
  115. package/dist/esm/lzma/stream/transforms.js +150 -0
  116. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  117. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
  118. package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
  119. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  120. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
  121. package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
  122. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  123. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  124. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  125. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  126. package/dist/esm/lzma/types.d.ts +110 -0
  127. package/dist/esm/lzma/types.js +154 -0
  128. package/dist/esm/lzma/types.js.map +1 -0
  129. package/dist/esm/nextEntry.js +24 -26
  130. package/dist/esm/nextEntry.js.map +1 -1
  131. package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
  132. package/dist/esm/sevenz/ArchiveSource.js +70 -1
  133. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  134. package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
  135. package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
  136. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
  137. package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
  138. package/dist/esm/sevenz/SevenZipParser.js +414 -198
  139. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  140. package/dist/esm/sevenz/codecs/BZip2.js +2 -1
  141. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  142. package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
  143. package/dist/esm/sevenz/codecs/Bcj.js +106 -6
  144. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  145. package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
  146. package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
  147. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  148. package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
  149. package/dist/esm/sevenz/codecs/Copy.js +1 -9
  150. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  151. package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
  152. package/dist/esm/sevenz/codecs/Deflate.js +9 -7
  153. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  154. package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
  155. package/dist/esm/sevenz/codecs/Delta.js +33 -8
  156. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  157. package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
  158. package/dist/esm/sevenz/codecs/Lzma.js +17 -24
  159. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  160. package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
  161. package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
  162. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  163. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  164. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
  165. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  166. package/dist/esm/types.d.ts +2 -16
  167. package/dist/esm/types.js.map +1 -1
  168. package/package.json +3 -3
  169. package/assets/lzma-purejs/LICENSE +0 -11
  170. package/assets/lzma-purejs/index.js +0 -19
  171. package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
  172. package/assets/lzma-purejs/lib/LZ.js +0 -6
  173. package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
  174. package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
  175. package/assets/lzma-purejs/lib/LZMA.js +0 -6
  176. package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
  177. package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
  178. package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
  179. package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
  180. package/assets/lzma-purejs/lib/Stream.js +0 -41
  181. package/assets/lzma-purejs/lib/Util.js +0 -114
  182. package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
  183. package/assets/lzma-purejs/package-lock.json +0 -13
  184. package/assets/lzma-purejs/package.json +0 -8
@@ -1,3 +1,13 @@
+ // LZMA2 codec using TypeScript LZMA decoder
+ //
+ // LZMA2 format specification:
+ // https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md
+ //
+ // Control byte values:
+ // 0x00 = End of stream
+ // 0x01 = Uncompressed chunk, dictionary reset
+ // 0x02 = Uncompressed chunk, no dictionary reset
+ // 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)
  "use strict";
  Object.defineProperty(exports, "__esModule", {
  value: true
@@ -16,208 +26,17 @@ _export(exports, {
  return decodeLzma2;
  }
  });
- var _module = /*#__PURE__*/ _interop_require_default(require("module"));
- var _extractbaseiterator = require("extract-base-iterator");
- var _createBufferingDecoderts = /*#__PURE__*/ _interop_require_default(require("./createBufferingDecoder.js"));
- var _streamsts = require("./streams.js");
- function _interop_require_default(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
- var _require = typeof require === 'undefined' ? _module.default.createRequire(require("url").pathToFileURL(__filename).toString()) : require;
- // Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)
- // Path accounts for build output in dist/esm/sevenz/codecs/
- var LZMA = _require('../../../../assets/lzma-purejs').LZMA;
- var LzmaDecoder = LZMA.Decoder;
- /**
- * Decode LZMA2 dictionary size from properties byte
- * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar
- *
- * Per XZ spec, dictionary sizes are:
- * 0x00 = 4 KiB (2^12)
- * 0x01 = 6 KiB
- * 0x02 = 8 KiB (2^13)
- * ...
- * 0x28 = 1.5 GiB
- */ function decodeDictionarySize(propByte) {
- if (propByte > 40) {
- throw new Error("Invalid LZMA2 dictionary size property: ".concat(propByte));
- }
- if (propByte === 40) {
- // Max dictionary size: 4 GiB - 1
- return 0xffffffff;
- }
- // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)
- var base = 2 | propByte & 1;
- var shift = Math.floor(propByte / 2) + 11;
- return base << shift;
- }
+ var _indexts = require("../../lzma/index.js");
  function decodeLzma2(input, properties, unpackSize) {
  if (!properties || properties.length < 1) {
  throw new Error('LZMA2 requires properties byte');
  }
- var dictSize = decodeDictionarySize(properties[0]);
- // Memory optimization: pre-allocate output buffer if size is known
- // This avoids double-memory during Buffer.concat
- var outputBuffer = null;
- var outputPos = 0;
- var outputChunks = [];
- if (unpackSize && unpackSize > 0) {
- outputBuffer = (0, _extractbaseiterator.allocBufferUnsafe)(unpackSize);
- }
- var offset = 0;
- // LZMA decoder instance - reused across chunks
- // The vendored decoder supports setSolid() for LZMA2 state preservation
- // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation
- // and _prevByte which is used for literal decoder context selection
- var decoder = new LzmaDecoder();
- decoder.setDictionarySize(dictSize);
- var outWindow = decoder._outWindow;
- // Track current LZMA properties (lc, lp, pb)
- var propsSet = false;
- while(offset < input.length){
- var control = input[offset++];
- if (control === 0x00) {
- break;
- }
- if (control === 0x01 || control === 0x02) {
- // Uncompressed chunk
- // 0x01 = dictionary reset + uncompressed
- // 0x02 = uncompressed (no reset)
- // Handle dictionary reset for 0x01
- if (control === 0x01) {
- outWindow._pos = 0;
- outWindow._streamPos = 0;
- decoder._nowPos64 = 0;
- }
- if (offset + 2 > input.length) {
- throw new Error('Truncated LZMA2 uncompressed chunk header');
- }
- // Size is big-endian, 16-bit, value + 1
- var uncompSize = (input[offset] << 8 | input[offset + 1]) + 1;
- offset += 2;
- if (offset + uncompSize > input.length) {
- throw new Error('Truncated LZMA2 uncompressed data');
- }
- // Get the uncompressed data
- var uncompData = input.slice(offset, offset + uncompSize);
- // Copy uncompressed data to output
- if (outputBuffer) {
- uncompData.copy(outputBuffer, outputPos);
- outputPos += uncompData.length;
- } else {
- outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(uncompData);
- }
- // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it
- // The decoder needs to track this data for LZ77 back-references
- // We write directly to _buffer to avoid flush() which requires _stream to be set
- // We must also update _streamPos to match _pos so that flush() doesn't try to write
- for(var i = 0; i < uncompData.length; i++){
- outWindow._buffer[outWindow._pos++] = uncompData[i];
- // Handle circular buffer wrap-around
- if (outWindow._pos >= outWindow._windowSize) {
- outWindow._pos = 0;
- }
- }
- // Keep _streamPos in sync so flush() doesn't try to write these bytes
- // (they're already in our output buffer)
- outWindow._streamPos = outWindow._pos;
- // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation
- decoder._nowPos64 += uncompSize;
- // Update prevByte for literal decoder context in subsequent LZMA chunks
- decoder._prevByte = uncompData[uncompData.length - 1];
- offset += uncompSize;
- } else if (control >= 0x80) {
- // LZMA compressed chunk
- // Control byte format (bits 7-0):
- // Bit 7: always 1 for LZMA chunk
- // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)
- // Bits 4-0: high 5 bits of uncompressed size - 1
- // Control byte ranges (based on bits 6-5):
- // 0x80-0x9F (00): no reset - continue existing state (solid mode)
- // 0xA0-0xBF (01): reset state only
- // 0xC0-0xDF (10): reset state + new properties
- // 0xE0-0xFF (11): reset dictionary + state + new properties
- var resetState = control >= 0xa0;
- var newProps = control >= 0xc0;
- var dictReset = control >= 0xe0;
- var useSolidMode = !resetState;
- // Handle dictionary reset for control bytes 0xE0-0xFF
- if (dictReset) {
- outWindow._pos = 0;
- outWindow._streamPos = 0;
- }
- if (offset + 4 > input.length) {
- throw new Error('Truncated LZMA2 LZMA chunk header');
- }
- // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1
- var uncompHigh = control & 0x1f;
- var uncompSize2 = (uncompHigh << 16 | input[offset] << 8 | input[offset + 1]) + 1;
- offset += 2;
- // Compressed size: 16 bits + 1
- var compSize = (input[offset] << 8 | input[offset + 1]) + 1;
- offset += 2;
- // If new properties, read 1-byte LZMA properties
- if (newProps) {
- if (offset >= input.length) {
- throw new Error('Truncated LZMA2 properties byte');
- }
- var propsByte = input[offset++];
- // Properties byte: pb * 45 + lp * 9 + lc
- // where pb, lp, lc are LZMA parameters
- var lc = propsByte % 9;
- var remainder = Math.floor(propsByte / 9);
- var lp = remainder % 5;
- var pb = Math.floor(remainder / 5);
- if (!decoder.setLcLpPb(lc, lp, pb)) {
- throw new Error("Invalid LZMA properties: lc=".concat(lc, " lp=").concat(lp, " pb=").concat(pb));
- }
- propsSet = true;
- }
- if (!propsSet) {
- throw new Error('LZMA chunk without properties');
- }
- if (offset + compSize > input.length) {
- throw new Error('Truncated LZMA2 compressed data');
- }
- // Decode LZMA chunk
- var inStream = (0, _streamsts.createInputStream)(input, offset, compSize);
- var outStream = (0, _streamsts.createOutputStream)(uncompSize2); // Pre-allocate for memory efficiency
- // Set solid mode based on control byte - this preserves state across code() calls
- // For state reset WITHOUT dict reset (0xa0-0xdf), use resetProbabilities() to
- // reset probability tables while preserving _nowPos64 for dictionary references
- if (resetState && !dictReset) {
- decoder.resetProbabilities();
- decoder.setSolid(true); // Preserve _nowPos64 in code()
- } else {
- decoder.setSolid(useSolidMode);
- }
- // Decode the chunk
- var success = decoder.code(inStream, outStream, uncompSize2);
- if (!success) {
- throw new Error('LZMA decompression failed');
- }
- var chunkOutput = outStream.toBuffer();
- if (outputBuffer) {
- chunkOutput.copy(outputBuffer, outputPos);
- outputPos += chunkOutput.length;
- } else {
- outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(chunkOutput);
- }
- offset += compSize;
- } else {
- throw new Error("Invalid LZMA2 control byte: 0x".concat(control.toString(16)));
- }
- }
- // Return pre-allocated buffer or concatenated chunks
- if (outputBuffer) {
- // Return only the used portion if we didn't fill the buffer
- return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
- }
- return Buffer.concat(outputChunks);
+ return (0, _indexts.decodeLzma2)(input, properties, unpackSize);
  }
- function createLzma2Decoder(properties, unpackSize) {
- return (0, _createBufferingDecoderts.default)(decodeLzma2, properties, unpackSize);
+ function createLzma2Decoder(properties, _unpackSize) {
+ if (!properties || properties.length < 1) {
+ throw new Error('LZMA2 requires properties byte');
+ }
+ return (0, _indexts.createLzma2Decoder)(properties);
  }
  /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
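The removed CJS code above spells out the two header formulas that the new `dist/*/lzma/` modules now own: the LZMA2 dictionary size derived from the single properties byte, and the lc/lp/pb split of the LZMA properties byte (pb * 45 + lp * 9 + lc). The following is a self-contained TypeScript restatement of those formulas for reference only; it is not the new `lzma/index` API, just the arithmetic shown in the deleted implementation.

```ts
// Standalone sketch of the two header decodings from the removed code above.
// Illustration only - the real logic now lives in dist/*/lzma/.

// LZMA2 dictionary-size property byte: 0..40, where 40 means 4 GiB - 1,
// otherwise size = (2 | (byte & 1)) << (byte / 2 + 11).
function decodeDictionarySize(propByte: number): number {
  if (propByte > 40) throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);
  if (propByte === 40) return 0xffffffff; // 4 GiB - 1
  return (2 | (propByte & 1)) << (Math.floor(propByte / 2) + 11);
}

// LZMA properties byte packs three parameters as pb * 45 + lp * 9 + lc.
function decodeLcLpPb(propsByte: number): { lc: number; lp: number; pb: number } {
  const lc = propsByte % 9;
  const remainder = Math.floor(propsByte / 9);
  return { lc, lp: remainder % 5, pb: Math.floor(remainder / 5) };
}

// e.g. decodeDictionarySize(0x00) === 4096 (4 KiB);
//      decodeLcLpPb(0x5d) yields lc=3, lp=0, pb=2 (the common defaults).
```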
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["import Module from 'module';\n\nconst _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;\n\n// LZMA2 codec using lzma-purejs\n// LZMA2 is a container format that wraps LZMA chunks with framing\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\nimport { createInputStream, createOutputStream } from './streams.ts';\n\n// Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)\n// Path accounts for build output in dist/esm/sevenz/codecs/\nconst { LZMA } = _require('../../../../assets/lzma-purejs');\nconst LzmaDecoder = LZMA.Decoder;\n\n/**\n * Decode LZMA2 dictionary size from properties byte\n * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar\n *\n * Per XZ spec, dictionary sizes are:\n * 0x00 = 4 KiB (2^12)\n * 0x01 = 6 KiB\n * 0x02 = 8 KiB (2^13)\n * ...\n * 0x28 = 1.5 GiB\n */\nfunction decodeDictionarySize(propByte: number): number {\n if (propByte > 40) {\n throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);\n }\n if (propByte === 40) {\n // Max dictionary size: 4 GiB - 1\n return 0xffffffff;\n }\n // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)\n const base = 2 | (propByte & 1);\n const shift = Math.floor(propByte / 2) + 11;\n return base << shift;\n}\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (used for pre-allocation to reduce memory)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n const dictSize = decodeDictionarySize(properties[0]);\n\n // Memory optimization: pre-allocate output buffer if size is known\n // This avoids double-memory during Buffer.concat\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n // LZMA decoder instance - reused across chunks\n // The vendored decoder supports setSolid() for LZMA2 state preservation\n // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation\n // and _prevByte which is used for literal decoder context selection\n const decoder = new LzmaDecoder() as InstanceType<typeof LzmaDecoder> & {\n setSolid: (solid: boolean) => void;\n resetProbabilities: () => void;\n _nowPos64: number;\n _prevByte: number;\n };\n decoder.setDictionarySize(dictSize);\n\n // Access internal _outWindow for dictionary management\n // We need to preserve dictionary state across LZMA2 chunks\n type OutWindowType = {\n _buffer: Buffer;\n _pos: number;\n _streamPos: number;\n _windowSize: number;\n init: (solid: boolean) => void;\n };\n 
const outWindow = (decoder as unknown as { _outWindow: OutWindowType })._outWindow;\n\n // Track current LZMA properties (lc, lp, pb)\n let propsSet = false;\n\n while (offset < input.length) {\n const control = input[offset++];\n\n if (control === 0x00) {\n // End of LZMA2 stream\n break;\n }\n\n if (control === 0x01 || control === 0x02) {\n // Uncompressed chunk\n // 0x01 = dictionary reset + uncompressed\n // 0x02 = uncompressed (no reset)\n\n // Handle dictionary reset for 0x01\n if (control === 0x01) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n decoder._nowPos64 = 0;\n }\n\n if (offset + 2 > input.length) {\n throw new Error('Truncated LZMA2 uncompressed chunk header');\n }\n\n // Size is big-endian, 16-bit, value + 1\n const uncompSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n if (offset + uncompSize > input.length) {\n throw new Error('Truncated LZMA2 uncompressed data');\n }\n\n // Get the uncompressed data\n const uncompData = input.slice(offset, offset + uncompSize);\n\n // Copy uncompressed data to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks?.push(uncompData);\n }\n\n // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it\n // The decoder needs to track this data for LZ77 back-references\n // We write directly to _buffer to avoid flush() which requires _stream to be set\n // We must also update _streamPos to match _pos so that flush() doesn't try to write\n for (let i = 0; i < uncompData.length; i++) {\n outWindow._buffer[outWindow._pos++] = uncompData[i];\n // Handle circular buffer wrap-around\n if (outWindow._pos >= outWindow._windowSize) {\n outWindow._pos = 0;\n }\n }\n // Keep _streamPos in sync so flush() doesn't try to write these bytes\n // (they're already in our output buffer)\n outWindow._streamPos = outWindow._pos;\n\n // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation\n decoder._nowPos64 += uncompSize;\n\n // Update prevByte for literal decoder context in subsequent LZMA chunks\n decoder._prevByte = uncompData[uncompData.length - 1];\n\n offset += uncompSize;\n } else if (control >= 0x80) {\n // LZMA compressed chunk\n // Control byte format (bits 7-0):\n // Bit 7: always 1 for LZMA chunk\n // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)\n // Bits 4-0: high 5 bits of uncompressed size - 1\n\n // Control byte ranges (based on bits 6-5):\n // 0x80-0x9F (00): no reset - continue existing state (solid mode)\n // 0xA0-0xBF (01): reset state only\n // 0xC0-0xDF (10): reset state + new properties\n // 0xE0-0xFF (11): reset dictionary + state + new properties\n const resetState = control >= 0xa0;\n const newProps = control >= 0xc0;\n const dictReset = control >= 0xe0;\n const useSolidMode = !resetState;\n\n // Handle dictionary reset for control bytes 0xE0-0xFF\n if (dictReset) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n }\n\n if (offset + 4 > input.length) {\n throw new Error('Truncated LZMA2 LZMA chunk header');\n }\n\n // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1\n const uncompHigh = control & 0x1f;\n const uncompSize2 = ((uncompHigh << 16) | (input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // Compressed size: 16 bits + 1\n const compSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // If new properties, read 1-byte LZMA properties\n if (newProps) {\n if (offset >= 
input.length) {\n throw new Error('Truncated LZMA2 properties byte');\n }\n const propsByte = input[offset++];\n\n // Properties byte: pb * 45 + lp * 9 + lc\n // where pb, lp, lc are LZMA parameters\n const lc = propsByte % 9;\n const remainder = Math.floor(propsByte / 9);\n const lp = remainder % 5;\n const pb = Math.floor(remainder / 5);\n\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n if (offset + compSize > input.length) {\n throw new Error('Truncated LZMA2 compressed data');\n }\n\n // Decode LZMA chunk\n const inStream = createInputStream(input, offset, compSize);\n const outStream = createOutputStream(uncompSize2); // Pre-allocate for memory efficiency\n\n // Set solid mode based on control byte - this preserves state across code() calls\n // For state reset WITHOUT dict reset (0xa0-0xdf), use resetProbabilities() to\n // reset probability tables while preserving _nowPos64 for dictionary references\n if (resetState && !dictReset) {\n decoder.resetProbabilities();\n decoder.setSolid(true); // Preserve _nowPos64 in code()\n } else {\n decoder.setSolid(useSolidMode);\n }\n\n // Decode the chunk\n const success = decoder.code(inStream, outStream, uncompSize2);\n if (!success) {\n throw new Error('LZMA decompression failed');\n }\n\n const chunkOutput = outStream.toBuffer();\n if (outputBuffer) {\n chunkOutput.copy(outputBuffer, outputPos);\n outputPos += chunkOutput.length;\n } else {\n outputChunks?.push(chunkOutput);\n }\n\n offset += compSize;\n } else {\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n // Return only the used portion if we didn't fill the buffer\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n */\nexport function createLzma2Decoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeLzma2, properties, unpackSize);\n}\n"],"names":["createLzma2Decoder","decodeLzma2","_require","require","Module","createRequire","LZMA","LzmaDecoder","Decoder","decodeDictionarySize","propByte","Error","base","shift","Math","floor","input","properties","unpackSize","length","dictSize","outputBuffer","outputPos","outputChunks","allocBufferUnsafe","offset","decoder","setDictionarySize","outWindow","_outWindow","propsSet","control","_pos","_streamPos","_nowPos64","uncompSize","uncompData","slice","copy","push","i","_buffer","_windowSize","_prevByte","resetState","newProps","dictReset","useSolidMode","uncompHigh","uncompSize2","compSize","propsByte","lc","remainder","lp","pb","setLcLpPb","inStream","createInputStream","outStream","createOutputStream","resetProbabilities","setSolid","success","code","chunkOutput","toBuffer","toString","Buffer","concat","createBufferingDecoder"],"mappings":";;;;;;;;;;;QAsRgBA;eAAAA;;QA3NAC;eAAAA;;;6DA3DG;mCAgBe;+EAEC;yBACmB;;;;;;AAjBtD,IAAMC,WAAW,OAAOC,YAAY,cAAcC,eAAM,CAACC,aAAa,CAAC,uDAAmBF;AAmB1F,sFAAsF;AACtF,4DAA4D;AAC5D,IAAM,AAAEG,OAASJ,SAAS,kCAAlBI;AACR,IAAMC,cAAcD,KAAKE,OAAO;AAEhC;;;;;;;;;;CAUC,GACD,SAASC,qBAAqBC,QAAgB;IAC5C,IAAIA,WAAW,IAAI;QACjB,MAAM,IAAIC,MAAM,AAAC,2CAAmD,OAATD;IAC7D;IACA,IAAIA,aAAa,IAAI;QACnB,iCAAiC;QACjC,OAAO;IACT;IACA,8DAA8D;IAC9D,IAAME,OAAO,IAAKF,WAAW;IAC7B,IAAMG,QAAQC,KAAKC,KAAK,CAACL,WAAW,KAAK;IACzC,OAAOE,QAAQC;AACjB;AAUO,SAASZ,YAAYe,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIR,MAAM;IAClB;IAEA,IAAMS,WAAWX,qBAAqBQ,UAAU,CAAC,EAAE;IAEnD,mEAAmE;IACnE,iDAAiD;IACjD,IAAII,eAA8B;IAClC,IAAIC,YAAY;IAChB,IAAMC,eAAyB,EAAE;IAEjC,IAAIL,cAAcA,aAAa,GAAG;QAChCG,eAAeG,IAAAA,sCAAiB,EAACN;IACnC;IAEA,IAAIO,SAAS;IAEb,+CAA+C;IAC/C,wEAAwE;IACxE,sFAAsF;IACtF,oEAAoE;IACpE,IAAMC,UAAU,IAAInB;IAMpBmB,QAAQC,iBAAiB,CAACP;IAW1B,IAAMQ,YAAY,AAACF,QAAqDG,UAAU;IAElF,6CAA6C;IAC7C,IAAIC,WAAW;IAEf,MAAOL,SAAST,MAAMG,MAAM,CAAE;QAC5B,IAAMY,UAAUf,KAAK,CAACS,SAAS;QAE/B,IAAIM,YAAY,MAAM;YAEpB;QACF;QAEA,IAAIA,YAAY,QAAQA,YAAY,MAAM;YACxC,qBAAqB;YACrB,yCAAyC;YACzC,iCAAiC;YAEjC,mCAAmC;YACnC,IAAIA,YAAY,MAAM;gBACpBH,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;gBACvBP,QAAQQ,SAAS,GAAG;YACtB;YAEA,IAAIT,SAAS,IAAIT,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,wCAAwC;YACxC,IAAMwB,aAAa,AAAC,CAAA,AAACnB,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YAChEA,UAAU;YAEV,IAAIA,SAASU,aAAanB,MAAMG,MAAM,EAAE;gBACtC,MAAM,IAAIR,MAAM;YAClB;YAEA,4BAA4B;YAC5B,IAAMyB,aAAapB,MAAMqB,KAAK,CAACZ,QAAQA,SAASU;YAEhD,mCAAmC;YACnC,IAAId,cAAc;gBAChBe,WAAWE,IAAI,CAACjB,cAAcC;gBAC9BA,aAAac,WAAWjB,MAAM;YAChC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAcgB,IAAI,CAACH;YACrB;YAEA,2FAA2F;YAC3F,gEAAgE;YAChE,iFAAiF;YACjF,oFAAoF;YACpF,IAAK,IAAII,IAAI,GAAGA,IAAIJ,WAAWjB,MAAM,EAAEqB,IAAK;gBAC1CZ,UAAUa,OAAO,CAACb,UAAUI,IAAI,GAAG,GAAGI,UAAU,CAACI,EAAE;gBACnD,qCAAqC;gBACrC,IAAIZ,UAAUI,IAAI,IAAIJ,UAAUc,WAAW,EAAE;oBAC3Cd,UAAUI,IAAI,GAAG;gBACnB;YACF;YACA,sEAAsE;YACtE,yCAAyC;YACzCJ,UAAUK,UAAU,GAAGL,UAAUI,IAAI;YAErC,8FAA8F;YAC9FN,QAAQQ,SAAS,IAAIC;YAErB,wEAAwE;YACxET,QAAQiB,SAAS,GAAGP,UAAU,CAACA,WAAWjB,MAAM,GAAG,EAAE;YAErDM,UAAUU;QACZ,OAAO,IAAIJ,WAAW,MAAM;YAC1B,wBAAwB;YACxB,kCAAkC;YAClC,iCAAiC;YACjC,sEAAsE;YACtE,iDAAiD;YAEjD,2CAA2C;YAC3C,kEAAkE;YAClE,mCAAmC;YACnC,+CAA+C;YAC/C,4DAA4D;YAC5D,IAAMa,aAAab,WAAW;YAC9B,IAAMc,WAA
Wd,WAAW;YAC5B,IAAMe,YAAYf,WAAW;YAC7B,IAAMgB,eAAe,CAACH;YAEtB,sDAAsD;YACtD,IAAIE,WAAW;gBACblB,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;YACzB;YAEA,IAAIR,SAAS,IAAIT,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,yEAAyE;YACzE,IAAMqC,aAAajB,UAAU;YAC7B,IAAMkB,cAAc,AAAC,CAAA,AAACD,cAAc,KAAOhC,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YACtFA,UAAU;YAEV,+BAA+B;YAC/B,IAAMyB,WAAW,AAAC,CAAA,AAAClC,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YAC9DA,UAAU;YAEV,iDAAiD;YACjD,IAAIoB,UAAU;gBACZ,IAAIpB,UAAUT,MAAMG,MAAM,EAAE;oBAC1B,MAAM,IAAIR,MAAM;gBAClB;gBACA,IAAMwC,YAAYnC,KAAK,CAACS,SAAS;gBAEjC,yCAAyC;gBACzC,uCAAuC;gBACvC,IAAM2B,KAAKD,YAAY;gBACvB,IAAME,YAAYvC,KAAKC,KAAK,CAACoC,YAAY;gBACzC,IAAMG,KAAKD,YAAY;gBACvB,IAAME,KAAKzC,KAAKC,KAAK,CAACsC,YAAY;gBAElC,IAAI,CAAC3B,QAAQ8B,SAAS,CAACJ,IAAIE,IAAIC,KAAK;oBAClC,MAAM,IAAI5C,MAAM,AAAC,+BAAuC2C,OAATF,IAAG,QAAeG,OAATD,IAAG,QAAS,OAAHC;gBACnE;gBACAzB,WAAW;YACb;YAEA,IAAI,CAACA,UAAU;gBACb,MAAM,IAAInB,MAAM;YAClB;YAEA,IAAIc,SAASyB,WAAWlC,MAAMG,MAAM,EAAE;gBACpC,MAAM,IAAIR,MAAM;YAClB;YAEA,oBAAoB;YACpB,IAAM8C,WAAWC,IAAAA,4BAAiB,EAAC1C,OAAOS,QAAQyB;YAClD,IAAMS,YAAYC,IAAAA,6BAAkB,EAACX,cAAc,qCAAqC;YAExF,kFAAkF;YAClF,8EAA8E;YAC9E,gFAAgF;YAChF,IAAIL,cAAc,CAACE,WAAW;gBAC5BpB,QAAQmC,kBAAkB;gBAC1BnC,QAAQoC,QAAQ,CAAC,OAAO,+BAA+B;YACzD,OAAO;gBACLpC,QAAQoC,QAAQ,CAACf;YACnB;YAEA,mBAAmB;YACnB,IAAMgB,UAAUrC,QAAQsC,IAAI,CAACP,UAAUE,WAAWV;YAClD,IAAI,CAACc,SAAS;gBACZ,MAAM,IAAIpD,MAAM;YAClB;YAEA,IAAMsD,cAAcN,UAAUO,QAAQ;YACtC,IAAI7C,cAAc;gBAChB4C,YAAY3B,IAAI,CAACjB,cAAcC;gBAC/BA,aAAa2C,YAAY9C,MAAM;YACjC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAcgB,IAAI,CAAC0B;YACrB;YAEAxC,UAAUyB;QACZ,OAAO;YACL,MAAM,IAAIvC,MAAM,AAAC,iCAAqD,OAArBoB,QAAQoC,QAAQ,CAAC;QACpE;IACF;IAEA,qDAAqD;IACrD,IAAI9C,cAAc;QAChB,4DAA4D;QAC5D,OAAOC,YAAYD,aAAaF,MAAM,GAAGE,aAAagB,KAAK,CAAC,GAAGf,aAAaD;IAC9E;IACA,OAAO+C,OAAOC,MAAM,CAAC9C;AACvB;AAKO,SAASvB,mBAAmBiB,UAAmB,EAAEC,UAAmB;IACzE,OAAOoD,IAAAA,iCAAsB,EAACrE,aAAagB,YAAYC;AACzD"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["// LZMA2 codec using TypeScript LZMA decoder\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n\nimport type { Transform } from 'stream';\nimport { createLzma2Decoder as createLzma2Transform, decodeLzma2 as lzma2Decode } from '../../lzma/index.ts';\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return lzma2Decode(input, properties, unpackSize);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a true streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of\n * total archive size.\n */\nexport function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return createLzma2Transform(properties) as Transform;\n}\n"],"names":["createLzma2Decoder","decodeLzma2","input","properties","unpackSize","length","Error","lzma2Decode","_unpackSize","createLzma2Transform"],"mappings":"AAAA,4CAA4C;AAC5C,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;;;;;;;;;;;;QA8B1DA;eAAAA;;QAjBAC;eAAAA;;;uBAVuE;AAUhF,SAASA,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOC,IAAAA,oBAAW,EAACL,OAAOC,YAAYC;AACxC;AAWO,SAASJ,mBAAmBG,UAAmB,EAAEK,WAAoB;IAC1E,IAAI,CAACL,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOG,IAAAA,2BAAoB,EAACN;AAC9B"}
@@ -1,7 +1,7 @@
- import Stream from 'stream';
+ import { Transform } from 'extract-base-iterator';
  export type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;
  /**
  * Create a Transform stream that buffers all input, then decodes in flush
  * This is the common pattern for codecs that can't stream (need full input)
  */
- export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): Stream.Transform;
+ export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): InstanceType<typeof Transform>;
@@ -1,7 +1,7 @@
- import Stream from 'stream';
+ import { Transform } from 'extract-base-iterator';
  export type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;
  /**
  * Create a Transform stream that buffers all input, then decodes in flush
  * This is the common pattern for codecs that can't stream (need full input)
  */
- export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): Stream.Transform;
+ export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): InstanceType<typeof Transform>;
@@ -13,23 +13,10 @@ Object.defineProperty(exports, /**
  return createBufferingDecoder;
  }
  });
- var _stream = /*#__PURE__*/ _interop_require_default(require("stream"));
- function _interop_require_default(obj) {
- return obj && obj.__esModule ? obj : {
- default: obj
- };
- }
- // Use native streams when available, readable-stream only for Node 0.x
- var major = +process.versions.node.split('.')[0];
- var Transform;
- if (major > 0) {
- Transform = _stream.default.Transform;
- } else {
- Transform = require('readable-stream').Transform;
- }
+ var _extractbaseiterator = require("extract-base-iterator");
  function createBufferingDecoder(decodeFn, properties, unpackSize) {
  var chunks = [];
- return new Transform({
+ return new _extractbaseiterator.Transform({
  transform: function(chunk, _encoding, callback) {
  chunks.push(chunk);
  callback();
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/createBufferingDecoder.ts"],"sourcesContent":["// Helper to create a Transform stream that buffers all input before decoding\n// Used by codecs that need the full input before decompression (LZMA, LZMA2, BZip2, etc.)\n\nimport Stream from 'stream';\n\n// Use native streams when available, readable-stream only for Node 0.x\nconst major = +process.versions.node.split('.')[0];\nlet Transform: typeof Stream.Transform;\nif (major > 0) {\n Transform = Stream.Transform;\n} else {\n Transform = require('readable-stream').Transform;\n}\ntype TransformCallback = (error?: Error | null, data?: Buffer) => void;\n\nexport type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;\n\n/**\n * Create a Transform stream that buffers all input, then decodes in flush\n * This is the common pattern for codecs that can't stream (need full input)\n */\nexport default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): Stream.Transform {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: TransformCallback) => {\n chunks.push(chunk);\n callback();\n },\n flush: function (callback: TransformCallback) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeFn(input, properties, unpackSize);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n });\n}\n"],"names":["createBufferingDecoder","major","process","versions","node","split","Transform","Stream","require","decodeFn","properties","unpackSize","chunks","transform","chunk","_encoding","callback","push","flush","input","Buffer","concat","output","err"],"mappings":"AAAA,6EAA6E;AAC7E,0FAA0F;;;;;+BAgB1F;;;CAGC,GACD;;;eAAwBA;;;6DAlBL;;;;;;AAEnB,uEAAuE;AACvE,IAAMC,QAAQ,CAACC,QAAQC,QAAQ,CAACC,IAAI,CAACC,KAAK,CAAC,IAAI,CAAC,EAAE;AAClD,IAAIC;AACJ,IAAIL,QAAQ,GAAG;IACbK,YAAYC,eAAM,CAACD,SAAS;AAC9B,OAAO;IACLA,YAAYE,QAAQ,mBAAmBF,SAAS;AAClD;AASe,SAASN,uBAAuBS,QAAkB,EAAEC,UAAmB,EAAEC,UAAmB;IACzG,IAAMC,SAAmB,EAAE;IAE3B,OAAO,IAAIN,UAAU;QACnBO,WAAW,SAACC,OAAeC,WAAmBC;YAC5CJ,OAAOK,IAAI,CAACH;YACZE;QACF;QACAE,OAAO,SAAPA,MAAiBF,QAA2B;YAC1C,IAAI;gBACF,IAAMG,QAAQC,OAAOC,MAAM,CAACT;gBAC5B,IAAMU,SAASb,SAASU,OAAOT,YAAYC;gBAC3C,IAAI,CAACM,IAAI,CAACK;gBACVN;YACF,EAAE,OAAOO,KAAK;gBACZP,SAASO;YACX;QACF;IACF;AACF"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/createBufferingDecoder.ts"],"sourcesContent":["// Helper to create a Transform stream that buffers all input before decoding\n// Used by codecs that need the full input before decompression (LZMA, LZMA2, BZip2, etc.)\n\nimport { Transform } from 'extract-base-iterator';\n\ntype TransformCallback = (error?: Error | null, data?: Buffer) => void;\n\nexport type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;\n\n/**\n * Create a Transform stream that buffers all input, then decodes in flush\n * This is the common pattern for codecs that can't stream (need full input)\n */\nexport default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): InstanceType<typeof Transform> {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: TransformCallback) => {\n chunks.push(chunk);\n callback();\n },\n flush: function (callback: TransformCallback) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeFn(input, properties, unpackSize);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n });\n}\n"],"names":["createBufferingDecoder","decodeFn","properties","unpackSize","chunks","Transform","transform","chunk","_encoding","callback","push","flush","input","Buffer","concat","output","err"],"mappings":"AAAA,6EAA6E;AAC7E,0FAA0F;;;;;+BAQ1F;;;CAGC,GACD;;;eAAwBA;;;mCAVE;AAUX,SAASA,uBAAuBC,QAAkB,EAAEC,UAAmB,EAAEC,UAAmB;IACzG,IAAMC,SAAmB,EAAE;IAE3B,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAW,SAACC,OAAeC,WAAmBC;YAC5CL,OAAOM,IAAI,CAACH;YACZE;QACF;QACAE,OAAO,SAAPA,MAAiBF,QAA2B;YAC1C,IAAI;gBACF,IAAMG,QAAQC,OAAOC,MAAM,CAACV;gBAC5B,IAAMW,SAASd,SAASW,OAAOV,YAAYC;gBAC3C,IAAI,CAACO,IAAI,CAACK;gBACVN;YACF,EAAE,OAAOO,KAAK;gBACZP,SAASO;YACX;QACF;IACF;AACF"}
@@ -1,4 +1,6 @@
  export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';
+ export { default as FileEntry } from './FileEntry.js';
+ export type { SevenZipEntry } from './sevenz/SevenZipParser.js';
  import type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';
  import type FileEntry from './FileEntry.js';
  export type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;
@@ -10,21 +12,5 @@ export interface ExtractOptions extends BaseExtractOptions {
  * Password for encrypted archives
  */
  password?: string;
- /**
- * Memory threshold in bytes for stream input.
- * Archives smaller than this are buffered in memory for faster processing.
- * Archives larger than this are written to a temp file.
- * Default: 100 MB (100 * 1024 * 1024)
- */
- memoryThreshold?: number;
- }
- export { default as FileEntry } from './FileEntry.js';
- import type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.js';
- export interface SevenZipFile {
- getStream: () => NodeJS.ReadableStream;
- }
- export interface SevenZipFileIterator {
- next: () => SevenZipEntry | null;
- getParser: () => SevenZipParser;
  }
  export type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;
@@ -1,4 +1,6 @@
  export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';
+ export { default as FileEntry } from './FileEntry.js';
+ export type { SevenZipEntry } from './sevenz/SevenZipParser.js';
  import type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';
  import type FileEntry from './FileEntry.js';
  export type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;
@@ -10,21 +12,5 @@ export interface ExtractOptions extends BaseExtractOptions {
  * Password for encrypted archives
  */
  password?: string;
- /**
- * Memory threshold in bytes for stream input.
- * Archives smaller than this are buffered in memory for faster processing.
- * Archives larger than this are written to a temp file.
- * Default: 100 MB (100 * 1024 * 1024)
- */
- memoryThreshold?: number;
- }
- export { default as FileEntry } from './FileEntry.js';
- import type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.js';
- export interface SevenZipFile {
- getStream: () => NodeJS.ReadableStream;
- }
- export interface SevenZipFileIterator {
- next: () => SevenZipEntry | null;
- getParser: () => SevenZipParser;
  }
  export type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/types.ts"],"sourcesContent":["export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';\n\nimport type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';\nimport type FileEntry from './FileEntry.ts';\n\n// 7z-specific Entry union type with 7z-specific FileEntry\nexport type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;\n\n/**\n * Options for SevenZipIterator\n */\nexport interface ExtractOptions extends BaseExtractOptions {\n /**\n * Password for encrypted archives\n */\n password?: string;\n\n /**\n * Memory threshold in bytes for stream input.\n * Archives smaller than this are buffered in memory for faster processing.\n * Archives larger than this are written to a temp file.\n * Default: 100 MB (100 * 1024 * 1024)\n */\n memoryThreshold?: number;\n}\nexport { default as FileEntry } from './FileEntry.ts';\n\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\n\nexport interface SevenZipFile {\n getStream: () => NodeJS.ReadableStream;\n}\n\nexport interface SevenZipFileIterator {\n next: () => SevenZipEntry | null;\n getParser: () => SevenZipParser;\n}\n\nexport type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;\n"],"names":["DirectoryEntry","FileEntry","LinkEntry","Lock","SymbolicLinkEntry"],"mappings":";;;;;;;;;;;QAASA;eAAAA,mCAAc;;QAyBHC;eAAAA,oBAAS;;QAzBJC;eAAAA,8BAAS;;QAAEC;eAAAA,yBAAI;;QAAEC;eAAAA,sCAAiB;;;mCAAQ;kEAyB9B"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/types.ts"],"sourcesContent":["export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';\nexport { default as FileEntry } from './FileEntry.ts';\nexport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\n\nimport type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';\nimport type FileEntry from './FileEntry.ts';\n\n// 7z-specific Entry union type with 7z-specific FileEntry\nexport type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;\n\n/**\n * Options for SevenZipIterator\n */\nexport interface ExtractOptions extends BaseExtractOptions {\n /**\n * Password for encrypted archives\n */\n password?: string;\n}\n\nexport type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;\n"],"names":["DirectoryEntry","FileEntry","LinkEntry","Lock","SymbolicLinkEntry"],"mappings":";;;;;;;;;;;QAASA;eAAAA,mCAAc;;QACHC;eAAAA,oBAAS;;QADJC;eAAAA,8BAAS;;QAAEC;eAAAA,yBAAI;;QAAEC;eAAAA,sCAAiB;;;mCAAQ;kEAC9B"}
@@ -1,11 +1,19 @@
+ /**
+ * FileEntry for 7z archives
+ *
+ * Wraps a lazy stream - decompression happens when the stream is read.
+ * API consistent with zip-iterator and tar-iterator.
+ */
  import { type FileAttributes, FileEntry, type Lock, type NoParamCallback } from 'extract-base-iterator';
- import type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.js';
  import type { ExtractOptions } from './types.js';
  export default class SevenZipFileEntry extends FileEntry {
  private lock;
- private entry;
- private parser;
- constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: Lock);
+ private stream;
+ /**
+ * Whether this entry's folder supports streaming decompression.
+ */
+ readonly _canStream: boolean;
+ constructor(attributes: FileAttributes, stream: NodeJS.ReadableStream, lock: Lock, canStream: boolean);
  create(dest: string, callback: NoParamCallback): void;
  create(dest: string, options: ExtractOptions, callback: NoParamCallback): void;
  create(dest: string, options?: ExtractOptions): Promise<boolean>;
@@ -1,14 +1,17 @@
+ /**
+ * FileEntry for 7z archives
+ *
+ * Wraps a lazy stream - decompression happens when the stream is read.
+ * API consistent with zip-iterator and tar-iterator.
+ */ import once from 'call-once-fn';
  import { FileEntry, waitForAccess } from 'extract-base-iterator';
  import fs from 'fs';
  import oo from 'on-one';
  let SevenZipFileEntry = class SevenZipFileEntry extends FileEntry {
  create(dest, options, callback) {
- if (typeof options === 'function') {
- callback = options;
- options = null;
- }
+ callback = typeof options === 'function' ? options : callback;
+ options = typeof options === 'function' ? {} : options || {};
  if (typeof callback === 'function') {
- options = options || {};
  return FileEntry.prototype.create.call(this, dest, options, (err)=>{
  callback(err);
  if (this.lock) {
@@ -17,46 +20,69 @@ let SevenZipFileEntry = class SevenZipFileEntry extends FileEntry {
  }
  });
  }
- return new Promise((resolve, reject)=>{
- this.create(dest, options, (err, done)=>{
+ return new Promise((resolve, reject)=>this.create(dest, options, (err, done)=>{
  err ? reject(err) : resolve(done);
- });
- });
+ }));
  }
  _writeFile(fullPath, _options, callback) {
- if (!this.entry || !this.parser) {
- callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));
+ if (!this.stream) {
+ callback(new Error('7z FileEntry missing stream. Check for calling create multiple times'));
  return;
  }
- // Use callback-based async decompression
- this.parser.getEntryStreamAsync(this.entry, (err, stream)=>{
- if (err) return callback(err);
- if (!stream) return callback(new Error('No stream returned'));
- const res = stream.pipe(fs.createWriteStream(fullPath));
- oo(res, [
+ const stream = this.stream;
+ this.stream = null; // Prevent reuse
+ // Use once since errors can come from either stream
+ const cb = once((err)=>{
+ err ? callback(err) : waitForAccess(fullPath, callback);
+ });
+ try {
+ const writeStream = fs.createWriteStream(fullPath);
+ // Listen for errors on source stream (errors don't propagate through pipe)
+ stream.on('error', (streamErr)=>{
+ // Destroy the write stream on source error.
+ // On Node 0.8, destroy() emits 'close' before 'error'. Since on-one is listening
+ // for ['error', 'close', 'finish'], it catches 'close' first, calls our callback,
+ // and removes ALL listeners - including the 'error' listener. The subsequent EBADF
+ // error then fires with no handler, causing an uncaught exception.
+ // Adding a no-op error handler ensures there's always a listener for any error.
+ const ws = writeStream;
+ writeStream.on('error', ()=>{});
+ if (typeof ws.destroy === 'function') ws.destroy();
+ cb(streamErr);
+ });
+ // Pipe and listen for write stream completion/errors
+ stream.pipe(writeStream);
+ oo(writeStream, [
  'error',
  'close',
  'finish'
- ], (writeErr)=>{
- writeErr ? callback(writeErr) : waitForAccess(fullPath, callback);
- });
- });
+ ], cb);
+ } catch (pipeErr) {
+ cb(pipeErr);
+ }
  }
  destroy() {
  FileEntry.prototype.destroy.call(this);
- this.entry = null;
- this.parser = null;
+ if (this.stream) {
+ // Use destroy() to prevent decompression (our stream has custom destroy that sets destroyed flag)
+ // Fallback to resume() for older Node versions without destroy()
+ const s = this.stream;
+ if (typeof s.destroy === 'function') {
+ s.destroy();
+ }
+ this.stream = null;
+ }
  if (this.lock) {
  this.lock.release();
  this.lock = null;
  }
  }
- constructor(attributes, entry, parser, lock){
+ constructor(attributes, stream, lock, canStream){
  super(attributes);
- this.entry = entry;
- this.parser = parser;
+ this.stream = stream;
  this.lock = lock;
  this.lock.retain();
+ this._canStream = canStream;
  }
  };
  export { SevenZipFileEntry as default };
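The `_writeFile` rewrite above hinges on two Node stream facts: `'error'` does not propagate across `pipe()`, and completion or failure can be reported from either side, so the callback is wrapped with call-once-fn. Below is a generic sketch of the same guard using only Node built-ins (no call-once-fn or on-one), and omitting the `waitForAccess` step the real code performs after a successful write.

```ts
// Generic sketch of the pipe-with-error-guard pattern used in _writeFile above,
// using only Node built-ins.
import fs from 'fs';
import type { Readable } from 'stream';

function writeStreamToFile(source: Readable, fullPath: string, callback: (err?: Error) => void): void {
  let called = false;
  const done = (err?: Error) => {
    if (called) return; // errors can arrive from either stream; report only once
    called = true;
    callback(err);
  };

  const writeStream = fs.createWriteStream(fullPath);

  // 'error' does not cross pipe(); listen on the source explicitly.
  source.on('error', (err) => {
    writeStream.destroy(); // stop writing; any follow-up error hits the guarded handler below
    done(err);
  });

  writeStream.on('error', done);
  writeStream.on('finish', () => done());

  source.pipe(writeStream);
}
```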
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["import { type FileAttributes, FileEntry, type Lock, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { ExtractOptions } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: Lock;\n private entry: SevenZipEntry;\n private parser: SevenZipParser;\n\n constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: Lock) {\n super(attributes);\n this.entry = entry;\n this.parser = parser;\n this.lock = lock;\n this.lock.retain();\n }\n\n create(dest: string, callback: NoParamCallback): void;\n create(dest: string, options: ExtractOptions, callback: NoParamCallback): void;\n create(dest: string, options?: ExtractOptions): Promise<boolean>;\n create(dest: string, options?: ExtractOptions | NoParamCallback, callback?: NoParamCallback): void | Promise<boolean> {\n if (typeof options === 'function') {\n callback = options;\n options = null;\n }\n\n if (typeof callback === 'function') {\n options = options || {};\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) => {\n this.create(dest, options as ExtractOptions, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n });\n });\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): void {\n if (!this.entry || !this.parser) {\n callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));\n return;\n }\n\n // Use callback-based async decompression\n this.parser.getEntryStreamAsync(this.entry, (err, stream) => {\n if (err) return callback(err);\n if (!stream) return callback(new Error('No stream returned'));\n\n const res = stream.pipe(fs.createWriteStream(fullPath));\n oo(res, ['error', 'close', 'finish'], (writeErr?: Error) => {\n writeErr ? 
callback(writeErr) : waitForAccess(fullPath, callback);\n });\n });\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n this.entry = null;\n this.parser = null;\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["FileEntry","waitForAccess","fs","oo","SevenZipFileEntry","create","dest","options","callback","prototype","call","err","lock","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","entry","parser","Error","getEntryStreamAsync","stream","res","pipe","createWriteStream","writeErr","destroy","attributes","retain"],"mappings":"AAAA,SAA8BA,SAAS,EAAmCC,aAAa,QAAQ,wBAAwB;AACvH,OAAOC,QAAQ,KAAK;AACpB,OAAOC,QAAQ,SAAS;AAIT,IAAA,AAAMC,oBAAN,MAAMA,0BAA0BJ;IAgB7CK,OAAOC,IAAY,EAAEC,OAA0C,EAAEC,QAA0B,EAA2B;QACpH,IAAI,OAAOD,YAAY,YAAY;YACjCC,WAAWD;YACXA,UAAU;QACZ;QAEA,IAAI,OAAOC,aAAa,YAAY;YAClCD,UAAUA,WAAW,CAAC;YACtB,OAAOP,UAAUS,SAAS,CAACJ,MAAM,CAACK,IAAI,CAAC,IAAI,EAAEJ,MAAMC,SAAS,CAACI;gBAC3DH,SAASG;gBACT,IAAI,IAAI,CAACC,IAAI,EAAE;oBACb,IAAI,CAACA,IAAI,CAACC,OAAO;oBACjB,IAAI,CAACD,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIE,QAAQ,CAACC,SAASC;YAC3B,IAAI,CAACX,MAAM,CAACC,MAAMC,SAA2B,CAACI,KAAaM;gBACzDN,MAAMK,OAAOL,OAAOI,QAAQE;YAC9B;QACF;IACF;IAEAC,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB,EAAQ;QACtF,IAAI,CAAC,IAAI,CAACa,KAAK,IAAI,CAAC,IAAI,CAACC,MAAM,EAAE;YAC/Bd,SAAS,IAAIe,MAAM;YACnB;QACF;QAEA,yCAAyC;QACzC,IAAI,CAACD,MAAM,CAACE,mBAAmB,CAAC,IAAI,CAACH,KAAK,EAAE,CAACV,KAAKc;YAChD,IAAId,KAAK,OAAOH,SAASG;YACzB,IAAI,CAACc,QAAQ,OAAOjB,SAAS,IAAIe,MAAM;YAEvC,MAAMG,MAAMD,OAAOE,IAAI,CAACzB,GAAG0B,iBAAiB,CAACT;YAC7ChB,GAAGuB,KAAK;gBAAC;gBAAS;gBAAS;aAAS,EAAE,CAACG;gBACrCA,WAAWrB,SAASqB,YAAY5B,cAAckB,UAAUX;YAC1D;QACF;IACF;IAEAsB,UAAU;QACR9B,UAAUS,SAAS,CAACqB,OAAO,CAACpB,IAAI,CAAC,IAAI;QACrC,IAAI,CAACW,KAAK,GAAG;QACb,IAAI,CAACC,MAAM,GAAG;QACd,IAAI,IAAI,CAACV,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACC,OAAO;YACjB,IAAI,CAACD,IAAI,GAAG;QACd;IACF;IA5DA,YAAYmB,UAA0B,EAAEV,KAAoB,EAAEC,MAAsB,EAAEV,IAAU,CAAE;QAChG,KAAK,CAACmB;QACN,IAAI,CAACV,KAAK,GAAGA;QACb,IAAI,CAACC,MAAM,GAAGA;QACd,IAAI,CAACV,IAAI,GAAGA;QACZ,IAAI,CAACA,IAAI,CAACoB,MAAM;IAClB;AAuDF;AAlEA,SAAqB5B,+BAkEpB"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["/**\n * FileEntry for 7z archives\n *\n * Wraps a lazy stream - decompression happens when the stream is read.\n * API consistent with zip-iterator and tar-iterator.\n */\n\nimport once from 'call-once-fn';\nimport { type FileAttributes, FileEntry, type Lock, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { ExtractOptions } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: Lock;\n private stream: NodeJS.ReadableStream;\n\n /**\n * Whether this entry's folder supports streaming decompression.\n */\n readonly _canStream: boolean;\n\n constructor(attributes: FileAttributes, stream: NodeJS.ReadableStream, lock: Lock, canStream: boolean) {\n super(attributes);\n this.stream = stream;\n this.lock = lock;\n this.lock.retain();\n this._canStream = canStream;\n }\n\n create(dest: string, callback: NoParamCallback): void;\n create(dest: string, options: ExtractOptions, callback: NoParamCallback): void;\n create(dest: string, options?: ExtractOptions): Promise<boolean>;\n create(dest: string, options?: ExtractOptions | NoParamCallback, callback?: NoParamCallback): void | Promise<boolean> {\n callback = typeof options === 'function' ? options : callback;\n options = typeof options === 'function' ? {} : ((options || {}) as ExtractOptions);\n\n if (typeof callback === 'function') {\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) =>\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n })\n );\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): void {\n if (!this.stream) {\n callback(new Error('7z FileEntry missing stream. Check for calling create multiple times'));\n return;\n }\n\n const stream = this.stream;\n this.stream = null; // Prevent reuse\n\n // Use once since errors can come from either stream\n const cb = once((err?: Error) => {\n err ? callback(err) : waitForAccess(fullPath, callback);\n });\n\n try {\n const writeStream = fs.createWriteStream(fullPath);\n\n // Listen for errors on source stream (errors don't propagate through pipe)\n stream.on('error', (streamErr: Error) => {\n // Destroy the write stream on source error.\n // On Node 0.8, destroy() emits 'close' before 'error'. Since on-one is listening\n // for ['error', 'close', 'finish'], it catches 'close' first, calls our callback,\n // and removes ALL listeners - including the 'error' listener. 
The subsequent EBADF\n // error then fires with no handler, causing an uncaught exception.\n // Adding a no-op error handler ensures there's always a listener for any error.\n const ws = writeStream as fs.WriteStream & { destroy?: () => void };\n writeStream.on('error', () => {});\n if (typeof ws.destroy === 'function') ws.destroy();\n cb(streamErr);\n });\n\n // Pipe and listen for write stream completion/errors\n stream.pipe(writeStream);\n oo(writeStream, ['error', 'close', 'finish'], cb);\n } catch (pipeErr) {\n cb(pipeErr);\n }\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n if (this.stream) {\n // Use destroy() to prevent decompression (our stream has custom destroy that sets destroyed flag)\n // Fallback to resume() for older Node versions without destroy()\n const s = this.stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') {\n s.destroy();\n }\n this.stream = null;\n }\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["once","FileEntry","waitForAccess","fs","oo","SevenZipFileEntry","create","dest","options","callback","prototype","call","err","lock","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","stream","Error","cb","writeStream","createWriteStream","on","streamErr","ws","destroy","pipe","pipeErr","s","attributes","canStream","retain","_canStream"],"mappings":"AAAA;;;;;CAKC,GAED,OAAOA,UAAU,eAAe;AAChC,SAA8BC,SAAS,EAAmCC,aAAa,QAAQ,wBAAwB;AACvH,OAAOC,QAAQ,KAAK;AACpB,OAAOC,QAAQ,SAAS;AAGT,IAAA,AAAMC,oBAAN,MAAMA,0BAA0BJ;IAoB7CK,OAAOC,IAAY,EAAEC,OAA0C,EAAEC,QAA0B,EAA2B;QACpHA,WAAW,OAAOD,YAAY,aAAaA,UAAUC;QACrDD,UAAU,OAAOA,YAAY,aAAa,CAAC,IAAMA,WAAW,CAAC;QAE7D,IAAI,OAAOC,aAAa,YAAY;YAClC,OAAOR,UAAUS,SAAS,CAACJ,MAAM,CAACK,IAAI,CAAC,IAAI,EAAEJ,MAAMC,SAAS,CAACI;gBAC3DH,SAASG;gBACT,IAAI,IAAI,CAACC,IAAI,EAAE;oBACb,IAAI,CAACA,IAAI,CAACC,OAAO;oBACjB,IAAI,CAACD,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIE,QAAQ,CAACC,SAASC,SAC3B,IAAI,CAACX,MAAM,CAACC,MAAMC,SAAS,CAACI,KAAaM;gBACvCN,MAAMK,OAAOL,OAAOI,QAAQE;YAC9B;IAEJ;IAEAC,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB,EAAQ;QACtF,IAAI,CAAC,IAAI,CAACa,MAAM,EAAE;YAChBb,SAAS,IAAIc,MAAM;YACnB;QACF;QAEA,MAAMD,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAI,CAACA,MAAM,GAAG,MAAM,gBAAgB;QAEpC,oDAAoD;QACpD,MAAME,KAAKxB,KAAK,CAACY;YACfA,MAAMH,SAASG,OAAOV,cAAckB,UAAUX;QAChD;QAEA,IAAI;YACF,MAAMgB,cAActB,GAAGuB,iBAAiB,CAACN;YAEzC,2EAA2E;YAC3EE,OAAOK,EAAE,CAAC,SAAS,CAACC;gBAClB,4CAA4C;gBAC5C,iFAAiF;gBACjF,kFAAkF;gBAClF,mFAAmF;gBACnF,mEAAmE;gBACnE,gFAAgF;gBAChF,MAAMC,KAAKJ;gBACXA,YAAYE,EAAE,CAAC,SAAS,KAAO;gBAC/B,IAAI,OAAOE,GAAGC,OAAO,KAAK,YAAYD,GAAGC,OAAO;gBAChDN,GAAGI;YACL;YAEA,qDAAqD;YACrDN,OAAOS,IAAI,CAACN;YACZrB,GAAGqB,aAAa;gBAAC;gBAAS;gBAAS;aAAS,EAAED;QAChD,EAAE,OAAOQ,SAAS;YAChBR,GAAGQ;QACL;IACF;IAEAF,UAAU;QACR7B,UAAUS,SAAS,CAACoB,OAAO,CAACnB,IAAI,CAAC,IAAI;QACrC,IAAI,IAAI,CAACW,MAAM,EAAE;YACf,kGAAkG;YAClG,iEAAiE;YACjE,MAAMW,IAAI,IAAI,CAACX,MAAM;YACrB,IAAI,OAAOW,EAAEH,OAAO,KAAK,YAAY;gBACnCG,EAAEH,OAAO;YACX;YACA,IAAI,CAACR,MAAM,GAAG;QAChB;QACA,IAAI,IAAI,CAACT,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACC,OAAO;YACjB,IAAI,CAACD,IAAI,GAAG;QACd;IACF;IArFA,YAAYqB,UAA0B,EAAEZ,MAA6B,EAAET,IAAU,EAAEsB,SAAkB,CAAE;QACrG,KAAK,CAACD;QACN,IAAI,CAACZ,MAAM,GAAGA;QACd,IAAI,CAACT,IAAI,GAAGA;QACZ,IAAI,CAACA,IAAI,CAACuB,MAAM;QAChB,IAAI,CAACC,UAAU,GAAGF;IACpB;AAgFF;AA/FA,SAAqB9B,+BA+FpB"}
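The destroy() comments in the FileEntry source above mention a resume() fallback for older Node streams that lack destroy(), but the shipped branch only calls destroy() when it exists. A minimal sketch of the fallback the comment describes, with a hypothetical helper name (the real teardown lives inside SevenZipFileEntry.destroy()):

function discardLazyStream(stream: NodeJS.ReadableStream): void {
  const s = stream as NodeJS.ReadableStream & { destroy?: () => void };
  if (typeof s.destroy === 'function') {
    s.destroy(); // the lazy stream's custom destroy sets its destroyed flag so decompression never runs
  } else {
    s.resume(); // older-Node fallback noted in the comment: let the stream drain instead
  }
}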
@@ -1,8 +1,31 @@
  import BaseIterator, { Lock } from 'extract-base-iterator';
- import type { Entry, ExtractOptions, SevenZipFileIterator } from './types.js';
+ import { type SevenZipEntry } from './sevenz/SevenZipParser.js';
+ import type { Entry, ExtractOptions } from './types.js';
  export default class SevenZipIterator extends BaseIterator<Entry> {
  lock: Lock | null;
- iterator: SevenZipFileIterator;
+ /** @internal - Do not use directly */
+ _iterator: unknown;
  constructor(source: string | NodeJS.ReadableStream, options?: ExtractOptions);
  end(err?: Error): void;
+ /**
+ * Check if streaming extraction is available for any folder in this archive.
+ * Streaming is possible when folders use codecs like BZip2, Deflate, or Copy
+ * that can decompress incrementally without buffering the entire input.
+ *
+ * @returns true if at least one folder supports streaming
+ */
+ canStream(): boolean;
+ /**
+ * Get entries sorted for optimal streaming extraction.
+ *
+ * Entries are sorted by:
+ * 1. Folder index (process one folder at a time)
+ * 2. Stream index within folder (for solid block streaming)
+ *
+ * This ordering allows multi-file solid folders to stream with
+ * O(largest file) memory instead of O(folder size).
+ *
+ * @returns Array of entries in streaming order
+ */
+ getStreamingOrder(): SevenZipEntry[];
  }
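The canStream() and getStreamingOrder() declarations above are type-only; this hunk does not show when they become safe to call (for example, whether the archive header must already be parsed). A hedged usage sketch under that caveat, assuming the package entry point re-exports this class as its default (not confirmed by this hunk alone) and with the archive path purely illustrative:

import SevenZipIterator from '7z-iterator';

const iterator = new SevenZipIterator('/path/to/archive.7z'); // illustrative path

if (iterator.canStream()) {
  // At least one folder uses an incrementally decodable codec (BZip2, Deflate, Copy).
  for (const entry of iterator.getStreamingOrder()) {
    // Entries arrive grouped by folder, then by stream index within the folder,
    // which is what enables O(largest file) memory for solid folders.
    console.log(entry);
  }
}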