7z-iterator 0.1.5 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/sevenz/SevenZipParser.d.cts +4 -1
- package/dist/cjs/sevenz/SevenZipParser.d.ts +4 -1
- package/dist/cjs/sevenz/SevenZipParser.js +45 -7
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj2.js +17 -4
- package/dist/cjs/sevenz/codecs/Bcj2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.js +2 -1
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +2 -2
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +2 -2
- package/dist/cjs/sevenz/codecs/Lzma2.js +30 -6
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/streams.d.cts +7 -1
- package/dist/cjs/sevenz/codecs/streams.d.ts +7 -1
- package/dist/cjs/sevenz/codecs/streams.js +47 -17
- package/dist/cjs/sevenz/codecs/streams.js.map +1 -1
- package/dist/cjs/sevenz/constants.d.cts +1 -0
- package/dist/cjs/sevenz/constants.d.ts +1 -0
- package/dist/cjs/sevenz/constants.js +2 -1
- package/dist/cjs/sevenz/constants.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.d.ts +4 -1
- package/dist/esm/sevenz/SevenZipParser.js +45 -7
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj2.js +17 -4
- package/dist/esm/sevenz/codecs/Bcj2.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.js +2 -1
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +2 -2
- package/dist/esm/sevenz/codecs/Lzma2.js +31 -7
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/streams.d.ts +7 -1
- package/dist/esm/sevenz/codecs/streams.js +53 -17
- package/dist/esm/sevenz/codecs/streams.js.map +1 -1
- package/dist/esm/sevenz/constants.d.ts +1 -0
- package/dist/esm/sevenz/constants.js +2 -1
- package/dist/esm/sevenz/constants.js.map +1 -1
- package/package.json +4 -2
- package/patches/lzma-purejs+0.9.3.patch +196 -0
@@ -32,6 +32,7 @@ _export(exports, {
        return decodeLzma2;
    }
});
+var _extractbaseiterator = require("extract-base-iterator");
 var _lzmapurejs = /*#__PURE__*/ _interop_require_default(require("lzma-purejs"));
 var _createBufferingDecoderts = /*#__PURE__*/ _interop_require_default(require("./createBufferingDecoder.js"));
 var _streamsts = require("./streams.js");
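The only change in this hunk is the new require of extract-base-iterator, which supplies the allocBufferUnsafe helper used by the pre-allocation paths in the hunks below. As a rough, assumption-labelled sketch (not the package's actual implementation), such a helper generally amounts to:

// Hypothetical sketch only - the real helper ships inside extract-base-iterator.
function allocBufferUnsafe(size: number): Buffer {
    // Skipping zero-fill is acceptable here because the decoders overwrite
    // every byte of the buffer before it is exposed to callers.
    return typeof Buffer.allocUnsafe === 'function'
        ? Buffer.allocUnsafe(size)
        : new Buffer(size); // fallback for very old Node versions
}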
@@ -64,12 +65,19 @@ var LzmaDecoder = _lzmapurejs.default.LZMA.Decoder;
     var shift = Math.floor(propByte / 2) + 11;
     return base << shift;
 }
-function decodeLzma2(input, properties, _unpackSize) {
+function decodeLzma2(input, properties, unpackSize) {
     if (!properties || properties.length < 1) {
         throw new Error('LZMA2 requires properties byte');
     }
     var dictSize = decodeDictionarySize(properties[0]);
-    var output = [];
+    // Memory optimization: pre-allocate output buffer if size is known
+    // This avoids double-memory during Buffer.concat
+    var outputBuffer = null;
+    var outputPos = 0;
+    var outputChunks = [];
+    if (unpackSize && unpackSize > 0) {
+        outputBuffer = (0, _extractbaseiterator.allocBufferUnsafe)(unpackSize);
+    }
     var offset = 0;
     // LZMA decoder instance - reused across chunks
     // The decoder is patched via patch-package to support setSolid() for LZMA2 state preservation
@@ -107,7 +115,12 @@ function decodeLzma2(input, properties, _unpackSize) {
            // Get the uncompressed data
            var uncompData = input.slice(offset, offset + uncompSize);
            // Copy uncompressed data to output
-            output.push(uncompData);
+            if (outputBuffer) {
+                uncompData.copy(outputBuffer, outputPos);
+                outputPos += uncompData.length;
+            } else {
+                outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(uncompData);
+            }
            // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it
            // The decoder needs to track this data for LZ77 back-references
            // We write directly to _buffer to avoid flush() which requires _stream to be set
@@ -182,7 +195,7 @@ function decodeLzma2(input, properties, _unpackSize) {
            }
            // Decode LZMA chunk
            var inStream = (0, _streamsts.createInputStream)(input, offset, compSize);
-            var outStream = (0, _streamsts.createOutputStream)();
+            var outStream = (0, _streamsts.createOutputStream)(uncompSize2); // Pre-allocate for memory efficiency
            // Set solid mode based on control byte - this preserves state across code() calls
            decoder.setSolid(useSolidMode);
            // Decode the chunk
@@ -190,13 +203,24 @@ function decodeLzma2(input, properties, _unpackSize) {
            if (!success) {
                throw new Error('LZMA decompression failed');
            }
-            output.push(outStream.toBuffer());
+            var chunkOutput = outStream.toBuffer();
+            if (outputBuffer) {
+                chunkOutput.copy(outputBuffer, outputPos);
+                outputPos += chunkOutput.length;
+            } else {
+                outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(chunkOutput);
+            }
            offset += compSize;
        } else {
            throw new Error("Invalid LZMA2 control byte: 0x".concat(control.toString(16)));
        }
    }
-    return Buffer.concat(output);
+    // Return pre-allocated buffer or concatenated chunks
+    if (outputBuffer) {
+        // Return only the used portion if we didn't fill the buffer
+        return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
+    }
+    return Buffer.concat(outputChunks);
 }
 function createLzma2Decoder(properties, unpackSize) {
     return (0, _createBufferingDecoderts.default)(decodeLzma2, properties, unpackSize);
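Taken together, these hunks replace "accumulate chunks, then Buffer.concat" with copying straight into a buffer pre-allocated from unpackSize, falling back to chunking when the size is unknown. A minimal sketch of why that lowers peak memory (illustrative names, not the package's API):

// Sketch: both functions return the same bytes; only peak memory differs.
function concatStrategy(chunks: Buffer[]): Buffer {
    // The chunks and the concatenated copy briefly coexist (~2x the output size).
    return Buffer.concat(chunks);
}

function preallocStrategy(chunks: Buffer[], unpackSize: number): Buffer {
    // Each chunk is copied directly into its final position (~1x the output size).
    const out = Buffer.allocUnsafe(unpackSize);
    let pos = 0;
    for (const chunk of chunks) {
        chunk.copy(out, pos);
        pos += chunk.length;
    }
    // Trim if the declared size over-estimated the actual output.
    return pos < out.length ? out.slice(0, pos) : out;
}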
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["// LZMA2 codec - wrapper around lzma-purejs for LZMA2 decompression\n// LZMA2 is a container format that wraps LZMA chunks with framing\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n//\n// Note: lzma-purejs is patched via patch-package to support LZMA2 state preservation.\n// The patch adds setSolid(true/false) method to control whether state is preserved\n// across code() calls.\n\n// Import lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)\nimport lzmajs from 'lzma-purejs';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\nimport { createInputStream, createOutputStream } from './streams.ts';\n\nvar LzmaDecoder = lzmajs.LZMA.Decoder;\n\n/**\n * Decode LZMA2 dictionary size from properties byte\n * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar\n *\n * Per XZ spec, dictionary sizes are:\n * 0x00 = 4 KiB (2^12)\n * 0x01 = 6 KiB\n * 0x02 = 8 KiB (2^13)\n * ...\n * 0x28 = 1.5 GiB\n */\nfunction decodeDictionarySize(propByte: number): number {\n if (propByte > 40) {\n throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);\n }\n if (propByte === 40) {\n // Max dictionary size: 4 GiB - 1\n return 0xffffffff;\n }\n // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)\n var base = 2 | (propByte & 1);\n var shift = Math.floor(propByte / 2) + 11;\n return base << shift;\n}\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param _unpackSize - Unused (LZMA2 has internal size markers)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n var dictSize = decodeDictionarySize(properties[0]);\n var output: Buffer[] = [];\n var offset = 0;\n\n // LZMA decoder instance - reused across chunks\n // The decoder is patched via patch-package to support setSolid() for LZMA2 state preservation\n // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation\n // and _prevByte which is used for literal decoder context selection\n var decoder = new LzmaDecoder() as InstanceType<typeof LzmaDecoder> & {\n setSolid: (solid: boolean) => void;\n _nowPos64: number;\n _prevByte: number;\n };\n decoder.setDictionarySize(dictSize);\n\n // Access internal _outWindow for dictionary management\n // We need to preserve dictionary state across LZMA2 chunks\n type OutWindowType = {\n _buffer: Buffer;\n _pos: number;\n _streamPos: number;\n _windowSize: number;\n init: (solid: boolean) => void;\n };\n var outWindow = (decoder as unknown as { _outWindow: OutWindowType })._outWindow;\n\n // Track current LZMA properties (lc, lp, pb)\n var propsSet = false;\n\n while (offset < input.length) {\n var control = input[offset++];\n\n if (control === 0x00) {\n // End of LZMA2 stream\n break;\n }\n\n if (control === 0x01 || control === 0x02) {\n // Uncompressed chunk\n // 0x01 = 
dictionary reset + uncompressed\n // 0x02 = uncompressed (no reset)\n\n // Handle dictionary reset for 0x01\n if (control === 0x01) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n decoder._nowPos64 = 0;\n }\n\n if (offset + 2 > input.length) {\n throw new Error('Truncated LZMA2 uncompressed chunk header');\n }\n\n // Size is big-endian, 16-bit, value + 1\n var uncompSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n if (offset + uncompSize > input.length) {\n throw new Error('Truncated LZMA2 uncompressed data');\n }\n\n // Get the uncompressed data\n var uncompData = input.slice(offset, offset + uncompSize);\n\n // Copy uncompressed data to output\n output.push(uncompData);\n\n // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it\n // The decoder needs to track this data for LZ77 back-references\n // We write directly to _buffer to avoid flush() which requires _stream to be set\n // We must also update _streamPos to match _pos so that flush() doesn't try to write\n for (var i = 0; i < uncompData.length; i++) {\n outWindow._buffer[outWindow._pos++] = uncompData[i];\n // Handle circular buffer wrap-around\n if (outWindow._pos >= outWindow._windowSize) {\n outWindow._pos = 0;\n }\n }\n // Keep _streamPos in sync so flush() doesn't try to write these bytes\n // (they're already in our output buffer)\n outWindow._streamPos = outWindow._pos;\n\n // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation\n decoder._nowPos64 += uncompSize;\n\n // Update prevByte for literal decoder context in subsequent LZMA chunks\n decoder._prevByte = uncompData[uncompData.length - 1];\n\n offset += uncompSize;\n } else if (control >= 0x80) {\n // LZMA compressed chunk\n // Control byte format (bits 7-0):\n // Bit 7: always 1 for LZMA chunk\n // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)\n // Bits 4-0: high 5 bits of uncompressed size - 1\n\n // Control byte ranges (based on bits 6-5):\n // 0x80-0x9F (00): no reset - continue existing state (solid mode)\n // 0xA0-0xBF (01): reset state only\n // 0xC0-0xDF (10): reset state + new properties\n // 0xE0-0xFF (11): reset dictionary + state + new properties\n var resetState = control >= 0xa0;\n var newProps = control >= 0xc0;\n var dictReset = control >= 0xe0;\n var useSolidMode = !resetState;\n\n // Handle dictionary reset for control bytes 0xE0-0xFF\n if (dictReset) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n }\n\n if (offset + 4 > input.length) {\n throw new Error('Truncated LZMA2 LZMA chunk header');\n }\n\n // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1\n var uncompHigh = control & 0x1f;\n var uncompSize2 = ((uncompHigh << 16) | (input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // Compressed size: 16 bits + 1\n var compSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // If new properties, read 1-byte LZMA properties\n if (newProps) {\n if (offset >= input.length) {\n throw new Error('Truncated LZMA2 properties byte');\n }\n var propsByte = input[offset++];\n\n // Properties byte: pb * 45 + lp * 9 + lc\n // where pb, lp, lc are LZMA parameters\n var lc = propsByte % 9;\n var remainder = Math.floor(propsByte / 9);\n var lp = remainder % 5;\n var pb = Math.floor(remainder / 5);\n\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA 
chunk without properties');\n }\n\n if (offset + compSize > input.length) {\n throw new Error('Truncated LZMA2 compressed data');\n }\n\n // Decode LZMA chunk\n var inStream = createInputStream(input, offset, compSize);\n var outStream = createOutputStream();\n\n // Set solid mode based on control byte - this preserves state across code() calls\n decoder.setSolid(useSolidMode);\n\n // Decode the chunk\n var success = decoder.code(inStream, outStream, uncompSize2);\n if (!success) {\n throw new Error('LZMA decompression failed');\n }\n\n output.push(outStream.toBuffer());\n\n offset += compSize;\n } else {\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n }\n }\n\n return Buffer.concat(output);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n */\nexport function createLzma2Decoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeLzma2, properties, unpackSize);\n}\n"],"names":["createLzma2Decoder","decodeLzma2","LzmaDecoder","lzmajs","LZMA","Decoder","decodeDictionarySize","propByte","Error","base","shift","Math","floor","input","properties","_unpackSize","length","dictSize","output","offset","decoder","setDictionarySize","outWindow","_outWindow","propsSet","control","_pos","_streamPos","_nowPos64","uncompSize","uncompData","slice","push","i","_buffer","_windowSize","_prevByte","resetState","newProps","dictReset","useSolidMode","uncompHigh","uncompSize2","compSize","propsByte","lc","remainder","lp","pb","setLcLpPb","inStream","createInputStream","outStream","createOutputStream","setSolid","success","code","toBuffer","toString","Buffer","concat","unpackSize","createBufferingDecoder"],"mappings":"AAAA,mEAAmE;AACnE,kEAAkE;AAClE,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;AAC1E,EAAE;AACF,sFAAsF;AACtF,mFAAmF;AACnF,uBAAuB;AAEvB,6EAA6E;;;;;;;;;;;;QAkO7DA;eAAAA;;QAzLAC;eAAAA;;;iEAxCG;+EAEgB;yBACmB;;;;;;AAEtD,IAAIC,cAAcC,mBAAM,CAACC,IAAI,CAACC,OAAO;AAErC;;;;;;;;;;CAUC,GACD,SAASC,qBAAqBC,QAAgB;IAC5C,IAAIA,WAAW,IAAI;QACjB,MAAM,IAAIC,MAAM,AAAC,2CAAmD,OAATD;IAC7D;IACA,IAAIA,aAAa,IAAI;QACnB,iCAAiC;QACjC,OAAO;IACT;IACA,8DAA8D;IAC9D,IAAIE,OAAO,IAAKF,WAAW;IAC3B,IAAIG,QAAQC,KAAKC,KAAK,CAACL,WAAW,KAAK;IACvC,OAAOE,QAAQC;AACjB;AAUO,SAAST,YAAYY,KAAa,EAAEC,UAAmB,EAAEC,WAAoB;IAClF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIR,MAAM;IAClB;IAEA,IAAIS,WAAWX,qBAAqBQ,UAAU,CAAC,EAAE;IACjD,IAAII,SAAmB,EAAE;IACzB,IAAIC,SAAS;IAEb,+CAA+C;IAC/C,8FAA8F;IAC9F,sFAAsF;IACtF,oEAAoE;IACpE,IAAIC,UAAU,IAAIlB;IAKlBkB,QAAQC,iBAAiB,CAACJ;IAW1B,IAAIK,YAAY,AAACF,QAAqDG,UAAU;IAEhF,6CAA6C;IAC7C,IAAIC,WAAW;IAEf,MAAOL,SAASN,MAAMG,MAAM,CAAE;QAC5B,IAAIS,UAAUZ,KAAK,CAACM,SAAS;QAE7B,IAAIM,YAAY,MAAM;YAEpB;QACF;QAEA,IAAIA,YAAY,QAAQA,YAAY,MAAM;YACxC,qBAAqB;YACrB,yCAAyC;YACzC,iCAAiC;YAEjC,mCAAmC;YACnC,IAAIA,YAAY,MAAM;gBACpBH,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;gBACvBP,QAAQQ,SAAS,GAAG;YACtB;YAEA,IAAIT,SAAS,IAAIN,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,wCAAwC;YACxC,IAAIqB,aAAa,AAAC,CAAA,AAAChB,KAAK,CAACM,OAAO,IAAI,IAAKN,KAAK,CAACM,SAAS,EAAE,AAAD,IAAK;YAC9DA,UAAU;YAEV,IAAIA,SAASU,aAAahB,MAAMG,MAAM,EAAE;gBACtC,MAAM,IAAIR,MAAM;YAClB;YAEA,4BAA4B;YAC5B,IAAIsB,aAAajB,MAAMkB,KAAK,CAACZ,QAAQA,SAASU;YAE9C,mCAAmC;YACnCX,OAAOc,IAAI,CAACF;YAEZ,2FAA2F;YAC3F,gEAAgE;YAChE,iFAAiF;YACjF,oFAAoF;YACpF,IAAK,IAAIG,IAAI,GAAGA,IAAIH,WAAWd,MAAM,EAAEiB,IAAK;gBAC1CX,UAAUY,OAAO,CAACZ,UAAUI,IAAI,GAAG,GAAGI,UAAU,CAACG,EAAE;gBACnD,qCAAqC;gBACrC,IAAIX,UAAUI,IAAI,IAAIJ,UAAUa,WAAW,EAAE;oBAC3Cb,UAAUI,IAAI,GAAG;gBACnB;YACF;YACA
,sEAAsE;YACtE,yCAAyC;YACzCJ,UAAUK,UAAU,GAAGL,UAAUI,IAAI;YAErC,8FAA8F;YAC9FN,QAAQQ,SAAS,IAAIC;YAErB,wEAAwE;YACxET,QAAQgB,SAAS,GAAGN,UAAU,CAACA,WAAWd,MAAM,GAAG,EAAE;YAErDG,UAAUU;QACZ,OAAO,IAAIJ,WAAW,MAAM;YAC1B,wBAAwB;YACxB,kCAAkC;YAClC,iCAAiC;YACjC,sEAAsE;YACtE,iDAAiD;YAEjD,2CAA2C;YAC3C,kEAAkE;YAClE,mCAAmC;YACnC,+CAA+C;YAC/C,4DAA4D;YAC5D,IAAIY,aAAaZ,WAAW;YAC5B,IAAIa,WAAWb,WAAW;YAC1B,IAAIc,YAAYd,WAAW;YAC3B,IAAIe,eAAe,CAACH;YAEpB,sDAAsD;YACtD,IAAIE,WAAW;gBACbjB,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;YACzB;YAEA,IAAIR,SAAS,IAAIN,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,yEAAyE;YACzE,IAAIiC,aAAahB,UAAU;YAC3B,IAAIiB,cAAc,AAAC,CAAA,AAACD,cAAc,KAAO5B,KAAK,CAACM,OAAO,IAAI,IAAKN,KAAK,CAACM,SAAS,EAAE,AAAD,IAAK;YACpFA,UAAU;YAEV,+BAA+B;YAC/B,IAAIwB,WAAW,AAAC,CAAA,AAAC9B,KAAK,CAACM,OAAO,IAAI,IAAKN,KAAK,CAACM,SAAS,EAAE,AAAD,IAAK;YAC5DA,UAAU;YAEV,iDAAiD;YACjD,IAAImB,UAAU;gBACZ,IAAInB,UAAUN,MAAMG,MAAM,EAAE;oBAC1B,MAAM,IAAIR,MAAM;gBAClB;gBACA,IAAIoC,YAAY/B,KAAK,CAACM,SAAS;gBAE/B,yCAAyC;gBACzC,uCAAuC;gBACvC,IAAI0B,KAAKD,YAAY;gBACrB,IAAIE,YAAYnC,KAAKC,KAAK,CAACgC,YAAY;gBACvC,IAAIG,KAAKD,YAAY;gBACrB,IAAIE,KAAKrC,KAAKC,KAAK,CAACkC,YAAY;gBAEhC,IAAI,CAAC1B,QAAQ6B,SAAS,CAACJ,IAAIE,IAAIC,KAAK;oBAClC,MAAM,IAAIxC,MAAM,AAAC,+BAAuCuC,OAATF,IAAG,QAAeG,OAATD,IAAG,QAAS,OAAHC;gBACnE;gBACAxB,WAAW;YACb;YAEA,IAAI,CAACA,UAAU;gBACb,MAAM,IAAIhB,MAAM;YAClB;YAEA,IAAIW,SAASwB,WAAW9B,MAAMG,MAAM,EAAE;gBACpC,MAAM,IAAIR,MAAM;YAClB;YAEA,oBAAoB;YACpB,IAAI0C,WAAWC,IAAAA,4BAAiB,EAACtC,OAAOM,QAAQwB;YAChD,IAAIS,YAAYC,IAAAA,6BAAkB;YAElC,kFAAkF;YAClFjC,QAAQkC,QAAQ,CAACd;YAEjB,mBAAmB;YACnB,IAAIe,UAAUnC,QAAQoC,IAAI,CAACN,UAAUE,WAAWV;YAChD,IAAI,CAACa,SAAS;gBACZ,MAAM,IAAI/C,MAAM;YAClB;YAEAU,OAAOc,IAAI,CAACoB,UAAUK,QAAQ;YAE9BtC,UAAUwB;QACZ,OAAO;YACL,MAAM,IAAInC,MAAM,AAAC,iCAAqD,OAArBiB,QAAQiC,QAAQ,CAAC;QACpE;IACF;IAEA,OAAOC,OAAOC,MAAM,CAAC1C;AACvB;AAKO,SAASlB,mBAAmBc,UAAmB,EAAE+C,UAAmB;IACzE,OAAOC,IAAAA,iCAAsB,EAAC7D,aAAaa,YAAY+C;AACzD"}
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["// LZMA2 codec - wrapper around lzma-purejs for LZMA2 decompression\n// LZMA2 is a container format that wraps LZMA chunks with framing\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n//\n// Note: lzma-purejs is patched via patch-package to support LZMA2 state preservation.\n// The patch adds setSolid(true/false) method to control whether state is preserved\n// across code() calls.\n\n// Import lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport lzmajs from 'lzma-purejs';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\nimport { createInputStream, createOutputStream } from './streams.ts';\n\nvar LzmaDecoder = lzmajs.LZMA.Decoder;\n\n/**\n * Decode LZMA2 dictionary size from properties byte\n * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar\n *\n * Per XZ spec, dictionary sizes are:\n * 0x00 = 4 KiB (2^12)\n * 0x01 = 6 KiB\n * 0x02 = 8 KiB (2^13)\n * ...\n * 0x28 = 1.5 GiB\n */\nfunction decodeDictionarySize(propByte: number): number {\n if (propByte > 40) {\n throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);\n }\n if (propByte === 40) {\n // Max dictionary size: 4 GiB - 1\n return 0xffffffff;\n }\n // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)\n var base = 2 | (propByte & 1);\n var shift = Math.floor(propByte / 2) + 11;\n return base << shift;\n}\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (used for pre-allocation to reduce memory)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n var dictSize = decodeDictionarySize(properties[0]);\n\n // Memory optimization: pre-allocate output buffer if size is known\n // This avoids double-memory during Buffer.concat\n var outputBuffer: Buffer | null = null;\n var outputPos = 0;\n var outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n var offset = 0;\n\n // LZMA decoder instance - reused across chunks\n // The decoder is patched via patch-package to support setSolid() for LZMA2 state preservation\n // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation\n // and _prevByte which is used for literal decoder context selection\n var decoder = new LzmaDecoder() as InstanceType<typeof LzmaDecoder> & {\n setSolid: (solid: boolean) => void;\n _nowPos64: number;\n _prevByte: number;\n };\n decoder.setDictionarySize(dictSize);\n\n // Access internal _outWindow for dictionary management\n // We need to preserve dictionary state across LZMA2 chunks\n type OutWindowType = {\n _buffer: Buffer;\n _pos: number;\n _streamPos: number;\n _windowSize: number;\n init: (solid: boolean) => void;\n };\n var outWindow = 
(decoder as unknown as { _outWindow: OutWindowType })._outWindow;\n\n // Track current LZMA properties (lc, lp, pb)\n var propsSet = false;\n\n while (offset < input.length) {\n var control = input[offset++];\n\n if (control === 0x00) {\n // End of LZMA2 stream\n break;\n }\n\n if (control === 0x01 || control === 0x02) {\n // Uncompressed chunk\n // 0x01 = dictionary reset + uncompressed\n // 0x02 = uncompressed (no reset)\n\n // Handle dictionary reset for 0x01\n if (control === 0x01) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n decoder._nowPos64 = 0;\n }\n\n if (offset + 2 > input.length) {\n throw new Error('Truncated LZMA2 uncompressed chunk header');\n }\n\n // Size is big-endian, 16-bit, value + 1\n var uncompSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n if (offset + uncompSize > input.length) {\n throw new Error('Truncated LZMA2 uncompressed data');\n }\n\n // Get the uncompressed data\n var uncompData = input.slice(offset, offset + uncompSize);\n\n // Copy uncompressed data to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks?.push(uncompData);\n }\n\n // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it\n // The decoder needs to track this data for LZ77 back-references\n // We write directly to _buffer to avoid flush() which requires _stream to be set\n // We must also update _streamPos to match _pos so that flush() doesn't try to write\n for (var i = 0; i < uncompData.length; i++) {\n outWindow._buffer[outWindow._pos++] = uncompData[i];\n // Handle circular buffer wrap-around\n if (outWindow._pos >= outWindow._windowSize) {\n outWindow._pos = 0;\n }\n }\n // Keep _streamPos in sync so flush() doesn't try to write these bytes\n // (they're already in our output buffer)\n outWindow._streamPos = outWindow._pos;\n\n // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation\n decoder._nowPos64 += uncompSize;\n\n // Update prevByte for literal decoder context in subsequent LZMA chunks\n decoder._prevByte = uncompData[uncompData.length - 1];\n\n offset += uncompSize;\n } else if (control >= 0x80) {\n // LZMA compressed chunk\n // Control byte format (bits 7-0):\n // Bit 7: always 1 for LZMA chunk\n // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)\n // Bits 4-0: high 5 bits of uncompressed size - 1\n\n // Control byte ranges (based on bits 6-5):\n // 0x80-0x9F (00): no reset - continue existing state (solid mode)\n // 0xA0-0xBF (01): reset state only\n // 0xC0-0xDF (10): reset state + new properties\n // 0xE0-0xFF (11): reset dictionary + state + new properties\n var resetState = control >= 0xa0;\n var newProps = control >= 0xc0;\n var dictReset = control >= 0xe0;\n var useSolidMode = !resetState;\n\n // Handle dictionary reset for control bytes 0xE0-0xFF\n if (dictReset) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n }\n\n if (offset + 4 > input.length) {\n throw new Error('Truncated LZMA2 LZMA chunk header');\n }\n\n // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1\n var uncompHigh = control & 0x1f;\n var uncompSize2 = ((uncompHigh << 16) | (input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // Compressed size: 16 bits + 1\n var compSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // If new properties, read 1-byte LZMA properties\n if (newProps) {\n if (offset >= input.length) {\n throw new 
Error('Truncated LZMA2 properties byte');\n }\n var propsByte = input[offset++];\n\n // Properties byte: pb * 45 + lp * 9 + lc\n // where pb, lp, lc are LZMA parameters\n var lc = propsByte % 9;\n var remainder = Math.floor(propsByte / 9);\n var lp = remainder % 5;\n var pb = Math.floor(remainder / 5);\n\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n if (offset + compSize > input.length) {\n throw new Error('Truncated LZMA2 compressed data');\n }\n\n // Decode LZMA chunk\n var inStream = createInputStream(input, offset, compSize);\n var outStream = createOutputStream(uncompSize2); // Pre-allocate for memory efficiency\n\n // Set solid mode based on control byte - this preserves state across code() calls\n decoder.setSolid(useSolidMode);\n\n // Decode the chunk\n var success = decoder.code(inStream, outStream, uncompSize2);\n if (!success) {\n throw new Error('LZMA decompression failed');\n }\n\n var chunkOutput = outStream.toBuffer();\n if (outputBuffer) {\n chunkOutput.copy(outputBuffer, outputPos);\n outputPos += chunkOutput.length;\n } else {\n outputChunks?.push(chunkOutput);\n }\n\n offset += compSize;\n } else {\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n // Return only the used portion if we didn't fill the buffer\n return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n */\nexport function createLzma2Decoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeLzma2, properties, 
unpackSize);\n}\n"],"names":["createLzma2Decoder","decodeLzma2","LzmaDecoder","lzmajs","LZMA","Decoder","decodeDictionarySize","propByte","Error","base","shift","Math","floor","input","properties","unpackSize","length","dictSize","outputBuffer","outputPos","outputChunks","allocBufferUnsafe","offset","decoder","setDictionarySize","outWindow","_outWindow","propsSet","control","_pos","_streamPos","_nowPos64","uncompSize","uncompData","slice","copy","push","i","_buffer","_windowSize","_prevByte","resetState","newProps","dictReset","useSolidMode","uncompHigh","uncompSize2","compSize","propsByte","lc","remainder","lp","pb","setLcLpPb","inStream","createInputStream","outStream","createOutputStream","setSolid","success","code","chunkOutput","toBuffer","toString","Buffer","concat","createBufferingDecoder"],"mappings":"AAAA,mEAAmE;AACnE,kEAAkE;AAClE,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;AAC1E,EAAE;AACF,sFAAsF;AACtF,mFAAmF;AACnF,uBAAuB;AAEvB,6EAA6E;;;;;;;;;;;;QA6P7DA;eAAAA;;QAnNAC;eAAAA;;;mCAzCkB;iEACf;+EAEgB;yBACmB;;;;;;AAEtD,IAAIC,cAAcC,mBAAM,CAACC,IAAI,CAACC,OAAO;AAErC;;;;;;;;;;CAUC,GACD,SAASC,qBAAqBC,QAAgB;IAC5C,IAAIA,WAAW,IAAI;QACjB,MAAM,IAAIC,MAAM,AAAC,2CAAmD,OAATD;IAC7D;IACA,IAAIA,aAAa,IAAI;QACnB,iCAAiC;QACjC,OAAO;IACT;IACA,8DAA8D;IAC9D,IAAIE,OAAO,IAAKF,WAAW;IAC3B,IAAIG,QAAQC,KAAKC,KAAK,CAACL,WAAW,KAAK;IACvC,OAAOE,QAAQC;AACjB;AAUO,SAAST,YAAYY,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIR,MAAM;IAClB;IAEA,IAAIS,WAAWX,qBAAqBQ,UAAU,CAAC,EAAE;IAEjD,mEAAmE;IACnE,iDAAiD;IACjD,IAAII,eAA8B;IAClC,IAAIC,YAAY;IAChB,IAAIC,eAAyB,EAAE;IAE/B,IAAIL,cAAcA,aAAa,GAAG;QAChCG,eAAeG,IAAAA,sCAAiB,EAACN;IACnC;IAEA,IAAIO,SAAS;IAEb,+CAA+C;IAC/C,8FAA8F;IAC9F,sFAAsF;IACtF,oEAAoE;IACpE,IAAIC,UAAU,IAAIrB;IAKlBqB,QAAQC,iBAAiB,CAACP;IAW1B,IAAIQ,YAAY,AAACF,QAAqDG,UAAU;IAEhF,6CAA6C;IAC7C,IAAIC,WAAW;IAEf,MAAOL,SAAST,MAAMG,MAAM,CAAE;QAC5B,IAAIY,UAAUf,KAAK,CAACS,SAAS;QAE7B,IAAIM,YAAY,MAAM;YAEpB;QACF;QAEA,IAAIA,YAAY,QAAQA,YAAY,MAAM;YACxC,qBAAqB;YACrB,yCAAyC;YACzC,iCAAiC;YAEjC,mCAAmC;YACnC,IAAIA,YAAY,MAAM;gBACpBH,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;gBACvBP,QAAQQ,SAAS,GAAG;YACtB;YAEA,IAAIT,SAAS,IAAIT,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,wCAAwC;YACxC,IAAIwB,aAAa,AAAC,CAAA,AAACnB,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YAC9DA,UAAU;YAEV,IAAIA,SAASU,aAAanB,MAAMG,MAAM,EAAE;gBACtC,MAAM,IAAIR,MAAM;YAClB;YAEA,4BAA4B;YAC5B,IAAIyB,aAAapB,MAAMqB,KAAK,CAACZ,QAAQA,SAASU;YAE9C,mCAAmC;YACnC,IAAId,cAAc;gBAChBe,WAAWE,IAAI,CAACjB,cAAcC;gBAC9BA,aAAac,WAAWjB,MAAM;YAChC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAcgB,IAAI,CAACH;YACrB;YAEA,2FAA2F;YAC3F,gEAAgE;YAChE,iFAAiF;YACjF,oFAAoF;YACpF,IAAK,IAAII,IAAI,GAAGA,IAAIJ,WAAWjB,MAAM,EAAEqB,IAAK;gBAC1CZ,UAAUa,OAAO,CAACb,UAAUI,IAAI,GAAG,GAAGI,UAAU,CAACI,EAAE;gBACnD,qCAAqC;gBACrC,IAAIZ,UAAUI,IAAI,IAAIJ,UAAUc,WAAW,EAAE;oBAC3Cd,UAAUI,IAAI,GAAG;gBACnB;YACF;YACA,sEAAsE;YACtE,yCAAyC;YACzCJ,UAAUK,UAAU,GAAGL,UAAUI,IAAI;YAErC,8FAA8F;YAC9FN,QAAQQ,SAAS,IAAIC;YAErB,wEAAwE;YACxET,QAAQiB,SAAS,GAAGP,UAAU,CAACA,WAAWjB,MAAM,GAAG,EAAE;YAErDM,UAAUU;QACZ,OAAO,IAAIJ,WAAW,MAAM;YAC1B,wBAAwB;YACxB,kCAAkC;YAClC,iCAAiC;YACjC,sEAAsE;YACtE,iDAAiD;YAEjD,2CAA2C;YAC3C,kEAAkE;YAClE,mCAAmC;YACnC,+CAA+C;YAC/C,4DAA4D;YAC5D,IAAIa,aAAab,WAAW;YAC5B,IAAIc,WAAWd,WAAW;YAC1B,IAAIe,YAAYf,WAAW;YAC3B,IAAIgB,eAAe,CAACH;YAEpB,sDAAsD;YACtD,IAAIE,WAAW;gBACblB,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;YACzB;YAEA,IAAIR,SAAS,IAAIT,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIR,MAAM;YAClB;YAEA,yEAAyE;YACzE,IAAIqC,aAAajB,UAAU;YAC3B,IAAIkB,cAAc,AAAC,C
AAA,AAACD,cAAc,KAAOhC,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YACpFA,UAAU;YAEV,+BAA+B;YAC/B,IAAIyB,WAAW,AAAC,CAAA,AAAClC,KAAK,CAACS,OAAO,IAAI,IAAKT,KAAK,CAACS,SAAS,EAAE,AAAD,IAAK;YAC5DA,UAAU;YAEV,iDAAiD;YACjD,IAAIoB,UAAU;gBACZ,IAAIpB,UAAUT,MAAMG,MAAM,EAAE;oBAC1B,MAAM,IAAIR,MAAM;gBAClB;gBACA,IAAIwC,YAAYnC,KAAK,CAACS,SAAS;gBAE/B,yCAAyC;gBACzC,uCAAuC;gBACvC,IAAI2B,KAAKD,YAAY;gBACrB,IAAIE,YAAYvC,KAAKC,KAAK,CAACoC,YAAY;gBACvC,IAAIG,KAAKD,YAAY;gBACrB,IAAIE,KAAKzC,KAAKC,KAAK,CAACsC,YAAY;gBAEhC,IAAI,CAAC3B,QAAQ8B,SAAS,CAACJ,IAAIE,IAAIC,KAAK;oBAClC,MAAM,IAAI5C,MAAM,AAAC,+BAAuC2C,OAATF,IAAG,QAAeG,OAATD,IAAG,QAAS,OAAHC;gBACnE;gBACAzB,WAAW;YACb;YAEA,IAAI,CAACA,UAAU;gBACb,MAAM,IAAInB,MAAM;YAClB;YAEA,IAAIc,SAASyB,WAAWlC,MAAMG,MAAM,EAAE;gBACpC,MAAM,IAAIR,MAAM;YAClB;YAEA,oBAAoB;YACpB,IAAI8C,WAAWC,IAAAA,4BAAiB,EAAC1C,OAAOS,QAAQyB;YAChD,IAAIS,YAAYC,IAAAA,6BAAkB,EAACX,cAAc,qCAAqC;YAEtF,kFAAkF;YAClFvB,QAAQmC,QAAQ,CAACd;YAEjB,mBAAmB;YACnB,IAAIe,UAAUpC,QAAQqC,IAAI,CAACN,UAAUE,WAAWV;YAChD,IAAI,CAACa,SAAS;gBACZ,MAAM,IAAInD,MAAM;YAClB;YAEA,IAAIqD,cAAcL,UAAUM,QAAQ;YACpC,IAAI5C,cAAc;gBAChB2C,YAAY1B,IAAI,CAACjB,cAAcC;gBAC/BA,aAAa0C,YAAY7C,MAAM;YACjC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAcgB,IAAI,CAACyB;YACrB;YAEAvC,UAAUyB;QACZ,OAAO;YACL,MAAM,IAAIvC,MAAM,AAAC,iCAAqD,OAArBoB,QAAQmC,QAAQ,CAAC;QACpE;IACF;IAEA,qDAAqD;IACrD,IAAI7C,cAAc;QAChB,4DAA4D;QAC5D,OAAOC,YAAYD,aAAaF,MAAM,GAAGE,aAAagB,KAAK,CAAC,GAAGf,aAAaD;IAC9E;IACA,OAAO8C,OAAOC,MAAM,CAAC7C;AACvB;AAKO,SAASpB,mBAAmBc,UAAmB,EAAEC,UAAmB;IACzE,OAAOmD,IAAAA,iCAAsB,EAACjE,aAAaa,YAAYC;AACzD"}
@@ -9,8 +9,14 @@ export declare function createInputStream(buffer: Buffer, offset: number, length
 /**
  * Output stream wrapper for lzma-purejs
  * Collects output bytes into Buffer chunks
+ * Uses typed arrays for memory efficiency (1 byte per element instead of 8)
+ *
+ * Memory optimization: If expectedSize is provided, pre-allocates a single buffer
+ * to avoid double-memory during Buffer.concat.
+ *
+ * @param expectedSize - Optional expected output size for pre-allocation
  */
-export declare function createOutputStream(): {
+export declare function createOutputStream(expectedSize?: number): {
     writeByte: (b: number) => void;
     write: (buf: number[], bufOffset: number, len: number) => number;
     flush: () => void;
@@ -9,8 +9,14 @@ export declare function createInputStream(buffer: Buffer, offset: number, length
 /**
  * Output stream wrapper for lzma-purejs
  * Collects output bytes into Buffer chunks
+ * Uses typed arrays for memory efficiency (1 byte per element instead of 8)
+ *
+ * Memory optimization: If expectedSize is provided, pre-allocates a single buffer
+ * to avoid double-memory during Buffer.concat.
+ *
+ * @param expectedSize - Optional expected output size for pre-allocation
  */
-export declare function createOutputStream(): {
+export declare function createOutputStream(expectedSize?: number): {
     writeByte: (b: number) => void;
     write: (buf: number[], bufOffset: number, len: number) => number;
     flush: () => void;
@@ -39,19 +39,47 @@ function createInputStream(buffer, offset, length) {
        }
    };
 }
-function createOutputStream() {
+function createOutputStream(expectedSize) {
+    // Pre-allocation mode: single buffer, no concat needed
+    // Includes bounds checking for safety on older Node.js versions
+    if (expectedSize && expectedSize > 0) {
+        var buffer = (0, _extractbaseiterator.allocBufferUnsafe)(expectedSize);
+        var bufPos = 0;
+        var bufLen = buffer.length;
+        return {
+            writeByte: function(b) {
+                if (bufPos < bufLen) {
+                    buffer[bufPos++] = b;
+                }
+            // Silently ignore overflow (should not happen with correct size)
+            },
+            write: function(buf, bufOffset, len) {
+                for(var i = 0; i < len && bufPos < bufLen; i++){
+                    buffer[bufPos++] = buf[bufOffset + i];
+                }
+                return len;
+            },
+            flush: function() {
+            // No-op for pre-allocated buffer
+            },
+            toBuffer: function() {
+                // Return only the used portion
+                return bufPos < buffer.length ? buffer.slice(0, bufPos) : buffer;
+            }
+        };
+    }
+    // Chunked mode: accumulate in 64KB chunks (fallback for unknown size)
    var chunks = [];
-    var
-    var
+    var CHUNK_SIZE = 65536; // 64KB chunks for better memory efficiency
+    var currentChunk = (0, _extractbaseiterator.allocBufferUnsafe)(CHUNK_SIZE);
+    var pos = 0;
    return {
        writeByte: function(b) {
-            currentChunk
-            if (
-            chunks.push(
-
-
-            }
-            currentChunk = [];
+            currentChunk[pos++] = b;
+            if (pos >= CHUNK_SIZE) {
+                chunks.push(currentChunk);
+                currentChunk = (0, _extractbaseiterator.allocBufferUnsafe)(CHUNK_SIZE);
+                pos = 0;
            }
        },
        write: function write(buf, bufOffset, len) {
@@ -61,17 +89,19 @@ function createOutputStream() {
            return len;
        },
        flush: function() {
-            if (
-
-
-
-
-            chunks.push(finalChunk);
-            currentChunk = [];
+            if (pos > 0) {
+                // Only keep the used portion of the current chunk
+                chunks.push(currentChunk.slice(0, pos));
+                currentChunk = (0, _extractbaseiterator.allocBufferUnsafe)(CHUNK_SIZE);
+                pos = 0;
            }
        },
        toBuffer: function toBuffer() {
            this.flush();
+            // Optimization: if single chunk, return it directly
+            if (chunks.length === 1) {
+                return chunks[0];
+            }
            return Buffer.concat(chunks);
        }
    };
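For reference, a self-contained sketch of the chunked fallback mode shown above, with Buffer.allocUnsafe standing in for the package's allocBufferUnsafe helper; the pre-allocation mode differs only in writing into one buffer sized from expectedSize:

// Sketch of the 64 KiB chunked accumulator used when the output size is unknown.
const CHUNK_SIZE = 65536;

function makeChunkedSink() {
    const chunks: Buffer[] = [];
    let current = Buffer.allocUnsafe(CHUNK_SIZE);
    let pos = 0;
    return {
        writeByte(b: number): void {
            current[pos++] = b;
            if (pos >= CHUNK_SIZE) {
                // Current chunk is full: keep it and start a fresh one.
                chunks.push(current);
                current = Buffer.allocUnsafe(CHUNK_SIZE);
                pos = 0;
            }
        },
        toBuffer(): Buffer {
            if (pos > 0) {
                // Keep only the used portion of the last chunk.
                chunks.push(current.slice(0, pos));
                current = Buffer.allocUnsafe(CHUNK_SIZE);
                pos = 0;
            }
            // Single chunk: return it directly and skip the concat copy.
            return chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
        },
    };
}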
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/streams.ts"],"sourcesContent":["// Shared stream wrappers for lzma-purejs codec interface\n// These adapters convert between Buffer/lzma-purejs stream interfaces\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\n\n/**\n * Input stream wrapper for lzma-purejs\n * Wraps a Buffer region as a readable stream interface\n */\nexport function createInputStream(buffer: Buffer, offset: number, length: number) {\n var pos = 0;\n var end = Math.min(offset + length, buffer.length);\n var start = offset;\n\n return {\n readByte: (): number => {\n if (start + pos >= end) return -1;\n return buffer[start + pos++];\n },\n read: (buf: number[], bufOffset: number, len: number): number => {\n var bytesRead = 0;\n while (bytesRead < len && start + pos < end) {\n buf[bufOffset + bytesRead] = buffer[start + pos];\n pos++;\n bytesRead++;\n }\n return bytesRead === 0 ? -1 : bytesRead;\n },\n };\n}\n\n/**\n * Output stream wrapper for lzma-purejs\n * Collects output bytes into Buffer chunks\n */\nexport function createOutputStream() {\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/streams.ts"],"sourcesContent":["// Shared stream wrappers for lzma-purejs codec interface\n// These adapters convert between Buffer/lzma-purejs stream interfaces\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\n\n/**\n * Input stream wrapper for lzma-purejs\n * Wraps a Buffer region as a readable stream interface\n */\nexport function createInputStream(buffer: Buffer, offset: number, length: number) {\n var pos = 0;\n var end = Math.min(offset + length, buffer.length);\n var start = offset;\n\n return {\n readByte: (): number => {\n if (start + pos >= end) return -1;\n return buffer[start + pos++];\n },\n read: (buf: number[], bufOffset: number, len: number): number => {\n var bytesRead = 0;\n while (bytesRead < len && start + pos < end) {\n buf[bufOffset + bytesRead] = buffer[start + pos];\n pos++;\n bytesRead++;\n }\n return bytesRead === 0 ? -1 : bytesRead;\n },\n };\n}\n\n/**\n * Output stream wrapper for lzma-purejs\n * Collects output bytes into Buffer chunks\n * Uses typed arrays for memory efficiency (1 byte per element instead of 8)\n *\n * Memory optimization: If expectedSize is provided, pre-allocates a single buffer\n * to avoid double-memory during Buffer.concat.\n *\n * @param expectedSize - Optional expected output size for pre-allocation\n */\nexport function createOutputStream(expectedSize?: number) {\n // Pre-allocation mode: single buffer, no concat needed\n // Includes bounds checking for safety on older Node.js versions\n if (expectedSize && expectedSize > 0) {\n var buffer = allocBufferUnsafe(expectedSize);\n var bufPos = 0;\n var bufLen = buffer.length;\n\n return {\n writeByte: (b: number): void => {\n if (bufPos < bufLen) {\n buffer[bufPos++] = b;\n }\n // Silently ignore overflow (should not happen with correct size)\n },\n write: (buf: number[], bufOffset: number, len: number): number => {\n for (var i = 0; i < len && bufPos < bufLen; i++) {\n buffer[bufPos++] = buf[bufOffset + i];\n }\n return len;\n },\n flush: (): void => {\n // No-op for pre-allocated buffer\n },\n toBuffer: (): Buffer => {\n // Return only the used portion\n return bufPos < buffer.length ? 
buffer.slice(0, bufPos) : buffer;\n },\n };\n }\n\n // Chunked mode: accumulate in 64KB chunks (fallback for unknown size)\n var chunks: Buffer[] = [];\n var CHUNK_SIZE = 65536; // 64KB chunks for better memory efficiency\n var currentChunk: Buffer = allocBufferUnsafe(CHUNK_SIZE);\n var pos = 0;\n\n return {\n writeByte: (b: number): void => {\n currentChunk[pos++] = b;\n if (pos >= CHUNK_SIZE) {\n chunks.push(currentChunk);\n currentChunk = allocBufferUnsafe(CHUNK_SIZE);\n pos = 0;\n }\n },\n write: function (buf: number[], bufOffset: number, len: number): number {\n for (var i = 0; i < len; i++) {\n this.writeByte(buf[bufOffset + i]);\n }\n return len;\n },\n flush: (): void => {\n if (pos > 0) {\n // Only keep the used portion of the current chunk\n chunks.push(currentChunk.slice(0, pos));\n currentChunk = allocBufferUnsafe(CHUNK_SIZE);\n pos = 0;\n }\n },\n toBuffer: function (): Buffer {\n this.flush();\n // Optimization: if single chunk, return it directly\n if (chunks.length === 1) {\n return chunks[0];\n }\n return Buffer.concat(chunks);\n },\n };\n}\n"],"names":["createInputStream","createOutputStream","buffer","offset","length","pos","end","Math","min","start","readByte","read","buf","bufOffset","len","bytesRead","expectedSize","allocBufferUnsafe","bufPos","bufLen","writeByte","b","write","i","flush","toBuffer","slice","chunks","CHUNK_SIZE","currentChunk","push","Buffer","concat"],"mappings":"AAAA,yDAAyD;AACzD,sEAAsE;;;;;;;;;;;;QAQtDA;eAAAA;;QAgCAC;eAAAA;;;mCAtCkB;AAM3B,SAASD,kBAAkBE,MAAc,EAAEC,MAAc,EAAEC,MAAc;IAC9E,IAAIC,MAAM;IACV,IAAIC,MAAMC,KAAKC,GAAG,CAACL,SAASC,QAAQF,OAAOE,MAAM;IACjD,IAAIK,QAAQN;IAEZ,OAAO;QACLO,UAAU;YACR,IAAID,QAAQJ,OAAOC,KAAK,OAAO,CAAC;YAChC,OAAOJ,MAAM,CAACO,QAAQJ,MAAM;QAC9B;QACAM,MAAM,SAACC,KAAeC,WAAmBC;YACvC,IAAIC,YAAY;YAChB,MAAOA,YAAYD,OAAOL,QAAQJ,MAAMC,IAAK;gBAC3CM,GAAG,CAACC,YAAYE,UAAU,GAAGb,MAAM,CAACO,QAAQJ,IAAI;gBAChDA;gBACAU;YACF;YACA,OAAOA,cAAc,IAAI,CAAC,IAAIA;QAChC;IACF;AACF;AAYO,SAASd,mBAAmBe,YAAqB;IACtD,uDAAuD;IACvD,gEAAgE;IAChE,IAAIA,gBAAgBA,eAAe,GAAG;QACpC,IAAId,SAASe,IAAAA,sCAAiB,EAACD;QAC/B,IAAIE,SAAS;QACb,IAAIC,SAASjB,OAAOE,MAAM;QAE1B,OAAO;YACLgB,WAAW,SAACC;gBACV,IAAIH,SAASC,QAAQ;oBACnBjB,MAAM,CAACgB,SAAS,GAAGG;gBACrB;YACA,iEAAiE;YACnE;YACAC,OAAO,SAACV,KAAeC,WAAmBC;gBACxC,IAAK,IAAIS,IAAI,GAAGA,IAAIT,OAAOI,SAASC,QAAQI,IAAK;oBAC/CrB,MAAM,CAACgB,SAAS,GAAGN,GAAG,CAACC,YAAYU,EAAE;gBACvC;gBACA,OAAOT;YACT;YACAU,OAAO;YACL,iCAAiC;YACnC;YACAC,UAAU;gBACR,+BAA+B;gBAC/B,OAAOP,SAAShB,OAAOE,MAAM,GAAGF,OAAOwB,KAAK,CAAC,GAAGR,UAAUhB;YAC5D;QACF;IACF;IAEA,sEAAsE;IACtE,IAAIyB,SAAmB,EAAE;IACzB,IAAIC,aAAa,OAAO,2CAA2C;IACnE,IAAIC,eAAuBZ,IAAAA,sCAAiB,EAACW;IAC7C,IAAIvB,MAAM;IAEV,OAAO;QACLe,WAAW,SAACC;YACVQ,YAAY,CAACxB,MAAM,GAAGgB;YACtB,IAAIhB,OAAOuB,YAAY;gBACrBD,OAAOG,IAAI,CAACD;gBACZA,eAAeZ,IAAAA,sCAAiB,EAACW;gBACjCvB,MAAM;YACR;QACF;QACAiB,OAAO,SAAPA,MAAiBV,GAAa,EAAEC,SAAiB,EAAEC,GAAW;YAC5D,IAAK,IAAIS,IAAI,GAAGA,IAAIT,KAAKS,IAAK;gBAC5B,IAAI,CAACH,SAAS,CAACR,GAAG,CAACC,YAAYU,EAAE;YACnC;YACA,OAAOT;QACT;QACAU,OAAO;YACL,IAAInB,MAAM,GAAG;gBACX,kDAAkD;gBAClDsB,OAAOG,IAAI,CAACD,aAAaH,KAAK,CAAC,GAAGrB;gBAClCwB,eAAeZ,IAAAA,sCAAiB,EAACW;gBACjCvB,MAAM;YACR;QACF;QACAoB,UAAU,SAAVA;YACE,IAAI,CAACD,KAAK;YACV,oDAAoD;YACpD,IAAIG,OAAOvB,MAAM,KAAK,GAAG;gBACvB,OAAOuB,MAAM,CAAC,EAAE;YAClB;YACA,OAAOI,OAAOC,MAAM,CAACL;QACvB;IACF;AACF"}
@@ -158,7 +158,8 @@ var ErrorCode = {
    TRUNCATED_ARCHIVE: 'TRUNCATED_ARCHIVE',
    CORRUPT_HEADER: 'CORRUPT_HEADER',
    ENCRYPTED_ARCHIVE: 'ENCRYPTED_ARCHIVE',
-    COMPRESSED_HEADER: 'COMPRESSED_HEADER'
+    COMPRESSED_HEADER: 'COMPRESSED_HEADER',
+    DECOMPRESSION_FAILED: 'DECOMPRESSION_FAILED'
 };
 function createCodedError(message, code) {
     var err = new Error(message);
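The new DECOMPRESSION_FAILED code slots into the existing coded-error pattern; a minimal self-contained echo of how callers can branch on it (types mirrored from the constants source, usage illustrative):

interface CodedError extends Error {
    code: string;
}

const ErrorCode = { DECOMPRESSION_FAILED: 'DECOMPRESSION_FAILED' };

function createCodedError(message: string, code: string): CodedError {
    const err = new Error(message) as CodedError;
    err.code = code;
    return err;
}

// Callers branch on err.code instead of parsing the message text.
const err = createCodedError('File data out of bounds', ErrorCode.DECOMPRESSION_FAILED);
if (err.code === ErrorCode.DECOMPRESSION_FAILED) {
    // surface a decompression failure to the iterator consumer
}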
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/constants.ts"],"sourcesContent":["// 7z format constants\n// Reference: https://py7zr.readthedocs.io/en/latest/archive_format.html\n\n// 7z signature: '7z' + magic bytes\nexport var SEVENZ_MAGIC = [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c];\n\n// Header sizes\nexport var SIGNATURE_HEADER_SIZE = 32;\nexport var START_HEADER_SIZE = 20; // Part of signature header after magic + version\n\n// Property IDs for encoded header\nexport var PropertyId = {\n kEnd: 0x00,\n kHeader: 0x01,\n kArchiveProperties: 0x02,\n kAdditionalStreamsInfo: 0x03,\n kMainStreamsInfo: 0x04,\n kFilesInfo: 0x05,\n kPackInfo: 0x06,\n kUnpackInfo: 0x07,\n kSubStreamsInfo: 0x08,\n kSize: 0x09,\n kCRC: 0x0a,\n kFolder: 0x0b,\n kCodersUnpackSize: 0x0c,\n kNumUnpackStream: 0x0d,\n kEmptyStream: 0x0e,\n kEmptyFile: 0x0f,\n kAnti: 0x10,\n kName: 0x11,\n kCTime: 0x12,\n kATime: 0x13,\n kMTime: 0x14,\n kWinAttributes: 0x15,\n kComment: 0x16,\n kEncodedHeader: 0x17,\n kStartPos: 0x18,\n kDummy: 0x19,\n};\n\n// Codec IDs\n// 7z uses variable-length codec IDs\nexport var CodecId = {\n COPY: [0x00],\n DELTA: [0x03],\n LZMA: [0x03, 0x01, 0x01],\n LZMA2: [0x21],\n BCJ_X86: [0x03, 0x03, 0x01, 0x03],\n BCJ2: [0x03, 0x03, 0x01, 0x1b],\n DEFLATE: [0x04, 0x01, 0x08],\n BZIP2: [0x04, 0x02, 0x02],\n AES: [0x06, 0xf1, 0x07, 0x01],\n};\n\n// File attribute flags (Windows style, stored in FilesInfo)\nexport var FileAttribute = {\n READONLY: 0x01,\n HIDDEN: 0x02,\n SYSTEM: 0x04,\n DIRECTORY: 0x10,\n ARCHIVE: 0x20,\n DEVICE: 0x40,\n NORMAL: 0x80,\n TEMPORARY: 0x100,\n SPARSE_FILE: 0x200,\n REPARSE_POINT: 0x400,\n COMPRESSED: 0x800,\n OFFLINE: 0x1000,\n NOT_CONTENT_INDEXED: 0x2000,\n ENCRYPTED: 0x4000,\n UNIX_EXTENSION: 0x8000,\n};\n\n// Unix permission modes (decimal values for Node 0.8 compatibility)\nexport var UnixMode = {\n DIR: 16384, // 0o40000 - directory\n FILE: 32768, // 0o100000 - regular file\n SYMLINK: 40960, // 0o120000 - symbolic link\n RWXRWXRWX: 511, // 0o777\n RWXRXRX: 493, // 0o755\n RWRR: 420, // 0o644\n DEFAULT_DIR: 493, // 0o755 - rwxr-xr-x\n DEFAULT_FILE: 420, // 0o644 - rw-r--r--\n};\n\n// Error codes\nexport var ErrorCode = {\n INVALID_SIGNATURE: 'INVALID_SIGNATURE',\n CRC_MISMATCH: 'CRC_MISMATCH',\n UNSUPPORTED_CODEC: 'UNSUPPORTED_CODEC',\n UNSUPPORTED_VERSION: 'UNSUPPORTED_VERSION',\n UNSUPPORTED_FEATURE: 'UNSUPPORTED_FEATURE',\n TRUNCATED_ARCHIVE: 'TRUNCATED_ARCHIVE',\n CORRUPT_HEADER: 'CORRUPT_HEADER',\n ENCRYPTED_ARCHIVE: 'ENCRYPTED_ARCHIVE',\n COMPRESSED_HEADER: 'COMPRESSED_HEADER',\n};\n\n// Error with code property\nexport interface CodedError extends Error {\n code: string;\n}\n\n/**\n * Create an error with a code property\n */\nexport function createCodedError(message: string, code: string): CodedError {\n var err = new Error(message) as CodedError;\n err.code = code;\n return 
err;\n}\n"],"names":["CodecId","ErrorCode","FileAttribute","PropertyId","SEVENZ_MAGIC","SIGNATURE_HEADER_SIZE","START_HEADER_SIZE","UnixMode","createCodedError","kEnd","kHeader","kArchiveProperties","kAdditionalStreamsInfo","kMainStreamsInfo","kFilesInfo","kPackInfo","kUnpackInfo","kSubStreamsInfo","kSize","kCRC","kFolder","kCodersUnpackSize","kNumUnpackStream","kEmptyStream","kEmptyFile","kAnti","kName","kCTime","kATime","kMTime","kWinAttributes","kComment","kEncodedHeader","kStartPos","kDummy","COPY","DELTA","LZMA","LZMA2","BCJ_X86","BCJ2","DEFLATE","BZIP2","AES","READONLY","HIDDEN","SYSTEM","DIRECTORY","ARCHIVE","DEVICE","NORMAL","TEMPORARY","SPARSE_FILE","REPARSE_POINT","COMPRESSED","OFFLINE","NOT_CONTENT_INDEXED","ENCRYPTED","UNIX_EXTENSION","DIR","FILE","SYMLINK","RWXRWXRWX","RWXRXRX","RWRR","DEFAULT_DIR","DEFAULT_FILE","INVALID_SIGNATURE","CRC_MISMATCH","UNSUPPORTED_CODEC","UNSUPPORTED_VERSION","UNSUPPORTED_FEATURE","TRUNCATED_ARCHIVE","CORRUPT_HEADER","ENCRYPTED_ARCHIVE","COMPRESSED_HEADER","message","code","err","Error"],"mappings":"AAAA,sBAAsB;AACtB,wEAAwE;AAExE,mCAAmC;;;;;;;;;;;;QAuCxBA;eAAAA;;QA4CAC;eAAAA;;QA/BAC;eAAAA;;QA5CAC;eAAAA;;QAPAC;eAAAA;;QAGAC;eAAAA;;QACAC;eAAAA;;QAkEAC;eAAAA;;
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/constants.ts"],"sourcesContent":["// 7z format constants\n// Reference: https://py7zr.readthedocs.io/en/latest/archive_format.html\n\n// 7z signature: '7z' + magic bytes\nexport var SEVENZ_MAGIC = [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c];\n\n// Header sizes\nexport var SIGNATURE_HEADER_SIZE = 32;\nexport var START_HEADER_SIZE = 20; // Part of signature header after magic + version\n\n// Property IDs for encoded header\nexport var PropertyId = {\n kEnd: 0x00,\n kHeader: 0x01,\n kArchiveProperties: 0x02,\n kAdditionalStreamsInfo: 0x03,\n kMainStreamsInfo: 0x04,\n kFilesInfo: 0x05,\n kPackInfo: 0x06,\n kUnpackInfo: 0x07,\n kSubStreamsInfo: 0x08,\n kSize: 0x09,\n kCRC: 0x0a,\n kFolder: 0x0b,\n kCodersUnpackSize: 0x0c,\n kNumUnpackStream: 0x0d,\n kEmptyStream: 0x0e,\n kEmptyFile: 0x0f,\n kAnti: 0x10,\n kName: 0x11,\n kCTime: 0x12,\n kATime: 0x13,\n kMTime: 0x14,\n kWinAttributes: 0x15,\n kComment: 0x16,\n kEncodedHeader: 0x17,\n kStartPos: 0x18,\n kDummy: 0x19,\n};\n\n// Codec IDs\n// 7z uses variable-length codec IDs\nexport var CodecId = {\n COPY: [0x00],\n DELTA: [0x03],\n LZMA: [0x03, 0x01, 0x01],\n LZMA2: [0x21],\n BCJ_X86: [0x03, 0x03, 0x01, 0x03],\n BCJ2: [0x03, 0x03, 0x01, 0x1b],\n DEFLATE: [0x04, 0x01, 0x08],\n BZIP2: [0x04, 0x02, 0x02],\n AES: [0x06, 0xf1, 0x07, 0x01],\n};\n\n// File attribute flags (Windows style, stored in FilesInfo)\nexport var FileAttribute = {\n READONLY: 0x01,\n HIDDEN: 0x02,\n SYSTEM: 0x04,\n DIRECTORY: 0x10,\n ARCHIVE: 0x20,\n DEVICE: 0x40,\n NORMAL: 0x80,\n TEMPORARY: 0x100,\n SPARSE_FILE: 0x200,\n REPARSE_POINT: 0x400,\n COMPRESSED: 0x800,\n OFFLINE: 0x1000,\n NOT_CONTENT_INDEXED: 0x2000,\n ENCRYPTED: 0x4000,\n UNIX_EXTENSION: 0x8000,\n};\n\n// Unix permission modes (decimal values for Node 0.8 compatibility)\nexport var UnixMode = {\n DIR: 16384, // 0o40000 - directory\n FILE: 32768, // 0o100000 - regular file\n SYMLINK: 40960, // 0o120000 - symbolic link\n RWXRWXRWX: 511, // 0o777\n RWXRXRX: 493, // 0o755\n RWRR: 420, // 0o644\n DEFAULT_DIR: 493, // 0o755 - rwxr-xr-x\n DEFAULT_FILE: 420, // 0o644 - rw-r--r--\n};\n\n// Error codes\nexport var ErrorCode = {\n INVALID_SIGNATURE: 'INVALID_SIGNATURE',\n CRC_MISMATCH: 'CRC_MISMATCH',\n UNSUPPORTED_CODEC: 'UNSUPPORTED_CODEC',\n UNSUPPORTED_VERSION: 'UNSUPPORTED_VERSION',\n UNSUPPORTED_FEATURE: 'UNSUPPORTED_FEATURE',\n TRUNCATED_ARCHIVE: 'TRUNCATED_ARCHIVE',\n CORRUPT_HEADER: 'CORRUPT_HEADER',\n ENCRYPTED_ARCHIVE: 'ENCRYPTED_ARCHIVE',\n COMPRESSED_HEADER: 'COMPRESSED_HEADER',\n DECOMPRESSION_FAILED: 'DECOMPRESSION_FAILED',\n};\n\n// Error with code property\nexport interface CodedError extends Error {\n code: string;\n}\n\n/**\n * Create an error with a code property\n */\nexport function createCodedError(message: string, code: string): CodedError {\n var err = new Error(message) as CodedError;\n err.code = code;\n return 
err;\n}\n"],"names":["CodecId","ErrorCode","FileAttribute","PropertyId","SEVENZ_MAGIC","SIGNATURE_HEADER_SIZE","START_HEADER_SIZE","UnixMode","createCodedError","kEnd","kHeader","kArchiveProperties","kAdditionalStreamsInfo","kMainStreamsInfo","kFilesInfo","kPackInfo","kUnpackInfo","kSubStreamsInfo","kSize","kCRC","kFolder","kCodersUnpackSize","kNumUnpackStream","kEmptyStream","kEmptyFile","kAnti","kName","kCTime","kATime","kMTime","kWinAttributes","kComment","kEncodedHeader","kStartPos","kDummy","COPY","DELTA","LZMA","LZMA2","BCJ_X86","BCJ2","DEFLATE","BZIP2","AES","READONLY","HIDDEN","SYSTEM","DIRECTORY","ARCHIVE","DEVICE","NORMAL","TEMPORARY","SPARSE_FILE","REPARSE_POINT","COMPRESSED","OFFLINE","NOT_CONTENT_INDEXED","ENCRYPTED","UNIX_EXTENSION","DIR","FILE","SYMLINK","RWXRWXRWX","RWXRXRX","RWRR","DEFAULT_DIR","DEFAULT_FILE","INVALID_SIGNATURE","CRC_MISMATCH","UNSUPPORTED_CODEC","UNSUPPORTED_VERSION","UNSUPPORTED_FEATURE","TRUNCATED_ARCHIVE","CORRUPT_HEADER","ENCRYPTED_ARCHIVE","COMPRESSED_HEADER","DECOMPRESSION_FAILED","message","code","err","Error"],"mappings":"AAAA,sBAAsB;AACtB,wEAAwE;AAExE,mCAAmC;;;;;;;;;;;;QAuCxBA;eAAAA;;QA4CAC;eAAAA;;QA/BAC;eAAAA;;QA5CAC;eAAAA;;QAPAC;eAAAA;;QAGAC;eAAAA;;QACAC;eAAAA;;QAkEAC;eAAAA;;QAiCKC;eAAAA;;;AAvGT,IAAIJ,eAAe;IAAC;IAAM;IAAM;IAAM;IAAM;IAAM;CAAK;AAGvD,IAAIC,wBAAwB;AAC5B,IAAIC,oBAAoB,IAAI,iDAAiD;AAG7E,IAAIH,aAAa;IACtBM,MAAM;IACNC,SAAS;IACTC,oBAAoB;IACpBC,wBAAwB;IACxBC,kBAAkB;IAClBC,YAAY;IACZC,WAAW;IACXC,aAAa;IACbC,iBAAiB;IACjBC,OAAO;IACPC,MAAM;IACNC,SAAS;IACTC,mBAAmB;IACnBC,kBAAkB;IAClBC,cAAc;IACdC,YAAY;IACZC,OAAO;IACPC,OAAO;IACPC,QAAQ;IACRC,QAAQ;IACRC,QAAQ;IACRC,gBAAgB;IAChBC,UAAU;IACVC,gBAAgB;IAChBC,WAAW;IACXC,QAAQ;AACV;AAIO,IAAIlC,UAAU;IACnBmC,MAAM;QAAC;KAAK;IACZC,OAAO;QAAC;KAAK;IACbC,MAAM;QAAC;QAAM;QAAM;KAAK;IACxBC,OAAO;QAAC;KAAK;IACbC,SAAS;QAAC;QAAM;QAAM;QAAM;KAAK;IACjCC,MAAM;QAAC;QAAM;QAAM;QAAM;KAAK;IAC9BC,SAAS;QAAC;QAAM;QAAM;KAAK;IAC3BC,OAAO;QAAC;QAAM;QAAM;KAAK;IACzBC,KAAK;QAAC;QAAM;QAAM;QAAM;KAAK;AAC/B;AAGO,IAAIzC,gBAAgB;IACzB0C,UAAU;IACVC,QAAQ;IACRC,QAAQ;IACRC,WAAW;IACXC,SAAS;IACTC,QAAQ;IACRC,QAAQ;IACRC,WAAW;IACXC,aAAa;IACbC,eAAe;IACfC,YAAY;IACZC,SAAS;IACTC,qBAAqB;IACrBC,WAAW;IACXC,gBAAgB;AAClB;AAGO,IAAInD,WAAW;IACpBoD,KAAK;IACLC,MAAM;IACNC,SAAS;IACTC,WAAW;IACXC,SAAS;IACTC,MAAM;IACNC,aAAa;IACbC,cAAc;AAChB;AAGO,IAAIjE,YAAY;IACrBkE,mBAAmB;IACnBC,cAAc;IACdC,mBAAmB;IACnBC,qBAAqB;IACrBC,qBAAqB;IACrBC,mBAAmB;IACnBC,gBAAgB;IAChBC,mBAAmB;IACnBC,mBAAmB;IACnBC,sBAAsB;AACxB;AAUO,SAASpE,iBAAiBqE,OAAe,EAAEC,IAAY;IAC5D,IAAIC,MAAM,IAAIC,MAAMH;IACpBE,IAAID,IAAI,GAAGA;IACX,OAAOC;AACT"}
@@ -54,6 +54,8 @@ export declare class SevenZipParser {
    private entries;
    private parsed;
    private decompressedCache;
+    private filesPerFolder;
+    private extractedPerFolder;
    constructor(source: ArchiveSource);
    /**
     * Parse the archive structure
@@ -90,7 +92,8 @@ export declare class SevenZipParser {
     */
    private folderHasBcj2;
    /**
-     * Get decompressed data for a folder, with caching for solid archives
+     * Get decompressed data for a folder, with smart caching for solid archives
+     * Only caches when multiple files share a block, releases when last file extracted
     */
    private getDecompressedFolder;
    /**
@@ -279,6 +279,11 @@ import { readNumber } from './NumberCodec.js';
        }
        // Use the properly parsed numUnpackStreamsPerFolder from the archive header
        var streamsPerFolder = this.streamsInfo.numUnpackStreamsPerFolder;
+        // Initialize files per folder count (for smart caching)
+        for(var f = 0; f < streamsPerFolder.length; f++){
+            this.filesPerFolder[f] = streamsPerFolder[f];
+            this.extractedPerFolder[f] = 0;
+        }
        // Now build entries with proper folder/stream tracking
        var streamIndex = 0;
        var folderIndex = 0;
@@ -383,8 +388,9 @@ import { readNumber } from './NumberCodec.js';
                throw createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC);
            }
        }
-        // Get decompressed data for this folder (with caching
-        var
+        // Get decompressed data for this folder (with smart caching)
+        var folderIdx = entry._folderIndex;
+        var data = this.getDecompressedFolder(folderIdx);
        // Calculate file offset within the decompressed block
        // For solid archives, multiple files are concatenated in the block
        var fileStart = 0;
@@ -396,8 +402,18 @@ import { readNumber } from './NumberCodec.js';
        var fileSize = entry.size;
        // Create a PassThrough stream with the file data
        var outputStream = new PassThrough();
+        // Bounds check to prevent "oob" error on older Node versions
+        if (fileStart + fileSize > data.length) {
+            throw createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED);
+        }
        var fileData = data.slice(fileStart, fileStart + fileSize);
        outputStream.end(fileData);
+        // Track extraction and release cache when all files from this folder are done
+        this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
+        if (this.extractedPerFolder[folderIdx] >= this.filesPerFolder[folderIdx]) {
+            // All files from this folder extracted, release cache
+            delete this.decompressedCache[folderIdx];
+        }
        return outputStream;
    }
    /**
@@ -411,7 +427,8 @@ import { readNumber } from './NumberCodec.js';
        return false;
    }
    /**
-     * Get decompressed data for a folder, with caching for solid archives
+     * Get decompressed data for a folder, with smart caching for solid archives
+     * Only caches when multiple files share a block, releases when last file extracted
     */ getDecompressedFolder(folderIndex) {
        // Check cache first
        if (this.decompressedCache[folderIndex]) {
@@ -421,10 +438,18 @@ import { readNumber } from './NumberCodec.js';
            throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
        }
        var folder = this.streamsInfo.folders[folderIndex];
+        // Check how many files remain in this folder
+        var filesInFolder = this.filesPerFolder[folderIndex] || 1;
+        var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+        var remainingFiles = filesInFolder - extractedFromFolder;
+        // Only cache if more than 1 file remains (including the current one being extracted)
+        var shouldCache = remainingFiles > 1;
        // Check if this folder uses BCJ2 (requires special multi-stream handling)
        if (this.folderHasBcj2(folder)) {
            var data = this.decompressBcj2Folder(folderIndex);
-
+            if (shouldCache) {
+                this.decompressedCache[folderIndex] = data;
+            }
            return data;
        }
        // Calculate packed data position
@@ -450,8 +475,10 @@ import { readNumber } from './NumberCodec.js';
            var unpackSize = folder.unpackSizes[l];
            data2 = codec.decode(data2, coderInfo.properties, unpackSize);
        }
-        // Cache
-
+        // Cache only if more files remain in this folder
+        if (shouldCache) {
+            this.decompressedCache[folderIndex] = data2;
+        }
        return data2;
    }
    /**
@@ -580,6 +607,13 @@ import { readNumber } from './NumberCodec.js';
            bcj2OutputStart += folder.coders[co3].numOutStreams;
        }
        var bcj2UnpackSize = folder.unpackSizes[bcj2OutputStart];
+        // Memory optimization: Clear intermediate buffers to help GC
+        // These are no longer needed after bcj2Inputs is built
+        for(var key in coderOutputs){
+            delete coderOutputs[key];
+        }
+        // Clear packStreams array (allows GC to free compressed data)
+        packStreams.length = 0;
        // Decode BCJ2
        return decodeBcj2Multi(bcj2Inputs, undefined, bcj2UnpackSize);
    }
@@ -643,8 +677,12 @@ import { readNumber } from './NumberCodec.js';
        this.filesInfo = [];
        this.entries = [];
        this.parsed = false;
-        //
+        // Smart cache for decompressed solid blocks
+        // Only caches when multiple files share a block, releases when last file extracted
        this.decompressedCache = {};
+        // Track files per folder and how many have been extracted
+        this.filesPerFolder = {};
+        this.extractedPerFolder = {};
        this.source = source;
    }
 }
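The SevenZipParser hunks above implement the "smart caching" described in the updated .d.ts comments: a decompressed solid block is cached only while more than one file still needs it, and dropped once the last file from that folder has been read. A hedged, self-contained sketch of that bookkeeping (class and method names are illustrative, not the parser's API):

class FolderCache {
    private cache: Record<number, Buffer> = {};
    private filesPerFolder: Record<number, number> = {};
    private extractedPerFolder: Record<number, number> = {};

    setFolderFileCount(folder: number, count: number): void {
        this.filesPerFolder[folder] = count;
        this.extractedPerFolder[folder] = 0;
    }

    getOrDecode(folder: number, decode: () => Buffer): Buffer {
        if (this.cache[folder]) return this.cache[folder];
        const remaining =
            (this.filesPerFolder[folder] || 1) - (this.extractedPerFolder[folder] || 0);
        const data = decode();
        // Cache only while another file still needs this decompressed block.
        if (remaining > 1) this.cache[folder] = data;
        return data;
    }

    markExtracted(folder: number): void {
        this.extractedPerFolder[folder] = (this.extractedPerFolder[folder] || 0) + 1;
        if (this.extractedPerFolder[folder] >= (this.filesPerFolder[folder] || 1)) {
            // Last file from this folder extracted: release the cached block.
            delete this.cache[folder];
        }
    }
}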