7z-iterator 2.2.2 → 2.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/sevenz/ArchiveSource.d.cts +4 -3
- package/dist/cjs/sevenz/ArchiveSource.d.ts +4 -3
- package/dist/cjs/sevenz/ArchiveSource.js +10 -8
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.d.cts +7 -0
- package/dist/cjs/sevenz/SevenZipParser.d.ts +7 -0
- package/dist/cjs/sevenz/SevenZipParser.js +21 -8
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/index.js +5 -7
- package/dist/cjs/sevenz/codecs/index.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +4 -3
- package/dist/esm/sevenz/ArchiveSource.js +11 -9
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.d.ts +7 -0
- package/dist/esm/sevenz/SevenZipParser.js +21 -8
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/index.js +5 -7
- package/dist/esm/sevenz/codecs/index.js.map +1 -1
- package/package.json +3 -3
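The thread running through these files: `ArchiveSource.read()` now returns a `BufferLike` (a contiguous `Buffer`, or a `BufferList` of chunks when the requested length cannot be safely allocated in one piece), and `SevenZipParser` gains a private `toBuffer` helper that flattens small results before byte-level parsing. A minimal consumer-side sketch, not part of the diff, assuming `BufferLike` is the `Buffer | BufferList` union exported by extract-base-iterator and that `BufferList` exposes `toBuffer()` (as the transpiled helper further down suggests):

import { type BufferLike } from 'extract-base-iterator';

// Hypothetical helper: normalize a read() result to a contiguous Buffer.
// Small reads already arrive as Buffer; oversized reads arrive as a BufferList.
function asBuffer(data: BufferLike): Buffer {
  return Buffer.isBuffer(data) ? data : data.toBuffer();
}

// Usage sketch: header reads are tiny, so flattening them is safe everywhere.
// const signature = parseSignatureHeader(asBuffer(source.read(0, SIGNATURE_HEADER_SIZE)));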
@@ -4,12 +4,13 @@
  * Provides a common interface for reading archive data from either
  * a file descriptor or an in-memory buffer.
  */
+import { type BufferLike } from 'extract-base-iterator';
 import type Stream from 'stream';
 /**
  * Archive source abstraction - allows reading from file descriptor or buffer
  */
 export interface ArchiveSource {
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 getSize(): number;
 close(): void;
 /**
@@ -26,7 +27,7 @@ export interface ArchiveSource {
 export declare class BufferSource implements ArchiveSource {
 private buffer;
 constructor(buffer: Buffer);
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 getSize(): number;
 close(): void;
 /**
@@ -45,7 +46,7 @@ export declare class FileSource implements ArchiveSource {
 private fd;
 private size;
 constructor(fd: number, size: number);
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 private readChunk;
 getSize(): number;
 close(): void;
@@ -4,12 +4,13 @@
  * Provides a common interface for reading archive data from either
  * a file descriptor or an in-memory buffer.
  */
+import { type BufferLike } from 'extract-base-iterator';
 import type Stream from 'stream';
 /**
  * Archive source abstraction - allows reading from file descriptor or buffer
  */
 export interface ArchiveSource {
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 getSize(): number;
 close(): void;
 /**
@@ -26,7 +27,7 @@ export interface ArchiveSource {
 export declare class BufferSource implements ArchiveSource {
 private buffer;
 constructor(buffer: Buffer);
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 getSize(): number;
 close(): void;
 /**
@@ -45,7 +46,7 @@ export declare class FileSource implements ArchiveSource {
 private fd;
 private size;
 constructor(fd: number, size: number);
-read(position: number, length: number):
+read(position: number, length: number): BufferLike;
 private readChunk;
 getSize(): number;
 close(): void;
@@ -87,27 +87,29 @@ var FileSource = /*#__PURE__*/ function() {
 }
 var _proto = FileSource.prototype;
 _proto.read = function read(position, length) {
-//
-
-if (
+// For small reads that fit in a single buffer, return directly
+// This is efficient on all Node versions
+if ((0, _extractbaseiterator.canAllocateBufferSize)(length)) {
 return this.readChunk(position, length);
 }
-// For large reads,
-
+// For large reads, return a BufferList to avoid large contiguous allocation
+// This enables LZMA1 decompression on old Node versions
+var result = new _extractbaseiterator.BufferList();
 var totalBytesRead = 0;
 var currentPos = position;
 while(totalBytesRead < length){
 var remaining = length - totalBytesRead;
-
+// Use safe chunk size that works on all Node versions
+var chunkSize = Math.min(remaining, (0, _extractbaseiterator.canAllocateBufferSize)(remaining) ? remaining : 256 * 1024 * 1024);
 var chunk = this.readChunk(currentPos, chunkSize);
-
+result.append(chunk);
 totalBytesRead += chunk.length;
 currentPos += chunk.length;
 if (chunk.length < chunkSize) {
 break;
 }
 }
-return
+return result;
 };
 _proto.readChunk = function readChunk(position, length) {
 var buf = (0, _extractbaseiterator.allocBuffer)(length);
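For reference, the transpiled CJS above corresponds to this source-level shape: when `length` bytes cannot be safely allocated at once, the read is split into chunks of at most 256 MiB and appended to a `BufferList`. A sketch only, not part of the diff, written against the extract-base-iterator exports the diff itself references (`allocBuffer`, `canAllocateBufferSize`, `BufferList`); the standalone `readFromFd` wrapper is hypothetical:

import { allocBuffer, type BufferLike, BufferList, canAllocateBufferSize } from 'extract-base-iterator';
import fs from 'fs';

// Hypothetical standalone version of FileSource.read, for illustration.
function readFromFd(fd: number, position: number, length: number): BufferLike {
  const readChunk = (pos: number, len: number): Buffer => {
    const buf = allocBuffer(len);
    const bytesRead = fs.readSync(fd, buf, 0, len, pos);
    return bytesRead < len ? buf.slice(0, bytesRead) : buf; // short read near EOF
  };

  // Small reads that fit in a single allocation are returned directly.
  if (canAllocateBufferSize(length)) return readChunk(position, length);

  // Large reads: accumulate chunks in a BufferList instead of one huge
  // contiguous Buffer, which older Node versions cannot allocate.
  const result = new BufferList();
  let total = 0;
  let pos = position;
  while (total < length) {
    const remaining = length - total;
    const chunkSize = Math.min(remaining, canAllocateBufferSize(remaining) ? remaining : 256 * 1024 * 1024);
    const chunk = readChunk(pos, chunkSize);
    result.append(chunk);
    total += chunk.length;
    pos += chunk.length;
    if (chunk.length < chunkSize) break; // EOF before the requested length
  }
  return result;
}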
@@ -1 +1 @@
-{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer,
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, type BufferLike, BufferList, canAllocateBufferSize, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): BufferLike;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): BufferLike {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): BufferLike {\n // For small reads that fit in a single buffer, return directly\n // This is efficient on all Node versions\n if (canAllocateBufferSize(length)) {\n return this.readChunk(position, length);\n }\n\n // For large reads, return a BufferList to avoid large contiguous allocation\n // This enables LZMA1 decompression on old Node versions\n const result = new BufferList();\n let totalBytesRead = 0;\n let currentPos = position;\n\n while (totalBytesRead < length) {\n const remaining = length - totalBytesRead;\n // Use safe chunk size that works on all Node versions\n const chunkSize = Math.min(remaining, canAllocateBufferSize(remaining) ? 
remaining : 256 * 1024 * 1024);\n const chunk = this.readChunk(currentPos, chunkSize);\n\n result.append(chunk);\n totalBytesRead += chunk.length;\n currentPos += chunk.length;\n\n if (chunk.length < chunkSize) {\n // EOF reached\n break;\n }\n }\n\n return result;\n }\n\n private readChunk(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n }\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n 
}\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","canAllocateBufferSize","readChunk","result","BufferList","totalBytesRead","remaining","append","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjF6E;yDAC3E;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IA8BC,GA9BDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,+DAA+D;QAC/D,yCAAyC;QACzC,IAAIgB,IAAAA,0CAAqB,EAAChB,SAAS;YACjC,OAAO,IAAI,CAACiB,SAAS,CAAClB,UAAUC;QAClC;QAEA,4EAA4E;QAC5E,wDAAwD;QACxD,IAAMkB,SAAS,IAAIC,+BAAU;QAC7B,IAAIC,iBAAiB;QACrB,IAAIX,aAAaV;QAEjB,MAAOqB,iBAAiBpB,OAAQ;YAC9B,IAAMqB,YAAYrB,SAASoB;YAC3B,sDAAsD;YACtD,IAAMV,YAAYH,KAAKC,GAAG,CAACa,WAAWL,IAAAA,0CAAqB,EAACK,aAAaA,YAAY,MAAM,OAAO;YAClG,IAAMR,QAAQ,IAAI,CAACI,SAAS,CAACR,YAAYC;YAEzCQ,OAAOI,MAAM,CAACT;YACdO,kBAAkBP,MAAMb,MAAM;YAC9BS,cAAcI,MAAMb,MAAM;YAE1B,IAAIa,MAAMb,MAAM,GAAGU,WAAW;gBAE5B;YACF;QACF;QAEA,OAAOQ;IACT;IAEA,OAAQD,SAOP,GAPD,SAAQA,UAAUlB,QAAgB,EAAEC,MAAc;QAChD,IAAMuB,MAAMC,IAAAA,gCAAW,EAACxB;QACxB,IAAMyB,YAAYC,WAAE,CAACC,QAAQ,CAAC,IAAI,CAACb,EAAE,EAAES,KAAK,GAAGvB,QAAQD;QACvD,IAAI0B,YAAYzB,QAAQ;YACtB,OAAOuB,IAAItB,KAAK,CAAC,GAAGwB;QACtB;QACA,OAAOF;IACT;IAEArB,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFuB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACd,EAAE;QACtB,EAAE,OAAOe,IAAI;QACX,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDzB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAIW,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMrB,YAAY,OAAO,cAAc;QACvC,IAAIsB,aAAqC;QAEzC,IAAMtC,SAASF,qBAAqB,SAACyC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMnB,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASyB;YAC5C,IAAIb,UAAU,GAAG;gBACfmB,WAAW;gBACXE,EAAEtB,IAAI,CAAC;gBACP;YACF;YAEAmB,UAAU;YACV,IAAMjC,SAAS2B,IAAAA,gCAAW,EAACZ;YAC3B,IAAMsB,gBAAgB7B,SAASoB;YAE/BC,WAAE,CAAC5B,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQsB,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEtB,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIyB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEtB,IAAI,CAAC;gBACT,OAAO;oBACLc,aAAaW;oBACbH,EAAEtB,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAGmC;gBACzB;YACF;QACF;QAEA,OAAO1C;IACT;WA/GWH"}
@@ -56,6 +56,13 @@ export declare class SevenZipParser {
 private folderSplitters;
 private pendingFolders;
 constructor(source: ArchiveSource);
+/**
+ * Convert BufferLike to Buffer (for small data like headers)
+ */
+private toBuffer;
+/**
+ * Decode using codec - accepts BufferLike for LZMA1 support
+ */
 private decodeWithCodec;
 /**
 * Parse the archive structure
@@ -56,6 +56,13 @@ export declare class SevenZipParser {
 private folderSplitters;
 private pendingFolders;
 constructor(source: ArchiveSource);
+/**
+ * Convert BufferLike to Buffer (for small data like headers)
+ */
+private toBuffer;
+/**
+ * Decode using codec - accepts BufferLike for LZMA1 support
+ */
 private decodeWithCodec;
 /**
 * Parse the archive structure
@@ -212,7 +212,14 @@ var SevenZipParser = /*#__PURE__*/ function() {
 this.source = source;
 }
 var _proto = SevenZipParser.prototype;
-
+/**
+ * Convert BufferLike to Buffer (for small data like headers)
+ */ _proto.toBuffer = function toBuffer(buf) {
+return Buffer.isBuffer(buf) ? buf : buf.toBuffer();
+};
+/**
+ * Decode using codec - accepts BufferLike for LZMA1 support
+ */ _proto.decodeWithCodec = function decodeWithCodec(codec, input, properties, unpackSize, callback) {
 var done = (0, _calloncefn.default)(callback);
 try {
 codec.decode(input, properties, unpackSize, function(err, result) {
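At the source level, the two additions above are a small normalization helper plus a decode wrapper that forwards `BufferLike` input untouched, so an LZMA1 codec can consume a `BufferList` directly instead of requiring one contiguous allocation. A sketch under assumed types, not part of the diff: the `Codec` interface is only inferred from the `codec.decode(...)` call in the transpiled code, and `once` stands in for the package's call-once wrapper:

import { type BufferLike } from 'extract-base-iterator';

// Assumed codec shape, inferred from the codec.decode(...) call above.
interface Codec {
  decode(input: BufferLike, properties: Buffer | undefined, unpackSize: number, callback: (err: Error | null, result?: Buffer) => void): void;
}

// Flatten small payloads (signature header, encoded header, scan chunks) to a
// contiguous Buffer before byte-level parsing.
function toBuffer(buf: BufferLike): Buffer {
  return Buffer.isBuffer(buf) ? buf : buf.toBuffer();
}

// Guard so the callback fires at most once, mirroring the call-once wrapper
// used in the transpiled code.
function once<T extends (...args: any[]) => void>(fn: T): T {
  let called = false;
  return ((...args: any[]) => {
    if (!called) {
      called = true;
      fn(...args);
    }
  }) as T;
}

// Codec input stays BufferLike; only the decoded result is expected as Buffer.
function decodeWithCodec(codec: Codec, input: BufferLike, properties: Buffer | undefined, unpackSize: number, callback: (err: Error | null, result?: Buffer) => void): void {
  const done = once(callback);
  try {
    codec.decode(input, properties, unpackSize, done);
  } catch (err) {
    done(err as Error);
  }
}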
@@ -273,7 +280,7 @@ var SevenZipParser = /*#__PURE__*/ function() {
 callback((0, _constantsts.createCodedError)('Archive too small', _constantsts.ErrorCode.TRUNCATED_ARCHIVE));
 return;
 }
-signature = (0, _headersts.parseSignatureHeader)(sigBuf);
+signature = (0, _headersts.parseSignatureHeader)(this.toBuffer(sigBuf));
 this.signature = signature;
 var headerOffset = _constantsts.SIGNATURE_HEADER_SIZE + signature.nextHeaderOffset;
 headerBuf = this.source.read(headerOffset, signature.nextHeaderSize);
@@ -297,7 +304,7 @@ var SevenZipParser = /*#__PURE__*/ function() {
 try {
 var _ref;
 var _this_signature;
-var headerResult = (0, _headersts.parseEncodedHeader)(headerBuf, (_ref = (_this_signature = this.signature) === null || _this_signature === void 0 ? void 0 : _this_signature.nextHeaderCRC) !== null && _ref !== void 0 ? _ref : 0);
+var headerResult = (0, _headersts.parseEncodedHeader)(this.toBuffer(headerBuf), (_ref = (_this_signature = this.signature) === null || _this_signature === void 0 ? void 0 : _this_signature.nextHeaderCRC) !== null && _ref !== void 0 ? _ref : 0);
 this.streamsInfo = headerResult.streamsInfo || null;
 this.filesInfo = headerResult.filesInfo;
 finalize();
@@ -321,15 +328,17 @@ var SevenZipParser = /*#__PURE__*/ function() {
 */ _proto.handleCompressedHeader = function handleCompressedHeader(headerBuf, callback) {
 var _this = this;
 // Parse the encoded header info to get decompression parameters
+// Convert to Buffer for header parsing (small data)
+var headerBuffer = this.toBuffer(headerBuf);
 var offset = 1; // Skip kEncodedHeader byte
-var propertyId =
+var propertyId = headerBuffer[offset++];
 if (propertyId !== _constantsts.PropertyId.kMainStreamsInfo && propertyId !== _constantsts.PropertyId.kPackInfo) {
 callback((0, _constantsts.createCodedError)('Expected StreamsInfo in encoded header', _constantsts.ErrorCode.CORRUPT_HEADER));
 return;
 }
 var packInfoResult;
 try {
-packInfoResult = this.parseEncodedHeaderStreams(
+packInfoResult = this.parseEncodedHeaderStreams(headerBuffer, 1);
 } catch (err) {
 callback(err);
 return;
@@ -344,7 +353,9 @@ var SevenZipParser = /*#__PURE__*/ function() {
 var searchEnd = Math.max(_constantsts.SIGNATURE_HEADER_SIZE, compressedStart - 100000);
 var scanChunkSize = 4096;
 for(var chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
-var
+var chunkRaw = this.source.read(chunkStart, scanChunkSize + packInfoResult.packSize);
+// Convert to Buffer for scanning (small data)
+var chunk = this.toBuffer(chunkRaw);
 var limit = Math.min(chunk.length, scanChunkSize);
 for(var i = 0; i < limit; i++){
 if (chunk[i] === 0x00) {
@@ -886,7 +897,8 @@ var SevenZipParser = /*#__PURE__*/ function() {
 _proto.decodeFolderCoders = function decodeFolderCoders(folder, input, index, callback) {
 var _this = this;
 if (index >= folder.coders.length) {
-
+// Convert BufferList to Buffer for final output
+callback(null, this.toBuffer(input));
 return;
 }
 var coderInfo = folder.coders[index];
@@ -1005,7 +1017,8 @@ var SevenZipParser = /*#__PURE__*/ function() {
 bcj2Inputs.push(coderOutputs[boundOutput]);
 } else {
 var psIdx = inputToPackStream[globalIdx];
-
+// Convert BufferList to Buffer for BCJ2 (small data, safe to concatenate)
+bcj2Inputs.push(this.toBuffer(packStreams[psIdx]));
 }
 }
 var bcj2OutputStart = 0;