7z-iterator 0.2.8 → 0.2.10
This diff shows the publicly released contents of these package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/SevenZipIterator.js.map +1 -1
- package/dist/cjs/compat.js.map +1 -1
- package/dist/cjs/lib/streamToSource.js.map +1 -1
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/NumberCodec.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.d.cts +1 -0
- package/dist/cjs/sevenz/SevenZipParser.d.ts +1 -0
- package/dist/cjs/sevenz/SevenZipParser.js +18 -0
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Aes.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.js +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm64.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArmt.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjIa64.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjPpc.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjSparc.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/cjs/sevenz/codecs/index.js.map +1 -1
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.cts +1 -1
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.ts +1 -1
- package/dist/cjs/sevenz/codecs/lzmaCompat.js.map +1 -1
- package/dist/cjs/sevenz/codecs/streams.js.map +1 -1
- package/dist/cjs/sevenz/constants.d.cts +8 -8
- package/dist/cjs/sevenz/constants.d.ts +8 -8
- package/dist/cjs/sevenz/constants.js.map +1 -1
- package/dist/cjs/sevenz/headers.js +1 -1
- package/dist/cjs/sevenz/headers.js.map +1 -1
- package/dist/esm/FileEntry.js +1 -1
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/SevenZipIterator.js +6 -6
- package/dist/esm/SevenZipIterator.js.map +1 -1
- package/dist/esm/compat.js +2 -2
- package/dist/esm/compat.js.map +1 -1
- package/dist/esm/lib/streamToSource.js +12 -12
- package/dist/esm/lib/streamToSource.js.map +1 -1
- package/dist/esm/nextEntry.js +11 -11
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/NumberCodec.js +19 -19
- package/dist/esm/sevenz/NumberCodec.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.d.ts +1 -0
- package/dist/esm/sevenz/SevenZipParser.js +207 -189
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/Aes.js +29 -29
- package/dist/esm/sevenz/codecs/Aes.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj.js +8 -8
- package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj2.js +35 -35
- package/dist/esm/sevenz/codecs/Bcj2.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm.js +4 -4
- package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm64.js +5 -5
- package/dist/esm/sevenz/codecs/BcjArm64.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArmt.js +10 -10
- package/dist/esm/sevenz/codecs/BcjArmt.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjIa64.js +24 -24
- package/dist/esm/sevenz/codecs/BcjIa64.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjPpc.js +5 -5
- package/dist/esm/sevenz/codecs/BcjPpc.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjSparc.js +8 -8
- package/dist/esm/sevenz/codecs/BcjSparc.js.map +1 -1
- package/dist/esm/sevenz/codecs/Delta.js +6 -6
- package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.js +16 -16
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.js +35 -35
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js +3 -3
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/esm/sevenz/codecs/index.js +6 -6
- package/dist/esm/sevenz/codecs/index.js.map +1 -1
- package/dist/esm/sevenz/codecs/lzmaCompat.d.ts +1 -1
- package/dist/esm/sevenz/codecs/lzmaCompat.js +5 -5
- package/dist/esm/sevenz/codecs/lzmaCompat.js.map +1 -1
- package/dist/esm/sevenz/codecs/streams.js +13 -13
- package/dist/esm/sevenz/codecs/streams.js.map +1 -1
- package/dist/esm/sevenz/constants.d.ts +8 -8
- package/dist/esm/sevenz/constants.js +9 -9
- package/dist/esm/sevenz/constants.js.map +1 -1
- package/dist/esm/sevenz/headers.js +134 -134
- package/dist/esm/sevenz/headers.js.map +1 -1
- package/package.json +20 -20
package/dist/esm/sevenz/SevenZipParser.js (removed-line text that was not captured by this diff view is shown as …)

@@ -36,8 +36,8 @@ import { readNumber } from './NumberCodec.js';
  * File descriptor based archive source
  */ export class FileSource {
     read(position, length) {
-        …
-        …
+        const buf = allocBuffer(length);
+        const bytesRead = fs.readSync(this.fd, buf, 0, length, position);
         if (bytesRead < length) {
             return buf.slice(0, bytesRead);
         }
@@ -67,24 +67,24 @@ import { readNumber } from './NumberCodec.js';
     */ parse() {
         if (this.parsed) return;
         // Read signature header
-        …
+        const sigBuf = this.source.read(0, SIGNATURE_HEADER_SIZE);
         if (sigBuf.length < SIGNATURE_HEADER_SIZE) {
             throw createCodedError('Archive too small', ErrorCode.TRUNCATED_ARCHIVE);
         }
         this.signature = parseSignatureHeader(sigBuf);
         // Read encoded header
-        …
-        …
+        const headerOffset = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
+        const headerBuf = this.source.read(headerOffset, this.signature.nextHeaderSize);
         if (headerBuf.length < this.signature.nextHeaderSize) {
             throw createCodedError('Truncated header', ErrorCode.TRUNCATED_ARCHIVE);
         }
         // Parse encoded header (may need decompression)
         try {
-            …
+            const headerResult = parseEncodedHeader(headerBuf, this.signature.nextHeaderCRC);
             this.streamsInfo = headerResult.streamsInfo || null;
             this.filesInfo = headerResult.filesInfo;
         } catch (err) {
-            …
+            const codedErr = err;
             if (codedErr && codedErr.code === ErrorCode.COMPRESSED_HEADER) {
                 // Header is compressed - need to decompress first
                 this.handleCompressedHeader(headerBuf);
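For orientation, the signature header being read at offset 0 is the fixed 32-byte block (`SIGNATURE_HEADER_SIZE`) that starts every 7z archive; its `nextHeaderOffset`/`nextHeaderSize` fields locate the metadata header near the end of the file. A minimal sketch of that layout, following the published 7z format description (`parseSignatureHeaderSketch` is a hypothetical illustration; the package's real parser lives in `sevenz/headers.js`):

```js
// Hypothetical sketch of the 32-byte 7z signature header
// (layout per the public 7z format spec; field names mirror the diff).
function parseSignatureHeaderSketch(buf) {
    // Bytes 0-5: magic "7z\xBC\xAF\x27\x1C"
    if (buf[0] !== 0x37 || buf[1] !== 0x7a) throw new Error('Not a 7z archive');
    return {
        version: { major: buf[6], minor: buf[7] },
        startHeaderCRC: buf.readUInt32LE(8),               // CRC of bytes 12..31
        nextHeaderOffset: Number(buf.readBigUInt64LE(12)), // counted from byte 32
        nextHeaderSize: Number(buf.readBigUInt64LE(20)),
        nextHeaderCRC: buf.readUInt32LE(28)
    };
}
```

This is why `parse()` reads the metadata header at `SIGNATURE_HEADER_SIZE + nextHeaderOffset`: the offset is relative to the end of the signature header, not to the start of the file.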
@@ -100,31 +100,31 @@ import { readNumber } from './NumberCodec.js';
      * Handle compressed header (kEncodedHeader)
     */ handleCompressedHeader(headerBuf) {
         // Parse the encoded header info to get decompression parameters
-        …
+        let offset = 1; // Skip kEncodedHeader byte
         // Should have StreamsInfo for the header itself
-        …
+        const propertyId = headerBuf[offset++];
         if (propertyId !== PropertyId.kMainStreamsInfo && propertyId !== PropertyId.kPackInfo) {
             throw createCodedError('Expected StreamsInfo in encoded header', ErrorCode.CORRUPT_HEADER);
         }
         // For now, we parse the streams info from the encoded header block
         // This tells us how to decompress the actual header
         // Read pack info from the encoded header structure
-        …
+        const packInfoResult = this.parseEncodedHeaderStreams(headerBuf, 1);
         // Calculate compressed header position
         // For simple archives: header is at SIGNATURE_HEADER_SIZE + packPos
         // For BCJ2/complex archives: header may be at the END of pack data area
         // The pack data area ends at nextHeaderOffset (where encoded header starts)
-        …
-        …
+        const compressedStart = SIGNATURE_HEADER_SIZE + packInfoResult.packPos;
+        const compressedData = this.source.read(compressedStart, packInfoResult.packSize);
         // Decompress using the specified codec
-        …
-        …
+        const codec = getCodec(packInfoResult.codecId);
+        let decompressedHeader = null;
         // Try decompressing from the calculated position first
         try {
             decompressedHeader = codec.decode(compressedData, packInfoResult.properties, packInfoResult.unpackSize);
             // Verify CRC if present
             if (packInfoResult.unpackCRC !== undefined) {
-                …
+                const actualCRC = crc32(decompressedHeader);
                 if (actualCRC !== packInfoResult.unpackCRC) {
                     decompressedHeader = null; // CRC mismatch, need to search
                 }
@@ -135,22 +135,22 @@ import { readNumber } from './NumberCodec.js';
         // If initial decompression failed, search for the correct position as a fallback
         // This handles edge cases where packPos doesn't point directly to header pack data
         if (decompressedHeader === null && this.signature) {
-            …
-            …
-            …
+            const packAreaEnd = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
+            const searchStart = packAreaEnd - packInfoResult.packSize;
+            const searchEnd = Math.max(SIGNATURE_HEADER_SIZE, compressedStart - 100000);
             // Scan for LZMA data starting with 0x00 (range coder init)
             // Try each candidate and validate with CRC
-            …
-            searchLoop: for(…
-            …
-                for(…
+            const scanChunkSize = 4096;
+            searchLoop: for(let chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
+                const chunk = this.source.read(chunkStart, scanChunkSize + packInfoResult.packSize);
+                for(let i = 0; i < Math.min(chunk.length, scanChunkSize); i++){
                     if (chunk[i] === 0x00) {
-                        …
+                        const candidateData = chunk.subarray(i, i + packInfoResult.packSize);
                         if (candidateData.length === packInfoResult.packSize) {
                             try {
-                                …
+                                const candidateDecompressed = codec.decode(candidateData, packInfoResult.properties, packInfoResult.unpackSize);
                                 if (packInfoResult.unpackCRC !== undefined) {
-                                    …
+                                    const candCRC = crc32(candidateDecompressed);
                                     if (candCRC === packInfoResult.unpackCRC) {
                                         decompressedHeader = candidateDecompressed;
                                         break searchLoop;
@@ -172,13 +172,13 @@ import { readNumber } from './NumberCodec.js';
         }
         // Now parse the decompressed header
         // It should start with kHeader
-        …
-        …
+        let decompOffset = 0;
+        const headerId = decompressedHeader[decompOffset++];
         if (headerId !== PropertyId.kHeader) {
             throw createCodedError('Expected kHeader in decompressed header', ErrorCode.CORRUPT_HEADER);
         }
         // Parse the decompressed header using shared function from headers.ts
-        …
+        const result = parseHeaderContent(decompressedHeader, decompOffset);
         this.streamsInfo = result.streamsInfo || null;
         this.filesInfo = result.filesInfo;
     }
@@ -187,30 +187,30 @@ import { readNumber } from './NumberCodec.js';
      * This is a simplified parser for the header's own compression info
     */ parseEncodedHeaderStreams(buf, offset) {
         // This is a simplified parser for the encoded header's own streams info
-        …
-        …
-        …
-        …
-        …
-        …
+        let packPos = 0;
+        let packSize = 0;
+        let unpackSize = 0;
+        let codecId = [];
+        let properties;
+        let unpackCRC;
         while(offset < buf.length){
-            …
+            const propertyId = buf[offset++];
             if (propertyId === PropertyId.kEnd) {
                 break;
             }
             switch(propertyId){
                 case PropertyId.kPackInfo:
                     {
-                        …
+                        const packPosResult = readNumber(buf, offset);
                         packPos = packPosResult.value;
                         offset += packPosResult.bytesRead;
-                        …
+                        const numPackResult = readNumber(buf, offset);
                         offset += numPackResult.bytesRead;
                         // Read until kEnd
                         while(buf[offset] !== PropertyId.kEnd){
                             if (buf[offset] === PropertyId.kSize) {
                                 offset++;
-                                …
+                                const sizeResult = readNumber(buf, offset);
                                 packSize = sizeResult.value;
                                 offset += sizeResult.bytesRead;
                             } else {
@@ -225,21 +225,21 @@ import { readNumber } from './NumberCodec.js';
                         while(offset < buf.length && buf[offset] !== PropertyId.kEnd){
                             if (buf[offset] === PropertyId.kFolder) {
                                 offset++;
-                                …
+                                const numFoldersResult = readNumber(buf, offset);
                                 offset += numFoldersResult.bytesRead;
                                 offset++; // external flag
                                 // Parse coder
-                                …
+                                const numCodersResult = readNumber(buf, offset);
                                 offset += numCodersResult.bytesRead;
-                                …
-                                …
-                                …
+                                const flags = buf[offset++];
+                                const idSize = flags & 0x0f;
+                                const hasAttributes = (flags & 0x20) !== 0;
                                 codecId = [];
-                                for(…
+                                for(let i = 0; i < idSize; i++){
                                     codecId.push(buf[offset++]);
                                 }
                                 if (hasAttributes) {
-                                    …
+                                    const propsLenResult = readNumber(buf, offset);
                                     offset += propsLenResult.bytesRead;
                                     properties = buf.slice(offset, offset + propsLenResult.value);
                                     offset += propsLenResult.value;
@@ -247,12 +247,12 @@ import { readNumber } from './NumberCodec.js';
                             } else if (buf[offset] === PropertyId.kCodersUnpackSize) {
                                 offset++;
                                 // Read unpack size - needed for LZMA decoder
-                                …
+                                const unpackSizeResult = readNumber(buf, offset);
                                 unpackSize = unpackSizeResult.value;
                                 offset += unpackSizeResult.bytesRead;
                             } else if (buf[offset] === PropertyId.kCRC) {
                                 offset++;
-                                …
+                                const allDefined = buf[offset++];
                                 if (allDefined) {
                                     unpackCRC = buf.readUInt32LE(offset);
                                     offset += 4;
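`readNumber(buf, offset)`, used throughout these hunks, returns `{ value, bytesRead }` for the 7z variable-length integer encoding: the number of leading 1-bits in the first byte gives the count of little-endian extra bytes, and the first byte's remaining low bits supply the most significant part of the value. A sketch of that decoding, assuming the documented encoding (`readNumberSketch` is illustrative; the package's implementation is `sevenz/NumberCodec.js`):

```js
// Hypothetical sketch of 7z variable-length number decoding.
function readNumberSketch(buf, offset) {
    const first = buf[offset];
    // Count leading 1-bits: each one adds a little-endian extra byte.
    let extra = 0;
    while (extra < 8 && (first & (0x80 >> extra)) !== 0) extra++;
    let value = 0n;
    for (let i = 0; i < extra; i++) {
        value |= BigInt(buf[offset + 1 + i]) << BigInt(8 * i);
    }
    if (extra < 8) {
        // Low bits of the first byte become the most significant part.
        const highMask = (0x80 >> extra) - 1;
        value |= BigInt(first & highMask) << BigInt(8 * extra);
    }
    return { value: Number(value), bytesRead: 1 + extra };
}
```

Values up to 127 therefore cost a single byte, and a full 64-bit value tops out at nine bytes (a 0xFF prefix plus eight data bytes).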
@@ -280,33 +280,37 @@ import { readNumber } from './NumberCodec.js';
         this.entries = [];
         if (!this.streamsInfo) {
             // No streams info - just create entries from file info
-            for(…
-            …
+            for(let i = 0; i < this.filesInfo.length; i++){
+                const file = this.filesInfo[i];
                 this.entries.push(this.createEntry(file, 0, 0, 0));
             }
             return;
         }
         // Use the properly parsed numUnpackStreamsPerFolder from the archive header
-        …
+        const streamsPerFolder = this.streamsInfo.numUnpackStreamsPerFolder;
         // Initialize files per folder count (for smart caching)
-        for(…
+        for(let f = 0; f < streamsPerFolder.length; f++){
             this.filesPerFolder[f] = streamsPerFolder[f];
             this.extractedPerFolder[f] = 0;
         }
         // Now build entries with proper folder/stream tracking
-        …
-        …
-        …
-        …
-        for(…
-        …
+        let streamIndex = 0;
+        let folderIndex = 0;
+        let streamInFolder = 0;
+        let folderStreamCount = streamsPerFolder[0] || 0;
+        for(let j = 0; j < this.filesInfo.length; j++){
+            const fileInfo = this.filesInfo[j];
             // Get size from unpackSizes for files with streams
-            …
+            let size = 0;
             if (fileInfo.hasStream && streamIndex < this.streamsInfo.unpackSizes.length) {
                 size = this.streamsInfo.unpackSizes[streamIndex];
             }
-            …
+            const entry = this.createEntry(fileInfo, size, folderIndex, streamInFolder);
             entry._streamIndex = streamIndex;
+            // Set CRC if available
+            if (fileInfo.hasStream && this.streamsInfo.unpackCRCs && this.streamsInfo.unpackCRCs[streamIndex] !== undefined) {
+                entry._crc = this.streamsInfo.unpackCRCs[streamIndex];
+            }
             this.entries.push(entry);
             // Advance stream tracking for files with streams
             if (fileInfo.hasStream) {
@@ -327,12 +331,12 @@ import { readNumber } from './NumberCodec.js';
         // Determine entry type
         // Note: 7z format doesn't natively support symlinks. p7zip with -snl stores
         // symlinks as regular files with the target path as content.
-        …
+        let type = 'file';
         if (file.isDirectory) {
             type = 'directory';
         }
         // Calculate mode from Windows attributes
-        …
+        let mode;
         if (file.attributes !== undefined) {
             // Check for Unix extension bit
             if ((file.attributes & FileAttribute.UNIX_EXTENSION) !== 0) {
@@ -377,7 +381,7 @@ import { readNumber } from './NumberCodec.js';
     */ getEntryStream(entry) {
         if (!entry._hasStream || entry.type === 'directory') {
             // Return empty stream for directories and empty files
-            …
+            const emptyStream = new PassThrough();
             emptyStream.end();
             return emptyStream;
         }
@@ -385,37 +389,44 @@ import { readNumber } from './NumberCodec.js';
             throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
         }
         // Get folder info
-        …
+        const folder = this.streamsInfo.folders[entry._folderIndex];
         if (!folder) {
             throw createCodedError('Invalid folder index', ErrorCode.CORRUPT_HEADER);
         }
         // Check codec support
-        for(…
-        …
+        for(let i = 0; i < folder.coders.length; i++){
+            const coder = folder.coders[i];
            if (!isCodecSupported(coder.id)) {
-                …
+                const codecName = getCodecName(coder.id);
                 throw createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC);
             }
         }
         // Get decompressed data for this folder (with smart caching)
-        …
-        …
+        const folderIdx = entry._folderIndex;
+        const data = this.getDecompressedFolder(folderIdx);
         // Calculate file offset within the decompressed block
         // For solid archives, multiple files are concatenated in the block
-        …
-        for(…
+        let fileStart = 0;
+        for(let m = 0; m < entry._streamIndexInFolder; m++){
             // Sum sizes of all streams before this one in the folder
-            …
+            const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
             fileStart += this.streamsInfo.unpackSizes[prevStreamGlobalIndex];
         }
-        …
+        const fileSize = entry.size;
         // Create a PassThrough stream with the file data
-        …
+        const outputStream = new PassThrough();
         // Bounds check to prevent "oob" error on older Node versions
         if (fileStart + fileSize > data.length) {
             throw createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED);
         }
-        …
+        const fileData = data.slice(fileStart, fileStart + fileSize);
+        // Verify CRC if present
+        if (entry._crc !== undefined) {
+            const actualCRC = crc32(fileData);
+            if (actualCRC !== entry._crc) {
+                throw createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH);
+            }
+        }
         outputStream.end(fileData);
         // Track extraction and release cache when all files from this folder are done
         this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
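The per-entry CRC checks introduced here (`entry._crc`, `ErrorCode.CRC_MISMATCH`) rely on the standard CRC-32 that the 7z format uses for header and stream digests (reflected polynomial 0xEDB88320). For reference, a minimal table-driven sketch of such a `crc32()` — the package ships its own implementation alongside the parser:

```js
// Standard CRC-32 (as used by zip/7z/gzip), table-driven sketch.
const CRC_TABLE = new Uint32Array(256);
for (let n = 0; n < 256; n++) {
    let c = n;
    for (let k = 0; k < 8; k++) c = (c & 1) ? 0xEDB88320 ^ (c >>> 1) : c >>> 1;
    CRC_TABLE[n] = c >>> 0;
}

function crc32(buf) {
    let crc = 0xFFFFFFFF;
    for (let i = 0; i < buf.length; i++) {
        crc = CRC_TABLE[(crc ^ buf[i]) & 0xFF] ^ (crc >>> 8);
    }
    return (crc ^ 0xFFFFFFFF) >>> 0;
}
```

Checking this digest against the `unpackCRCs[streamIndex]` value captured from the archive header is what lets 0.2.10 fail extraction with `CRC_MISMATCH` instead of silently returning corrupted data.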
@@ -431,7 +442,7 @@ import { readNumber } from './NumberCodec.js';
     */ getEntryStreamAsync(entry, callback) {
         if (!entry._hasStream || entry.type === 'directory') {
             // Return empty stream for directories and empty files
-            …
+            const emptyStream = new PassThrough();
             emptyStream.end();
             callback(null, emptyStream);
             return;
@@ -441,40 +452,47 @@ import { readNumber } from './NumberCodec.js';
             return;
         }
         // Get folder info
-        …
+        const folder = this.streamsInfo.folders[entry._folderIndex];
         if (!folder) {
             callback(createCodedError('Invalid folder index', ErrorCode.CORRUPT_HEADER));
             return;
         }
         // Check codec support
-        for(…
-        …
+        for(let i = 0; i < folder.coders.length; i++){
+            const coder = folder.coders[i];
             if (!isCodecSupported(coder.id)) {
-                …
+                const codecName = getCodecName(coder.id);
                 callback(createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC));
                 return;
             }
         }
         // Get decompressed data for this folder using async method
-        …
-        …
+        const folderIdx = entry._folderIndex;
+        const streamsInfo = this.streamsInfo;
         this.getDecompressedFolderAsync(folderIdx, (err, data)=>{
             if (err) return callback(err);
             if (!data) return callback(new Error('No data returned from decompression'));
             // Calculate file offset within the decompressed block
-            …
-            for(…
-            …
+            let fileStart = 0;
+            for(let m = 0; m < entry._streamIndexInFolder; m++){
+                const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
                 fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
             }
-            …
+            const fileSize = entry.size;
             // Bounds check
             if (fileStart + fileSize > data.length) {
                 return callback(createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED));
             }
             // Create a PassThrough stream with the file data
-            …
-            …
+            const outputStream = new PassThrough();
+            const fileData = data.slice(fileStart, fileStart + fileSize);
+            // Verify CRC if present
+            if (entry._crc !== undefined) {
+                const actualCRC = crc32(fileData);
+                if (actualCRC !== entry._crc) {
+                    return callback(createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH));
+                }
+            }
             outputStream.end(fileData);
             // Track extraction and release cache when all files from this folder are done
             this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
@@ -487,7 +505,7 @@ import { readNumber } from './NumberCodec.js';
     /**
      * Check if a folder uses BCJ2 codec
     */ folderHasBcj2(folder) {
-        for(…
+        for(let i = 0; i < folder.coders.length; i++){
            if (isBcj2Codec(folder.coders[i].id)) {
                return true;
            }
@@ -505,42 +523,42 @@ import { readNumber } from './NumberCodec.js';
         if (!this.streamsInfo) {
             throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
         }
-        …
+        const folder = this.streamsInfo.folders[folderIndex];
         // Check how many files remain in this folder
-        …
-        …
-        …
+        const filesInFolder = this.filesPerFolder[folderIndex] || 1;
+        const extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+        const remainingFiles = filesInFolder - extractedFromFolder;
         // Only cache if more than 1 file remains (including the current one being extracted)
-        …
+        const shouldCache = remainingFiles > 1;
         // Check if this folder uses BCJ2 (requires special multi-stream handling)
         if (this.folderHasBcj2(folder)) {
-            …
+            const data = this.decompressBcj2Folder(folderIndex);
             if (shouldCache) {
                 this.decompressedCache[folderIndex] = data;
             }
             return data;
         }
         // Calculate packed data position
-        …
+        let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
         // Find which pack stream this folder uses
-        …
-        for(…
+        let packStreamIndex = 0;
+        for(let j = 0; j < folderIndex; j++){
             packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
         }
         // Calculate position of this pack stream
-        for(…
+        for(let k = 0; k < packStreamIndex; k++){
             packPos += this.streamsInfo.packSizes[k];
         }
-        …
+        const packSize = this.streamsInfo.packSizes[packStreamIndex];
         // Read packed data
-        …
+        const packedData = this.source.read(packPos, packSize);
         // Decompress through codec chain
-        …
-        for(…
-        …
-        …
+        let data2 = packedData;
+        for(let l = 0; l < folder.coders.length; l++){
+            const coderInfo = folder.coders[l];
+            const codec = getCodec(coderInfo.id);
             // Get unpack size for this coder (needed by LZMA)
-            …
+            const unpackSize = folder.unpackSizes[l];
             data2 = codec.decode(data2, coderInfo.properties, unpackSize);
         }
         // Cache only if more files remain in this folder
@@ -553,7 +571,7 @@ import { readNumber } from './NumberCodec.js';
      * Get decompressed data for a folder using streaming (callback-based async)
      * Uses createDecoder() streams for non-blocking decompression
     */ getDecompressedFolderAsync(folderIndex, callback) {
-        …
+        const self = this;
         // Check cache first
         if (this.decompressedCache[folderIndex]) {
             callback(null, this.decompressedCache[folderIndex]);
@@ -563,17 +581,17 @@ import { readNumber } from './NumberCodec.js';
             callback(createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER));
             return;
         }
-        …
+        const folder = this.streamsInfo.folders[folderIndex];
         // Check how many files remain in this folder
-        …
-        …
-        …
-        …
+        const filesInFolder = this.filesPerFolder[folderIndex] || 1;
+        const extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+        const remainingFiles = filesInFolder - extractedFromFolder;
+        const shouldCache = remainingFiles > 1;
         // BCJ2 requires special handling - use sync version for now
         // TODO: Add async BCJ2 support
         if (this.folderHasBcj2(folder)) {
             try {
-                …
+                const data = this.decompressBcj2Folder(folderIndex);
                 if (shouldCache) {
                     this.decompressedCache[folderIndex] = data;
                 }
@@ -584,29 +602,29 @@ import { readNumber } from './NumberCodec.js';
             return;
         }
         // Calculate packed data position
-        …
+        let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
         // Find which pack stream this folder uses
-        …
-        for(…
+        let packStreamIndex = 0;
+        for(let j = 0; j < folderIndex; j++){
             packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
         }
         // Calculate position of this pack stream
-        for(…
+        for(let k = 0; k < packStreamIndex; k++){
             packPos += this.streamsInfo.packSizes[k];
         }
-        …
+        const packSize = this.streamsInfo.packSizes[packStreamIndex];
         // Read packed data
-        …
+        const packedData = this.source.read(packPos, packSize);
         // Create decoder stream chain and decompress
-        …
-        …
+        const coders = folder.coders;
+        const unpackSizes = folder.unpackSizes;
         // Helper to decompress through a single codec stream
         function decompressWithStream(input, coderIdx, cb) {
-            …
-            …
-            …
-            …
-            …
+            const coderInfo = coders[coderIdx];
+            const codec = getCodec(coderInfo.id);
+            const decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
+            const chunks = [];
+            let errorOccurred = false;
             decoder.on('data', (chunk)=>{
                 chunks.push(chunk);
             });
@@ -654,24 +672,24 @@ import { readNumber } from './NumberCodec.js';
         if (!this.streamsInfo) {
             throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
         }
-        …
+        const folder = this.streamsInfo.folders[folderIndex];
         // Calculate starting pack position
-        …
+        let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
         // Find which pack stream index this folder starts at
-        …
-        for(…
+        let packStreamIndex = 0;
+        for(let j = 0; j < folderIndex; j++){
             packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
         }
         // Calculate position
-        for(…
+        for(let k = 0; k < packStreamIndex; k++){
             packPos += this.streamsInfo.packSizes[k];
         }
         // Read all pack streams for this folder
-        …
-        …
-        …
-        for(…
-        …
+        const numPackStreams = folder.packedStreams.length;
+        const packStreams = [];
+        let currentPos = packPos;
+        for(let p = 0; p < numPackStreams; p++){
+            const size = this.streamsInfo.packSizes[packStreamIndex + p];
             packStreams.push(this.source.read(currentPos, size));
             currentPos += size;
         }
@@ -683,10 +701,10 @@ import { readNumber } from './NumberCodec.js';
         // Coder 3: BCJ2 - 4 in, 1 out
         // Pack streams map to: coder inputs not bound to other coder outputs
         // First, decompress each non-BCJ2 coder
-        …
+        const coderOutputs = {};
         // Find the BCJ2 coder
-        …
-        for(…
+        let bcj2CoderIndex = -1;
+        for(let c = 0; c < folder.coders.length; c++){
             if (isBcj2Codec(folder.coders[c].id)) {
                 bcj2CoderIndex = c;
                 break;
@@ -697,44 +715,44 @@ import { readNumber } from './NumberCodec.js';
         }
         // Build input stream index -> pack stream mapping
         // folder.packedStreams tells us which input indices are unbound and their order
-        …
-        for(…
+        const inputToPackStream = {};
+        for(let pi = 0; pi < folder.packedStreams.length; pi++){
             inputToPackStream[folder.packedStreams[pi]] = pi;
         }
         // Build output stream index -> coder mapping
-        …
-        …
-        for(…
-        …
-            for(…
+        const outputToCoder = {};
+        let totalOutputs = 0;
+        for(let co = 0; co < folder.coders.length; co++){
+            const numOut = folder.coders[co].numOutStreams;
+            for(let outp = 0; outp < numOut; outp++){
                 outputToCoder[totalOutputs + outp] = co;
             }
             totalOutputs += numOut;
         }
         // Decompress non-BCJ2 coders (LZMA, LZMA2)
         // We need to process in dependency order
-        …
-        …
-        for(…
-        …
+        const processed = {};
+        const processOrder = this.getCoderProcessOrder(folder, bcj2CoderIndex);
+        for(let po = 0; po < processOrder.length; po++){
+            const coderIdx = processOrder[po];
             if (coderIdx === bcj2CoderIndex) continue;
-            …
-            …
+            const coder = folder.coders[coderIdx];
+            const codec = getCodec(coder.id);
             // Find input for this coder
-            …
-            for(…
+            let coderInputStart = 0;
+            for(let ci2 = 0; ci2 < coderIdx; ci2++){
                 coderInputStart += folder.coders[ci2].numInStreams;
             }
             // Get input data (from pack stream)
-            …
-            …
-            …
+            const inputIdx = coderInputStart;
+            const packStreamIdx = inputToPackStream[inputIdx];
+            const inputData = packStreams[packStreamIdx];
             // Decompress
-            …
-            …
+            const unpackSize = folder.unpackSizes[coderIdx];
+            const outputData = codec.decode(inputData, coder.properties, unpackSize);
             // Store in coder outputs
-            …
-            for(…
+            let coderOutputStart = 0;
+            for(let co2 = 0; co2 < coderIdx; co2++){
                 coderOutputStart += folder.coders[co2].numOutStreams;
             }
             coderOutputs[coderOutputStart] = outputData;
@@ -743,16 +761,16 @@ import { readNumber } from './NumberCodec.js';
         // Now process BCJ2
         // BCJ2 has 4 inputs, need to map them correctly
         // Standard order: main(LZMA2 output), call(LZMA output), jump(LZMA output), range(raw pack)
-        …
-        for(…
+        let bcj2InputStart = 0;
+        for(let ci3 = 0; ci3 < bcj2CoderIndex; ci3++){
             bcj2InputStart += folder.coders[ci3].numInStreams;
         }
-        …
-        for(…
-        …
+        const bcj2Inputs = [];
+        for(let bi = 0; bi < 4; bi++){
+            const globalIdx = bcj2InputStart + bi;
             // Check if this input is bound to a coder output
-            …
-            for(…
+            let boundOutput = -1;
+            for(let bp2 = 0; bp2 < folder.bindPairs.length; bp2++){
                 if (folder.bindPairs[bp2].inIndex === globalIdx) {
                     boundOutput = folder.bindPairs[bp2].outIndex;
                     break;
@@ -763,19 +781,19 @@ import { readNumber } from './NumberCodec.js';
                 bcj2Inputs.push(coderOutputs[boundOutput]);
             } else {
                 // Get from pack streams
-                …
+                const psIdx = inputToPackStream[globalIdx];
                 bcj2Inputs.push(packStreams[psIdx]);
             }
         }
         // Get BCJ2 unpack size
-        …
-        for(…
+        let bcj2OutputStart = 0;
+        for(let co3 = 0; co3 < bcj2CoderIndex; co3++){
             bcj2OutputStart += folder.coders[co3].numOutStreams;
         }
-        …
+        const bcj2UnpackSize = folder.unpackSizes[bcj2OutputStart];
         // Memory optimization: Clear intermediate buffers to help GC
         // These are no longer needed after bcj2Inputs is built
-        for(…
+        for(const key in coderOutputs){
             delete coderOutputs[key];
         }
         // Clear packStreams array (allows GC to free compressed data)
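The BCJ2 hunks above all navigate the same 7z folder model: a folder is a small dataflow graph in which every coder declares `numInStreams`/`numOutStreams`, `bindPairs` wire one coder's output to another coder's input, and `packedStreams` lists, in order, the input indices fed directly from packed data. A sketch of resolving a single input under those field names (an illustrative helper, not part of the package):

```js
// Resolve where a coder input gets its bytes: another coder's output
// (via a bind pair) or a raw pack stream (via packedStreams order).
function resolveInput(folder, globalInIndex) {
    for (const pair of folder.bindPairs) {
        if (pair.inIndex === globalInIndex) {
            return { from: 'coder-output', outIndex: pair.outIndex };
        }
    }
    const packOrder = folder.packedStreams.indexOf(globalInIndex);
    if (packOrder >= 0) return { from: 'pack-stream', packStreamIndex: packOrder };
    throw new Error(`unbound coder input ${globalInIndex}`);
}
```

`getCoderProcessOrder` in the next hunk is the scheduling counterpart: it repeatedly admits coders whose bound inputs come only from already-processed coders, a fixed-point pass that amounts to a topological sort of the bind-pair graph.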
@@ -786,30 +804,30 @@ import { readNumber } from './NumberCodec.js';
     /**
      * Get processing order for coders (dependency order)
     */ getCoderProcessOrder(folder, excludeIdx) {
-        …
-        …
+        const order = [];
+        const processed = {};
         // Simple approach: process coders that don't depend on unprocessed outputs
-        …
+        let changed = true;
         while(changed){
             changed = false;
-            for(…
+            for(let c = 0; c < folder.coders.length; c++){
                 if (processed[c] || c === excludeIdx) continue;
                 // Check if all inputs are satisfied
-                …
-                for(…
+                let inputStart = 0;
+                for(let i = 0; i < c; i++){
                     inputStart += folder.coders[i].numInStreams;
                 }
-                …
-                for(…
-                …
+                let canProcess = true;
+                for(let inp = 0; inp < folder.coders[c].numInStreams; inp++){
+                    const globalIdx = inputStart + inp;
                     // Check if bound to an unprocessed coder
-                    for(…
+                    for(let bp = 0; bp < folder.bindPairs.length; bp++){
                         if (folder.bindPairs[bp].inIndex === globalIdx) {
                             // Find which coder produces this output
-                            …
-                            …
-                            for(…
-                            …
+                            const outIdx = folder.bindPairs[bp].outIndex;
+                            let outStart = 0;
+                            for(let oc = 0; oc < folder.coders.length; oc++){
+                                const numOut = folder.coders[oc].numOutStreams;
                                 if (outIdx < outStart + numOut) {
                                     if (!processed[oc] && oc !== excludeIdx) {
                                         canProcess = false;
@@ -855,8 +873,8 @@ import { readNumber } from './NumberCodec.js';
 /**
  * Get base name from a path
  */ function getBaseName(path) {
-    …
-    …
-    …
+    const lastSlash = path.lastIndexOf('/');
+    const lastBackslash = path.lastIndexOf('\\');
+    const lastSep = Math.max(lastSlash, lastBackslash);
     return lastSep >= 0 ? path.slice(lastSep + 1) : path;
 }