7z-iterator 0.2.8 → 0.2.10

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (88)
  1. package/dist/cjs/FileEntry.js.map +1 -1
  2. package/dist/cjs/SevenZipIterator.js.map +1 -1
  3. package/dist/cjs/compat.js.map +1 -1
  4. package/dist/cjs/lib/streamToSource.js.map +1 -1
  5. package/dist/cjs/nextEntry.js.map +1 -1
  6. package/dist/cjs/sevenz/NumberCodec.js.map +1 -1
  7. package/dist/cjs/sevenz/SevenZipParser.d.cts +1 -0
  8. package/dist/cjs/sevenz/SevenZipParser.d.ts +1 -0
  9. package/dist/cjs/sevenz/SevenZipParser.js +18 -0
  10. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  11. package/dist/cjs/sevenz/codecs/Aes.js.map +1 -1
  12. package/dist/cjs/sevenz/codecs/Bcj.js +1 -1
  13. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  14. package/dist/cjs/sevenz/codecs/Bcj2.js.map +1 -1
  15. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  16. package/dist/cjs/sevenz/codecs/BcjArm64.js.map +1 -1
  17. package/dist/cjs/sevenz/codecs/BcjArmt.js.map +1 -1
  18. package/dist/cjs/sevenz/codecs/BcjIa64.js.map +1 -1
  19. package/dist/cjs/sevenz/codecs/BcjPpc.js.map +1 -1
  20. package/dist/cjs/sevenz/codecs/BcjSparc.js.map +1 -1
  21. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  22. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  23. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  24. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  25. package/dist/cjs/sevenz/codecs/index.js.map +1 -1
  26. package/dist/cjs/sevenz/codecs/lzmaCompat.d.cts +1 -1
  27. package/dist/cjs/sevenz/codecs/lzmaCompat.d.ts +1 -1
  28. package/dist/cjs/sevenz/codecs/lzmaCompat.js.map +1 -1
  29. package/dist/cjs/sevenz/codecs/streams.js.map +1 -1
  30. package/dist/cjs/sevenz/constants.d.cts +8 -8
  31. package/dist/cjs/sevenz/constants.d.ts +8 -8
  32. package/dist/cjs/sevenz/constants.js.map +1 -1
  33. package/dist/cjs/sevenz/headers.js +1 -1
  34. package/dist/cjs/sevenz/headers.js.map +1 -1
  35. package/dist/esm/FileEntry.js +1 -1
  36. package/dist/esm/FileEntry.js.map +1 -1
  37. package/dist/esm/SevenZipIterator.js +6 -6
  38. package/dist/esm/SevenZipIterator.js.map +1 -1
  39. package/dist/esm/compat.js +2 -2
  40. package/dist/esm/compat.js.map +1 -1
  41. package/dist/esm/lib/streamToSource.js +12 -12
  42. package/dist/esm/lib/streamToSource.js.map +1 -1
  43. package/dist/esm/nextEntry.js +11 -11
  44. package/dist/esm/nextEntry.js.map +1 -1
  45. package/dist/esm/sevenz/NumberCodec.js +19 -19
  46. package/dist/esm/sevenz/NumberCodec.js.map +1 -1
  47. package/dist/esm/sevenz/SevenZipParser.d.ts +1 -0
  48. package/dist/esm/sevenz/SevenZipParser.js +207 -189
  49. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  50. package/dist/esm/sevenz/codecs/Aes.js +29 -29
  51. package/dist/esm/sevenz/codecs/Aes.js.map +1 -1
  52. package/dist/esm/sevenz/codecs/Bcj.js +8 -8
  53. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  54. package/dist/esm/sevenz/codecs/Bcj2.js +35 -35
  55. package/dist/esm/sevenz/codecs/Bcj2.js.map +1 -1
  56. package/dist/esm/sevenz/codecs/BcjArm.js +4 -4
  57. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  58. package/dist/esm/sevenz/codecs/BcjArm64.js +5 -5
  59. package/dist/esm/sevenz/codecs/BcjArm64.js.map +1 -1
  60. package/dist/esm/sevenz/codecs/BcjArmt.js +10 -10
  61. package/dist/esm/sevenz/codecs/BcjArmt.js.map +1 -1
  62. package/dist/esm/sevenz/codecs/BcjIa64.js +24 -24
  63. package/dist/esm/sevenz/codecs/BcjIa64.js.map +1 -1
  64. package/dist/esm/sevenz/codecs/BcjPpc.js +5 -5
  65. package/dist/esm/sevenz/codecs/BcjPpc.js.map +1 -1
  66. package/dist/esm/sevenz/codecs/BcjSparc.js +8 -8
  67. package/dist/esm/sevenz/codecs/BcjSparc.js.map +1 -1
  68. package/dist/esm/sevenz/codecs/Delta.js +6 -6
  69. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  70. package/dist/esm/sevenz/codecs/Lzma.js +16 -16
  71. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  72. package/dist/esm/sevenz/codecs/Lzma2.js +35 -35
  73. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  74. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +3 -3
  75. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  76. package/dist/esm/sevenz/codecs/index.js +6 -6
  77. package/dist/esm/sevenz/codecs/index.js.map +1 -1
  78. package/dist/esm/sevenz/codecs/lzmaCompat.d.ts +1 -1
  79. package/dist/esm/sevenz/codecs/lzmaCompat.js +5 -5
  80. package/dist/esm/sevenz/codecs/lzmaCompat.js.map +1 -1
  81. package/dist/esm/sevenz/codecs/streams.js +13 -13
  82. package/dist/esm/sevenz/codecs/streams.js.map +1 -1
  83. package/dist/esm/sevenz/constants.d.ts +8 -8
  84. package/dist/esm/sevenz/constants.js +9 -9
  85. package/dist/esm/sevenz/constants.js.map +1 -1
  86. package/dist/esm/sevenz/headers.js +134 -134
  87. package/dist/esm/sevenz/headers.js.map +1 -1
  88. package/package.json +20 -20
@@ -36,8 +36,8 @@ import { readNumber } from './NumberCodec.js';
  * File descriptor based archive source
  */ export class FileSource {
  read(position, length) {
- var buf = allocBuffer(length);
- var bytesRead = fs.readSync(this.fd, buf, 0, length, position);
+ const buf = allocBuffer(length);
+ const bytesRead = fs.readSync(this.fd, buf, 0, length, position);
  if (bytesRead < length) {
  return buf.slice(0, bytesRead);
  }
@@ -67,24 +67,24 @@ import { readNumber } from './NumberCodec.js';
  */ parse() {
  if (this.parsed) return;
  // Read signature header
- var sigBuf = this.source.read(0, SIGNATURE_HEADER_SIZE);
+ const sigBuf = this.source.read(0, SIGNATURE_HEADER_SIZE);
  if (sigBuf.length < SIGNATURE_HEADER_SIZE) {
  throw createCodedError('Archive too small', ErrorCode.TRUNCATED_ARCHIVE);
  }
  this.signature = parseSignatureHeader(sigBuf);
  // Read encoded header
- var headerOffset = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
- var headerBuf = this.source.read(headerOffset, this.signature.nextHeaderSize);
+ const headerOffset = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
+ const headerBuf = this.source.read(headerOffset, this.signature.nextHeaderSize);
  if (headerBuf.length < this.signature.nextHeaderSize) {
  throw createCodedError('Truncated header', ErrorCode.TRUNCATED_ARCHIVE);
  }
  // Parse encoded header (may need decompression)
  try {
- var headerResult = parseEncodedHeader(headerBuf, this.signature.nextHeaderCRC);
+ const headerResult = parseEncodedHeader(headerBuf, this.signature.nextHeaderCRC);
  this.streamsInfo = headerResult.streamsInfo || null;
  this.filesInfo = headerResult.filesInfo;
  } catch (err) {
- var codedErr = err;
+ const codedErr = err;
  if (codedErr && codedErr.code === ErrorCode.COMPRESSED_HEADER) {
  // Header is compressed - need to decompress first
  this.handleCompressedHeader(headerBuf);
@@ -100,31 +100,31 @@ import { readNumber } from './NumberCodec.js';
  * Handle compressed header (kEncodedHeader)
  */ handleCompressedHeader(headerBuf) {
  // Parse the encoded header info to get decompression parameters
- var offset = 1; // Skip kEncodedHeader byte
+ let offset = 1; // Skip kEncodedHeader byte
  // Should have StreamsInfo for the header itself
- var propertyId = headerBuf[offset++];
+ const propertyId = headerBuf[offset++];
  if (propertyId !== PropertyId.kMainStreamsInfo && propertyId !== PropertyId.kPackInfo) {
  throw createCodedError('Expected StreamsInfo in encoded header', ErrorCode.CORRUPT_HEADER);
  }
  // For now, we parse the streams info from the encoded header block
  // This tells us how to decompress the actual header
  // Read pack info from the encoded header structure
- var packInfoResult = this.parseEncodedHeaderStreams(headerBuf, 1);
+ const packInfoResult = this.parseEncodedHeaderStreams(headerBuf, 1);
  // Calculate compressed header position
  // For simple archives: header is at SIGNATURE_HEADER_SIZE + packPos
  // For BCJ2/complex archives: header may be at the END of pack data area
  // The pack data area ends at nextHeaderOffset (where encoded header starts)
- var compressedStart = SIGNATURE_HEADER_SIZE + packInfoResult.packPos;
- var compressedData = this.source.read(compressedStart, packInfoResult.packSize);
+ const compressedStart = SIGNATURE_HEADER_SIZE + packInfoResult.packPos;
+ const compressedData = this.source.read(compressedStart, packInfoResult.packSize);
  // Decompress using the specified codec
- var codec = getCodec(packInfoResult.codecId);
- var decompressedHeader = null;
+ const codec = getCodec(packInfoResult.codecId);
+ let decompressedHeader = null;
  // Try decompressing from the calculated position first
  try {
  decompressedHeader = codec.decode(compressedData, packInfoResult.properties, packInfoResult.unpackSize);
  // Verify CRC if present
  if (packInfoResult.unpackCRC !== undefined) {
- var actualCRC = crc32(decompressedHeader);
+ const actualCRC = crc32(decompressedHeader);
  if (actualCRC !== packInfoResult.unpackCRC) {
  decompressedHeader = null; // CRC mismatch, need to search
  }
@@ -135,22 +135,22 @@ import { readNumber } from './NumberCodec.js';
  // If initial decompression failed, search for the correct position as a fallback
  // This handles edge cases where packPos doesn't point directly to header pack data
  if (decompressedHeader === null && this.signature) {
- var packAreaEnd = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
- var searchStart = packAreaEnd - packInfoResult.packSize;
- var searchEnd = Math.max(SIGNATURE_HEADER_SIZE, compressedStart - 100000);
+ const packAreaEnd = SIGNATURE_HEADER_SIZE + this.signature.nextHeaderOffset;
+ const searchStart = packAreaEnd - packInfoResult.packSize;
+ const searchEnd = Math.max(SIGNATURE_HEADER_SIZE, compressedStart - 100000);
  // Scan for LZMA data starting with 0x00 (range coder init)
  // Try each candidate and validate with CRC
- var scanChunkSize = 4096;
- searchLoop: for(var chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
- var chunk = this.source.read(chunkStart, scanChunkSize + packInfoResult.packSize);
- for(var i = 0; i < Math.min(chunk.length, scanChunkSize); i++){
+ const scanChunkSize = 4096;
+ searchLoop: for(let chunkStart = searchStart; chunkStart >= searchEnd; chunkStart -= scanChunkSize){
+ const chunk = this.source.read(chunkStart, scanChunkSize + packInfoResult.packSize);
+ for(let i = 0; i < Math.min(chunk.length, scanChunkSize); i++){
  if (chunk[i] === 0x00) {
- var candidateData = chunk.subarray(i, i + packInfoResult.packSize);
+ const candidateData = chunk.subarray(i, i + packInfoResult.packSize);
  if (candidateData.length === packInfoResult.packSize) {
  try {
- var candidateDecompressed = codec.decode(candidateData, packInfoResult.properties, packInfoResult.unpackSize);
+ const candidateDecompressed = codec.decode(candidateData, packInfoResult.properties, packInfoResult.unpackSize);
  if (packInfoResult.unpackCRC !== undefined) {
- var candCRC = crc32(candidateDecompressed);
+ const candCRC = crc32(candidateDecompressed);
  if (candCRC === packInfoResult.unpackCRC) {
  decompressedHeader = candidateDecompressed;
  break searchLoop;
@@ -172,13 +172,13 @@ import { readNumber } from './NumberCodec.js';
  }
  // Now parse the decompressed header
  // It should start with kHeader
- var decompOffset = 0;
- var headerId = decompressedHeader[decompOffset++];
+ let decompOffset = 0;
+ const headerId = decompressedHeader[decompOffset++];
  if (headerId !== PropertyId.kHeader) {
  throw createCodedError('Expected kHeader in decompressed header', ErrorCode.CORRUPT_HEADER);
  }
  // Parse the decompressed header using shared function from headers.ts
- var result = parseHeaderContent(decompressedHeader, decompOffset);
+ const result = parseHeaderContent(decompressedHeader, decompOffset);
  this.streamsInfo = result.streamsInfo || null;
  this.filesInfo = result.filesInfo;
  }
@@ -187,30 +187,30 @@ import { readNumber } from './NumberCodec.js';
  * This is a simplified parser for the header's own compression info
  */ parseEncodedHeaderStreams(buf, offset) {
  // This is a simplified parser for the encoded header's own streams info
- var packPos = 0;
- var packSize = 0;
- var unpackSize = 0;
- var codecId = [];
- var properties;
- var unpackCRC;
+ let packPos = 0;
+ let packSize = 0;
+ let unpackSize = 0;
+ let codecId = [];
+ let properties;
+ let unpackCRC;
  while(offset < buf.length){
- var propertyId = buf[offset++];
+ const propertyId = buf[offset++];
  if (propertyId === PropertyId.kEnd) {
  break;
  }
  switch(propertyId){
  case PropertyId.kPackInfo:
  {
- var packPosResult = readNumber(buf, offset);
+ const packPosResult = readNumber(buf, offset);
  packPos = packPosResult.value;
  offset += packPosResult.bytesRead;
- var numPackResult = readNumber(buf, offset);
+ const numPackResult = readNumber(buf, offset);
  offset += numPackResult.bytesRead;
  // Read until kEnd
  while(buf[offset] !== PropertyId.kEnd){
  if (buf[offset] === PropertyId.kSize) {
  offset++;
- var sizeResult = readNumber(buf, offset);
+ const sizeResult = readNumber(buf, offset);
  packSize = sizeResult.value;
  offset += sizeResult.bytesRead;
  } else {
@@ -225,21 +225,21 @@ import { readNumber } from './NumberCodec.js';
  while(offset < buf.length && buf[offset] !== PropertyId.kEnd){
  if (buf[offset] === PropertyId.kFolder) {
  offset++;
- var numFoldersResult = readNumber(buf, offset);
+ const numFoldersResult = readNumber(buf, offset);
  offset += numFoldersResult.bytesRead;
  offset++; // external flag
  // Parse coder
- var numCodersResult = readNumber(buf, offset);
+ const numCodersResult = readNumber(buf, offset);
  offset += numCodersResult.bytesRead;
- var flags = buf[offset++];
- var idSize = flags & 0x0f;
- var hasAttributes = (flags & 0x20) !== 0;
+ const flags = buf[offset++];
+ const idSize = flags & 0x0f;
+ const hasAttributes = (flags & 0x20) !== 0;
  codecId = [];
- for(var i = 0; i < idSize; i++){
+ for(let i = 0; i < idSize; i++){
  codecId.push(buf[offset++]);
  }
  if (hasAttributes) {
- var propsLenResult = readNumber(buf, offset);
+ const propsLenResult = readNumber(buf, offset);
  offset += propsLenResult.bytesRead;
  properties = buf.slice(offset, offset + propsLenResult.value);
  offset += propsLenResult.value;
@@ -247,12 +247,12 @@ import { readNumber } from './NumberCodec.js';
  } else if (buf[offset] === PropertyId.kCodersUnpackSize) {
  offset++;
  // Read unpack size - needed for LZMA decoder
- var unpackSizeResult = readNumber(buf, offset);
+ const unpackSizeResult = readNumber(buf, offset);
  unpackSize = unpackSizeResult.value;
  offset += unpackSizeResult.bytesRead;
  } else if (buf[offset] === PropertyId.kCRC) {
  offset++;
- var allDefined = buf[offset++];
+ const allDefined = buf[offset++];
  if (allDefined) {
  unpackCRC = buf.readUInt32LE(offset);
  offset += 4;
@@ -280,33 +280,37 @@ import { readNumber } from './NumberCodec.js';
  this.entries = [];
  if (!this.streamsInfo) {
  // No streams info - just create entries from file info
- for(var i = 0; i < this.filesInfo.length; i++){
- var file = this.filesInfo[i];
+ for(let i = 0; i < this.filesInfo.length; i++){
+ const file = this.filesInfo[i];
  this.entries.push(this.createEntry(file, 0, 0, 0));
  }
  return;
  }
  // Use the properly parsed numUnpackStreamsPerFolder from the archive header
- var streamsPerFolder = this.streamsInfo.numUnpackStreamsPerFolder;
+ const streamsPerFolder = this.streamsInfo.numUnpackStreamsPerFolder;
  // Initialize files per folder count (for smart caching)
- for(var f = 0; f < streamsPerFolder.length; f++){
+ for(let f = 0; f < streamsPerFolder.length; f++){
  this.filesPerFolder[f] = streamsPerFolder[f];
  this.extractedPerFolder[f] = 0;
  }
  // Now build entries with proper folder/stream tracking
- var streamIndex = 0;
- var folderIndex = 0;
- var streamInFolder = 0;
- var folderStreamCount = streamsPerFolder[0] || 0;
- for(var j = 0; j < this.filesInfo.length; j++){
- var fileInfo = this.filesInfo[j];
+ let streamIndex = 0;
+ let folderIndex = 0;
+ let streamInFolder = 0;
+ let folderStreamCount = streamsPerFolder[0] || 0;
+ for(let j = 0; j < this.filesInfo.length; j++){
+ const fileInfo = this.filesInfo[j];
  // Get size from unpackSizes for files with streams
- var size = 0;
+ let size = 0;
  if (fileInfo.hasStream && streamIndex < this.streamsInfo.unpackSizes.length) {
  size = this.streamsInfo.unpackSizes[streamIndex];
  }
- var entry = this.createEntry(fileInfo, size, folderIndex, streamInFolder);
+ const entry = this.createEntry(fileInfo, size, folderIndex, streamInFolder);
  entry._streamIndex = streamIndex;
+ // Set CRC if available
+ if (fileInfo.hasStream && this.streamsInfo.unpackCRCs && this.streamsInfo.unpackCRCs[streamIndex] !== undefined) {
+ entry._crc = this.streamsInfo.unpackCRCs[streamIndex];
+ }
  this.entries.push(entry);
  // Advance stream tracking for files with streams
  if (fileInfo.hasStream) {
@@ -327,12 +331,12 @@ import { readNumber } from './NumberCodec.js';
  // Determine entry type
  // Note: 7z format doesn't natively support symlinks. p7zip with -snl stores
  // symlinks as regular files with the target path as content.
- var type = 'file';
+ let type = 'file';
  if (file.isDirectory) {
  type = 'directory';
  }
  // Calculate mode from Windows attributes
- var mode;
+ let mode;
  if (file.attributes !== undefined) {
  // Check for Unix extension bit
  if ((file.attributes & FileAttribute.UNIX_EXTENSION) !== 0) {
@@ -377,7 +381,7 @@ import { readNumber } from './NumberCodec.js';
  */ getEntryStream(entry) {
  if (!entry._hasStream || entry.type === 'directory') {
  // Return empty stream for directories and empty files
- var emptyStream = new PassThrough();
+ const emptyStream = new PassThrough();
  emptyStream.end();
  return emptyStream;
  }
@@ -385,37 +389,44 @@ import { readNumber } from './NumberCodec.js';
  throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
  }
  // Get folder info
- var folder = this.streamsInfo.folders[entry._folderIndex];
+ const folder = this.streamsInfo.folders[entry._folderIndex];
  if (!folder) {
  throw createCodedError('Invalid folder index', ErrorCode.CORRUPT_HEADER);
  }
  // Check codec support
- for(var i = 0; i < folder.coders.length; i++){
- var coder = folder.coders[i];
+ for(let i = 0; i < folder.coders.length; i++){
+ const coder = folder.coders[i];
  if (!isCodecSupported(coder.id)) {
- var codecName = getCodecName(coder.id);
+ const codecName = getCodecName(coder.id);
  throw createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC);
  }
  }
  // Get decompressed data for this folder (with smart caching)
- var folderIdx = entry._folderIndex;
- var data = this.getDecompressedFolder(folderIdx);
+ const folderIdx = entry._folderIndex;
+ const data = this.getDecompressedFolder(folderIdx);
  // Calculate file offset within the decompressed block
  // For solid archives, multiple files are concatenated in the block
- var fileStart = 0;
- for(var m = 0; m < entry._streamIndexInFolder; m++){
+ let fileStart = 0;
+ for(let m = 0; m < entry._streamIndexInFolder; m++){
  // Sum sizes of all streams before this one in the folder
- var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
+ const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
  fileStart += this.streamsInfo.unpackSizes[prevStreamGlobalIndex];
  }
- var fileSize = entry.size;
+ const fileSize = entry.size;
  // Create a PassThrough stream with the file data
- var outputStream = new PassThrough();
+ const outputStream = new PassThrough();
  // Bounds check to prevent "oob" error on older Node versions
  if (fileStart + fileSize > data.length) {
  throw createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED);
  }
- var fileData = data.slice(fileStart, fileStart + fileSize);
+ const fileData = data.slice(fileStart, fileStart + fileSize);
+ // Verify CRC if present
+ if (entry._crc !== undefined) {
+ const actualCRC = crc32(fileData);
+ if (actualCRC !== entry._crc) {
+ throw createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH);
+ }
+ }
  outputStream.end(fileData);
  // Track extraction and release cache when all files from this folder are done
  this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
@@ -431,7 +442,7 @@ import { readNumber } from './NumberCodec.js';
  */ getEntryStreamAsync(entry, callback) {
  if (!entry._hasStream || entry.type === 'directory') {
  // Return empty stream for directories and empty files
- var emptyStream = new PassThrough();
+ const emptyStream = new PassThrough();
  emptyStream.end();
  callback(null, emptyStream);
  return;
@@ -441,40 +452,47 @@ import { readNumber } from './NumberCodec.js';
  return;
  }
  // Get folder info
- var folder = this.streamsInfo.folders[entry._folderIndex];
+ const folder = this.streamsInfo.folders[entry._folderIndex];
  if (!folder) {
  callback(createCodedError('Invalid folder index', ErrorCode.CORRUPT_HEADER));
  return;
  }
  // Check codec support
- for(var i = 0; i < folder.coders.length; i++){
- var coder = folder.coders[i];
+ for(let i = 0; i < folder.coders.length; i++){
+ const coder = folder.coders[i];
  if (!isCodecSupported(coder.id)) {
- var codecName = getCodecName(coder.id);
+ const codecName = getCodecName(coder.id);
  callback(createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC));
  return;
  }
  }
  // Get decompressed data for this folder using async method
- var folderIdx = entry._folderIndex;
- var streamsInfo = this.streamsInfo;
+ const folderIdx = entry._folderIndex;
+ const streamsInfo = this.streamsInfo;
  this.getDecompressedFolderAsync(folderIdx, (err, data)=>{
  if (err) return callback(err);
  if (!data) return callback(new Error('No data returned from decompression'));
  // Calculate file offset within the decompressed block
- var fileStart = 0;
- for(var m = 0; m < entry._streamIndexInFolder; m++){
- var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
+ let fileStart = 0;
+ for(let m = 0; m < entry._streamIndexInFolder; m++){
+ const prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
  fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
  }
- var fileSize = entry.size;
+ const fileSize = entry.size;
  // Bounds check
  if (fileStart + fileSize > data.length) {
  return callback(createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED));
  }
  // Create a PassThrough stream with the file data
- var outputStream = new PassThrough();
- var fileData = data.slice(fileStart, fileStart + fileSize);
+ const outputStream = new PassThrough();
+ const fileData = data.slice(fileStart, fileStart + fileSize);
+ // Verify CRC if present
+ if (entry._crc !== undefined) {
+ const actualCRC = crc32(fileData);
+ if (actualCRC !== entry._crc) {
+ return callback(createCodedError(`CRC mismatch for ${entry.path}: expected ${entry._crc.toString(16)}, got ${actualCRC.toString(16)}`, ErrorCode.CRC_MISMATCH));
+ }
+ }
  outputStream.end(fileData);
  // Track extraction and release cache when all files from this folder are done
  this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
@@ -487,7 +505,7 @@ import { readNumber } from './NumberCodec.js';
  /**
  * Check if a folder uses BCJ2 codec
  */ folderHasBcj2(folder) {
- for(var i = 0; i < folder.coders.length; i++){
+ for(let i = 0; i < folder.coders.length; i++){
  if (isBcj2Codec(folder.coders[i].id)) {
  return true;
  }
@@ -505,42 +523,42 @@ import { readNumber } from './NumberCodec.js';
  if (!this.streamsInfo) {
  throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
  }
- var folder = this.streamsInfo.folders[folderIndex];
+ const folder = this.streamsInfo.folders[folderIndex];
  // Check how many files remain in this folder
- var filesInFolder = this.filesPerFolder[folderIndex] || 1;
- var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
- var remainingFiles = filesInFolder - extractedFromFolder;
+ const filesInFolder = this.filesPerFolder[folderIndex] || 1;
+ const extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+ const remainingFiles = filesInFolder - extractedFromFolder;
  // Only cache if more than 1 file remains (including the current one being extracted)
- var shouldCache = remainingFiles > 1;
+ const shouldCache = remainingFiles > 1;
  // Check if this folder uses BCJ2 (requires special multi-stream handling)
  if (this.folderHasBcj2(folder)) {
- var data = this.decompressBcj2Folder(folderIndex);
+ const data = this.decompressBcj2Folder(folderIndex);
  if (shouldCache) {
  this.decompressedCache[folderIndex] = data;
  }
  return data;
  }
  // Calculate packed data position
- var packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+ let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
  // Find which pack stream this folder uses
- var packStreamIndex = 0;
- for(var j = 0; j < folderIndex; j++){
+ let packStreamIndex = 0;
+ for(let j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
  // Calculate position of this pack stream
- for(var k = 0; k < packStreamIndex; k++){
+ for(let k = 0; k < packStreamIndex; k++){
  packPos += this.streamsInfo.packSizes[k];
  }
- var packSize = this.streamsInfo.packSizes[packStreamIndex];
+ const packSize = this.streamsInfo.packSizes[packStreamIndex];
  // Read packed data
- var packedData = this.source.read(packPos, packSize);
+ const packedData = this.source.read(packPos, packSize);
  // Decompress through codec chain
- var data2 = packedData;
- for(var l = 0; l < folder.coders.length; l++){
- var coderInfo = folder.coders[l];
- var codec = getCodec(coderInfo.id);
+ let data2 = packedData;
+ for(let l = 0; l < folder.coders.length; l++){
+ const coderInfo = folder.coders[l];
+ const codec = getCodec(coderInfo.id);
  // Get unpack size for this coder (needed by LZMA)
- var unpackSize = folder.unpackSizes[l];
+ const unpackSize = folder.unpackSizes[l];
  data2 = codec.decode(data2, coderInfo.properties, unpackSize);
  }
  // Cache only if more files remain in this folder
@@ -553,7 +571,7 @@ import { readNumber } from './NumberCodec.js';
  * Get decompressed data for a folder using streaming (callback-based async)
  * Uses createDecoder() streams for non-blocking decompression
  */ getDecompressedFolderAsync(folderIndex, callback) {
- var self = this;
+ const self = this;
  // Check cache first
  if (this.decompressedCache[folderIndex]) {
  callback(null, this.decompressedCache[folderIndex]);
@@ -563,17 +581,17 @@ import { readNumber } from './NumberCodec.js';
  callback(createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER));
  return;
  }
- var folder = this.streamsInfo.folders[folderIndex];
+ const folder = this.streamsInfo.folders[folderIndex];
  // Check how many files remain in this folder
- var filesInFolder = this.filesPerFolder[folderIndex] || 1;
- var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
- var remainingFiles = filesInFolder - extractedFromFolder;
- var shouldCache = remainingFiles > 1;
+ const filesInFolder = this.filesPerFolder[folderIndex] || 1;
+ const extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+ const remainingFiles = filesInFolder - extractedFromFolder;
+ const shouldCache = remainingFiles > 1;
  // BCJ2 requires special handling - use sync version for now
  // TODO: Add async BCJ2 support
  if (this.folderHasBcj2(folder)) {
  try {
- var data = this.decompressBcj2Folder(folderIndex);
+ const data = this.decompressBcj2Folder(folderIndex);
  if (shouldCache) {
  this.decompressedCache[folderIndex] = data;
  }
@@ -584,29 +602,29 @@ import { readNumber } from './NumberCodec.js';
  return;
  }
  // Calculate packed data position
- var packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+ let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
  // Find which pack stream this folder uses
- var packStreamIndex = 0;
- for(var j = 0; j < folderIndex; j++){
+ let packStreamIndex = 0;
+ for(let j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
  // Calculate position of this pack stream
- for(var k = 0; k < packStreamIndex; k++){
+ for(let k = 0; k < packStreamIndex; k++){
  packPos += this.streamsInfo.packSizes[k];
  }
- var packSize = this.streamsInfo.packSizes[packStreamIndex];
+ const packSize = this.streamsInfo.packSizes[packStreamIndex];
  // Read packed data
- var packedData = this.source.read(packPos, packSize);
+ const packedData = this.source.read(packPos, packSize);
  // Create decoder stream chain and decompress
- var coders = folder.coders;
- var unpackSizes = folder.unpackSizes;
+ const coders = folder.coders;
+ const unpackSizes = folder.unpackSizes;
  // Helper to decompress through a single codec stream
  function decompressWithStream(input, coderIdx, cb) {
- var coderInfo = coders[coderIdx];
- var codec = getCodec(coderInfo.id);
- var decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
- var chunks = [];
- var errorOccurred = false;
+ const coderInfo = coders[coderIdx];
+ const codec = getCodec(coderInfo.id);
+ const decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
+ const chunks = [];
+ let errorOccurred = false;
  decoder.on('data', (chunk)=>{
  chunks.push(chunk);
  });
@@ -654,24 +672,24 @@ import { readNumber } from './NumberCodec.js';
  if (!this.streamsInfo) {
  throw createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER);
  }
- var folder = this.streamsInfo.folders[folderIndex];
+ const folder = this.streamsInfo.folders[folderIndex];
  // Calculate starting pack position
- var packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+ let packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
  // Find which pack stream index this folder starts at
- var packStreamIndex = 0;
- for(var j = 0; j < folderIndex; j++){
+ let packStreamIndex = 0;
+ for(let j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
  // Calculate position
- for(var k = 0; k < packStreamIndex; k++){
+ for(let k = 0; k < packStreamIndex; k++){
  packPos += this.streamsInfo.packSizes[k];
  }
  // Read all pack streams for this folder
- var numPackStreams = folder.packedStreams.length;
- var packStreams = [];
- var currentPos = packPos;
- for(var p = 0; p < numPackStreams; p++){
- var size = this.streamsInfo.packSizes[packStreamIndex + p];
+ const numPackStreams = folder.packedStreams.length;
+ const packStreams = [];
+ let currentPos = packPos;
+ for(let p = 0; p < numPackStreams; p++){
+ const size = this.streamsInfo.packSizes[packStreamIndex + p];
  packStreams.push(this.source.read(currentPos, size));
  currentPos += size;
  }
@@ -683,10 +701,10 @@ import { readNumber } from './NumberCodec.js';
  // Coder 3: BCJ2 - 4 in, 1 out
  // Pack streams map to: coder inputs not bound to other coder outputs
  // First, decompress each non-BCJ2 coder
- var coderOutputs = {};
+ const coderOutputs = {};
  // Find the BCJ2 coder
- var bcj2CoderIndex = -1;
- for(var c = 0; c < folder.coders.length; c++){
+ let bcj2CoderIndex = -1;
+ for(let c = 0; c < folder.coders.length; c++){
  if (isBcj2Codec(folder.coders[c].id)) {
  bcj2CoderIndex = c;
  break;
@@ -697,44 +715,44 @@ import { readNumber } from './NumberCodec.js';
  }
  // Build input stream index -> pack stream mapping
  // folder.packedStreams tells us which input indices are unbound and their order
- var inputToPackStream = {};
- for(var pi = 0; pi < folder.packedStreams.length; pi++){
+ const inputToPackStream = {};
+ for(let pi = 0; pi < folder.packedStreams.length; pi++){
  inputToPackStream[folder.packedStreams[pi]] = pi;
  }
  // Build output stream index -> coder mapping
- var outputToCoder = {};
- var totalOutputs = 0;
- for(var co = 0; co < folder.coders.length; co++){
- var numOut = folder.coders[co].numOutStreams;
- for(var outp = 0; outp < numOut; outp++){
+ const outputToCoder = {};
+ let totalOutputs = 0;
+ for(let co = 0; co < folder.coders.length; co++){
+ const numOut = folder.coders[co].numOutStreams;
+ for(let outp = 0; outp < numOut; outp++){
  outputToCoder[totalOutputs + outp] = co;
  }
  totalOutputs += numOut;
  }
  // Decompress non-BCJ2 coders (LZMA, LZMA2)
  // We need to process in dependency order
- var processed = {};
- var processOrder = this.getCoderProcessOrder(folder, bcj2CoderIndex);
- for(var po = 0; po < processOrder.length; po++){
- var coderIdx = processOrder[po];
+ const processed = {};
+ const processOrder = this.getCoderProcessOrder(folder, bcj2CoderIndex);
+ for(let po = 0; po < processOrder.length; po++){
+ const coderIdx = processOrder[po];
  if (coderIdx === bcj2CoderIndex) continue;
- var coder = folder.coders[coderIdx];
- var codec = getCodec(coder.id);
+ const coder = folder.coders[coderIdx];
+ const codec = getCodec(coder.id);
  // Find input for this coder
- var coderInputStart = 0;
- for(var ci2 = 0; ci2 < coderIdx; ci2++){
+ let coderInputStart = 0;
+ for(let ci2 = 0; ci2 < coderIdx; ci2++){
  coderInputStart += folder.coders[ci2].numInStreams;
  }
  // Get input data (from pack stream)
- var inputIdx = coderInputStart;
- var packStreamIdx = inputToPackStream[inputIdx];
- var inputData = packStreams[packStreamIdx];
+ const inputIdx = coderInputStart;
+ const packStreamIdx = inputToPackStream[inputIdx];
+ const inputData = packStreams[packStreamIdx];
  // Decompress
- var unpackSize = folder.unpackSizes[coderIdx];
- var outputData = codec.decode(inputData, coder.properties, unpackSize);
+ const unpackSize = folder.unpackSizes[coderIdx];
+ const outputData = codec.decode(inputData, coder.properties, unpackSize);
  // Store in coder outputs
- var coderOutputStart = 0;
- for(var co2 = 0; co2 < coderIdx; co2++){
+ let coderOutputStart = 0;
+ for(let co2 = 0; co2 < coderIdx; co2++){
  coderOutputStart += folder.coders[co2].numOutStreams;
  }
  coderOutputs[coderOutputStart] = outputData;
@@ -743,16 +761,16 @@ import { readNumber } from './NumberCodec.js';
  // Now process BCJ2
  // BCJ2 has 4 inputs, need to map them correctly
  // Standard order: main(LZMA2 output), call(LZMA output), jump(LZMA output), range(raw pack)
- var bcj2InputStart = 0;
- for(var ci3 = 0; ci3 < bcj2CoderIndex; ci3++){
+ let bcj2InputStart = 0;
+ for(let ci3 = 0; ci3 < bcj2CoderIndex; ci3++){
  bcj2InputStart += folder.coders[ci3].numInStreams;
  }
- var bcj2Inputs = [];
- for(var bi = 0; bi < 4; bi++){
- var globalIdx = bcj2InputStart + bi;
+ const bcj2Inputs = [];
+ for(let bi = 0; bi < 4; bi++){
+ const globalIdx = bcj2InputStart + bi;
  // Check if this input is bound to a coder output
- var boundOutput = -1;
- for(var bp2 = 0; bp2 < folder.bindPairs.length; bp2++){
+ let boundOutput = -1;
+ for(let bp2 = 0; bp2 < folder.bindPairs.length; bp2++){
  if (folder.bindPairs[bp2].inIndex === globalIdx) {
  boundOutput = folder.bindPairs[bp2].outIndex;
  break;
@@ -763,19 +781,19 @@ import { readNumber } from './NumberCodec.js';
  bcj2Inputs.push(coderOutputs[boundOutput]);
  } else {
  // Get from pack streams
- var psIdx = inputToPackStream[globalIdx];
+ const psIdx = inputToPackStream[globalIdx];
  bcj2Inputs.push(packStreams[psIdx]);
  }
  }
  // Get BCJ2 unpack size
- var bcj2OutputStart = 0;
- for(var co3 = 0; co3 < bcj2CoderIndex; co3++){
+ let bcj2OutputStart = 0;
+ for(let co3 = 0; co3 < bcj2CoderIndex; co3++){
  bcj2OutputStart += folder.coders[co3].numOutStreams;
  }
- var bcj2UnpackSize = folder.unpackSizes[bcj2OutputStart];
+ const bcj2UnpackSize = folder.unpackSizes[bcj2OutputStart];
  // Memory optimization: Clear intermediate buffers to help GC
  // These are no longer needed after bcj2Inputs is built
- for(var key in coderOutputs){
+ for(const key in coderOutputs){
  delete coderOutputs[key];
  }
  // Clear packStreams array (allows GC to free compressed data)
@@ -786,30 +804,30 @@ import { readNumber } from './NumberCodec.js';
  /**
  * Get processing order for coders (dependency order)
  */ getCoderProcessOrder(folder, excludeIdx) {
- var order = [];
- var processed = {};
+ const order = [];
+ const processed = {};
  // Simple approach: process coders that don't depend on unprocessed outputs
- var changed = true;
+ let changed = true;
  while(changed){
  changed = false;
- for(var c = 0; c < folder.coders.length; c++){
+ for(let c = 0; c < folder.coders.length; c++){
  if (processed[c] || c === excludeIdx) continue;
  // Check if all inputs are satisfied
- var inputStart = 0;
- for(var i = 0; i < c; i++){
+ let inputStart = 0;
+ for(let i = 0; i < c; i++){
  inputStart += folder.coders[i].numInStreams;
  }
- var canProcess = true;
- for(var inp = 0; inp < folder.coders[c].numInStreams; inp++){
- var globalIdx = inputStart + inp;
+ let canProcess = true;
+ for(let inp = 0; inp < folder.coders[c].numInStreams; inp++){
+ const globalIdx = inputStart + inp;
  // Check if bound to an unprocessed coder
- for(var bp = 0; bp < folder.bindPairs.length; bp++){
+ for(let bp = 0; bp < folder.bindPairs.length; bp++){
  if (folder.bindPairs[bp].inIndex === globalIdx) {
  // Find which coder produces this output
- var outIdx = folder.bindPairs[bp].outIndex;
- var outStart = 0;
- for(var oc = 0; oc < folder.coders.length; oc++){
- var numOut = folder.coders[oc].numOutStreams;
+ const outIdx = folder.bindPairs[bp].outIndex;
+ let outStart = 0;
+ for(let oc = 0; oc < folder.coders.length; oc++){
+ const numOut = folder.coders[oc].numOutStreams;
  if (outIdx < outStart + numOut) {
  if (!processed[oc] && oc !== excludeIdx) {
  canProcess = false;
@@ -855,8 +873,8 @@ import { readNumber } from './NumberCodec.js';
  /**
  * Get base name from a path
  */ function getBaseName(path) {
- var lastSlash = path.lastIndexOf('/');
- var lastBackslash = path.lastIndexOf('\\');
- var lastSep = Math.max(lastSlash, lastBackslash);
+ const lastSlash = path.lastIndexOf('/');
+ const lastBackslash = path.lastIndexOf('\\');
+ const lastSep = Math.max(lastSlash, lastBackslash);
  return lastSep >= 0 ? path.slice(lastSep + 1) : path;
  }
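
Beyond the var-to-const/let conversion, the functional change in SevenZipParser.js is CRC verification: the entry-building loop records entry._crc from streamsInfo.unpackCRCs, and getEntryStream / getEntryStreamAsync now reject data whose CRC32 does not match. A minimal sketch of how this surfaces to a caller, assuming the parser is used directly; readEntrySafely is illustrative, and only parser.getEntryStream(entry), entry.path, and the coded-error shape (err.code, err.message) are taken from the diff above:

// Illustrative sketch (assumed calling pattern, not the package's documented API).
// In 0.2.10, getEntryStream() throws a coded error when an entry's decompressed bytes
// fail the CRC32 stored in the archive header; getEntryStreamAsync() passes the same
// error to its callback instead of throwing.
function readEntrySafely(parser, entry) {
  try {
    return parser.getEntryStream(entry); // PassThrough stream with the entry's bytes
  } catch (err) {
    // err.code carries ErrorCode.CRC_MISMATCH for the new check; the exact ErrorCode
    // values and their import path are not shown in this diff.
    console.error(`Failed to extract ${entry.path}: ${err.message}`);
    return null;
  }
}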