7z-iterator 2.0.4 → 2.1.0

This diff shows the content changes between publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (74)
  1. package/dist/cjs/FileEntry.js.map +1 -1
  2. package/dist/cjs/SevenZipIterator.js +13 -8
  3. package/dist/cjs/SevenZipIterator.js.map +1 -1
  4. package/dist/cjs/compat.js.map +1 -1
  5. package/dist/cjs/index.js.map +1 -1
  6. package/dist/cjs/lib/defer.js.map +1 -1
  7. package/dist/cjs/lib/runDecode.d.cts +5 -0
  8. package/dist/cjs/lib/runDecode.d.ts +5 -0
  9. package/dist/cjs/lib/runDecode.js +55 -0
  10. package/dist/cjs/lib/runDecode.js.map +1 -0
  11. package/dist/cjs/lib/streamToSource.js.map +1 -1
  12. package/dist/cjs/nextEntry.js.map +1 -1
  13. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  14. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -1
  15. package/dist/cjs/sevenz/NumberCodec.js.map +1 -1
  16. package/dist/cjs/sevenz/SevenZipParser.d.cts +12 -1
  17. package/dist/cjs/sevenz/SevenZipParser.d.ts +12 -1
  18. package/dist/cjs/sevenz/SevenZipParser.js +325 -217
  19. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  20. package/dist/cjs/sevenz/codecs/Aes.js.map +1 -1
  21. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  22. package/dist/cjs/sevenz/codecs/Bcj2.js.map +1 -1
  23. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  24. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  25. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -1
  26. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -1
  27. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +24 -4
  28. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  29. package/dist/cjs/sevenz/codecs/index.d.cts +2 -1
  30. package/dist/cjs/sevenz/codecs/index.d.ts +2 -1
  31. package/dist/cjs/sevenz/codecs/index.js +28 -16
  32. package/dist/cjs/sevenz/codecs/index.js.map +1 -1
  33. package/dist/cjs/sevenz/codecs/streams.js.map +1 -1
  34. package/dist/cjs/sevenz/constants.js.map +1 -1
  35. package/dist/cjs/sevenz/headers.js.map +1 -1
  36. package/dist/cjs/sevenz/index.d.cts +1 -1
  37. package/dist/cjs/sevenz/index.d.ts +1 -1
  38. package/dist/cjs/sevenz/index.js.map +1 -1
  39. package/dist/cjs/types.js.map +1 -1
  40. package/dist/esm/FileEntry.js.map +1 -1
  41. package/dist/esm/SevenZipIterator.js +13 -8
  42. package/dist/esm/SevenZipIterator.js.map +1 -1
  43. package/dist/esm/compat.js.map +1 -1
  44. package/dist/esm/index.js.map +1 -1
  45. package/dist/esm/lib/defer.js.map +1 -1
  46. package/dist/esm/lib/runDecode.d.ts +5 -0
  47. package/dist/esm/lib/runDecode.js +29 -0
  48. package/dist/esm/lib/runDecode.js.map +1 -0
  49. package/dist/esm/lib/streamToSource.js.map +1 -1
  50. package/dist/esm/nextEntry.js.map +1 -1
  51. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  52. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -1
  53. package/dist/esm/sevenz/NumberCodec.js.map +1 -1
  54. package/dist/esm/sevenz/SevenZipParser.d.ts +12 -1
  55. package/dist/esm/sevenz/SevenZipParser.js +308 -218
  56. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  57. package/dist/esm/sevenz/codecs/Aes.js.map +1 -1
  58. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  59. package/dist/esm/sevenz/codecs/Bcj2.js.map +1 -1
  60. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  61. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  62. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -1
  63. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +19 -4
  64. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  65. package/dist/esm/sevenz/codecs/index.d.ts +2 -1
  66. package/dist/esm/sevenz/codecs/index.js +28 -16
  67. package/dist/esm/sevenz/codecs/index.js.map +1 -1
  68. package/dist/esm/sevenz/codecs/streams.js.map +1 -1
  69. package/dist/esm/sevenz/constants.js.map +1 -1
  70. package/dist/esm/sevenz/headers.js.map +1 -1
  71. package/dist/esm/sevenz/index.d.ts +1 -1
  72. package/dist/esm/sevenz/index.js.map +1 -1
  73. package/dist/esm/types.js.map +1 -1
  74. package/package.json +2 -2
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["/**\n * FileEntry for 7z archives\n *\n * Wraps a lazy stream - decompression happens when the stream is read.\n * API consistent with zip-iterator and tar-iterator.\n */\n\nimport once from 'call-once-fn';\nimport { type FileAttributes, FileEntry, type Lock, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { ExtractOptions } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: Lock;\n private stream: NodeJS.ReadableStream;\n\n /**\n * Whether this entry's folder supports streaming decompression.\n */\n readonly _canStream: boolean;\n\n constructor(attributes: FileAttributes, stream: NodeJS.ReadableStream, lock: Lock, canStream: boolean) {\n super(attributes);\n this.stream = stream;\n this.lock = lock;\n this.lock.retain();\n this._canStream = canStream;\n }\n\n create(dest: string, callback: NoParamCallback): void;\n create(dest: string, options: ExtractOptions, callback: NoParamCallback): void;\n create(dest: string, options?: ExtractOptions): Promise<boolean>;\n create(dest: string, options?: ExtractOptions | NoParamCallback, callback?: NoParamCallback): void | Promise<boolean> {\n callback = typeof options === 'function' ? options : callback;\n options = typeof options === 'function' ? {} : ((options || {}) as ExtractOptions);\n\n if (typeof callback === 'function') {\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) =>\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n })\n );\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): void {\n if (!this.stream) {\n callback(new Error('7z FileEntry missing stream. Check for calling create multiple times'));\n return;\n }\n\n const stream = this.stream;\n this.stream = null; // Prevent reuse\n\n // Use once since errors can come from either stream\n const cb = once((err?: Error) => {\n err ? callback(err) : waitForAccess(fullPath, callback);\n });\n\n try {\n const writeStream = fs.createWriteStream(fullPath);\n\n // Listen for errors on source stream (errors don't propagate through pipe)\n stream.on('error', (streamErr: Error) => {\n // Destroy the write stream on source error.\n // On Node 0.8, destroy() emits 'close' before 'error'. Since on-one is listening\n // for ['error', 'close', 'finish'], it catches 'close' first, calls our callback,\n // and removes ALL listeners - including the 'error' listener. 
The subsequent EBADF\n // error then fires with no handler, causing an uncaught exception.\n // Adding a no-op error handler ensures there's always a listener for any error.\n const ws = writeStream as fs.WriteStream & { destroy?: () => void };\n writeStream.on('error', () => {});\n if (typeof ws.destroy === 'function') ws.destroy();\n cb(streamErr);\n });\n\n // Pipe and listen for write stream completion/errors\n stream.pipe(writeStream);\n oo(writeStream, ['error', 'close', 'finish'], cb);\n } catch (pipeErr) {\n cb(pipeErr);\n }\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n if (this.stream) {\n // Use destroy() to prevent decompression (our stream has custom destroy that sets destroyed flag)\n // Fallback to resume() for older Node versions without destroy()\n const s = this.stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') {\n s.destroy();\n }\n this.stream = null;\n }\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["SevenZipFileEntry","attributes","stream","lock","canStream","retain","_canStream","create","dest","options","callback","FileEntry","prototype","call","err","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","Error","cb","once","waitForAccess","writeStream","fs","createWriteStream","on","streamErr","ws","destroy","pipe","oo","pipeErr","s"],"mappings":"AAAA;;;;;CAKC;;;;;;;eAQoBA;;;iEANJ;mCAC8E;yDAChF;4DACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGA,IAAA,AAAMA,kCAAN;;cAAMA;aAAAA,kBASPC,UAA0B,EAAEC,MAA6B,EAAEC,IAAU,EAAEC,SAAkB;gCATlFJ;;gBAUjB,kBAViBA;YAUXC;;QACN,MAAKC,MAAM,GAAGA;QACd,MAAKC,IAAI,GAAGA;QACZ,MAAKA,IAAI,CAACE,MAAM;QAChB,MAAKC,UAAU,GAAGF;;;iBAdDJ;IAoBnBO,OAAAA,MAkBC,GAlBDA,SAAAA,OAAOC,IAAY,EAAEC,OAA0C,EAAEC,QAA0B;;QACzFA,WAAW,OAAOD,YAAY,aAAaA,UAAUC;QACrDD,UAAU,OAAOA,YAAY,aAAa,CAAC,IAAMA,WAAW,CAAC;QAE7D,IAAI,OAAOC,aAAa,YAAY;YAClC,OAAOC,8BAAS,CAACC,SAAS,CAACL,MAAM,CAACM,IAAI,CAAC,IAAI,EAAEL,MAAMC,SAAS,SAACK;gBAC3DJ,SAASI;gBACT,IAAI,MAAKX,IAAI,EAAE;oBACb,MAAKA,IAAI,CAACY,OAAO;oBACjB,MAAKZ,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIa,QAAQ,SAACC,SAASC;mBAC3B,MAAKX,MAAM,CAACC,MAAMC,SAAS,SAACK,KAAaK;gBACvCL,MAAMI,OAAOJ,OAAOG,QAAQE;YAC9B;;IAEJ;IAEAC,OAAAA,UAqCC,GArCDA,SAAAA,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB;QAC9E,IAAI,CAAC,IAAI,CAACR,MAAM,EAAE;YAChBQ,SAAS,IAAIa,MAAM;YACnB;QACF;QAEA,IAAMrB,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAI,CAACA,MAAM,GAAG,MAAM,gBAAgB;QAEpC,oDAAoD;QACpD,IAAMsB,KAAKC,IAAAA,mBAAI,EAAC,SAACX;YACfA,MAAMJ,SAASI,OAAOY,IAAAA,kCAAa,EAACL,UAAUX;QAChD;QAEA,IAAI;YACF,IAAMiB,cAAcC,WAAE,CAACC,iBAAiB,CAACR;YAEzC,2EAA2E;YAC3EnB,OAAO4B,EAAE,CAAC,SAAS,SAACC;gBAClB,4CAA4C;gBAC5C,iFAAiF;gBACjF,kFAAkF;gBAClF,mFAAmF;gBACnF,mEAAmE;gBACnE,gFAAgF;gBAChF,IAAMC,KAAKL;gBACXA,YAAYG,EAAE,CAAC,SAAS,YAAO;gBAC/B,IAAI,OAAOE,GAAGC,OAAO,KAAK,YAAYD,GAAGC,OAAO;gBAChDT,GAAGO;YACL;YAEA,qDAAqD;YACrD7B,OAAOgC,IAAI,CAACP;YACZQ,IAAAA,cAAE,EAACR,aAAa;gBAAC;gBAAS;gBAAS;aAAS,EAAEH;QAChD,EAAE,OAAOY,SAAS;YAChBZ,GAAGY;QACL;IACF;IAEAH,OAAAA,OAeC,GAfDA,SAAAA;QACEtB,8BAAS,CAACC,SAAS,CAACqB,OAAO,CAACpB,IAAI,CAAC,IAAI;QACrC,IAAI,IAAI,CAACX,MAAM,EAAE;YACf,kGAAkG;YAClG,iEAAiE;YACjE,IAAMmC,IAAI,IAAI,CAACnC,MAAM;YACrB,IAAI,OAAOmC,EAAEJ,OAAO,KAAK,YAAY;gBACnCI,EAAEJ,OAAO;YACX;YACA,IAAI,CAAC/B,MAAM,GAAG;QAChB;QACA,IAAI,IAAI,CAACC,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACY,OAAO;YACjB,IAAI,CAACZ,IAAI,GAAG;QACd;IACF;WA9FmBH;EAA0BW,8BAAS"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["/**\n * FileEntry for 7z archives\n *\n * Wraps a lazy stream - decompression happens when the stream is read.\n * API consistent with zip-iterator and tar-iterator.\n */\n\nimport once from 'call-once-fn';\nimport { type FileAttributes, FileEntry, type Lock, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { ExtractOptions } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: Lock;\n private stream: NodeJS.ReadableStream;\n\n /**\n * Whether this entry's folder supports streaming decompression.\n */\n readonly _canStream: boolean;\n\n constructor(attributes: FileAttributes, stream: NodeJS.ReadableStream, lock: Lock, canStream: boolean) {\n super(attributes);\n this.stream = stream;\n this.lock = lock;\n this.lock.retain();\n this._canStream = canStream;\n }\n\n create(dest: string, callback: NoParamCallback): void;\n create(dest: string, options: ExtractOptions, callback: NoParamCallback): void;\n create(dest: string, options?: ExtractOptions): Promise<boolean>;\n create(dest: string, options?: ExtractOptions | NoParamCallback, callback?: NoParamCallback): void | Promise<boolean> {\n callback = typeof options === 'function' ? options : callback;\n options = typeof options === 'function' ? {} : ((options || {}) as ExtractOptions);\n\n if (typeof callback === 'function') {\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) =>\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n })\n );\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): void {\n if (!this.stream) {\n callback(new Error('7z FileEntry missing stream. Check for calling create multiple times'));\n return;\n }\n\n const stream = this.stream;\n this.stream = null; // Prevent reuse\n\n // Use once since errors can come from either stream\n const cb = once((err?: Error) => {\n err ? callback(err) : waitForAccess(fullPath, callback);\n });\n\n try {\n const writeStream = fs.createWriteStream(fullPath);\n\n // Listen for errors on source stream (errors don't propagate through pipe)\n stream.on('error', (streamErr: Error) => {\n // Destroy the write stream on source error.\n // On Node 0.8, destroy() emits 'close' before 'error'. Since on-one is listening\n // for ['error', 'close', 'finish'], it catches 'close' first, calls our callback,\n // and removes ALL listeners - including the 'error' listener. 
The subsequent EBADF\n // error then fires with no handler, causing an uncaught exception.\n // Adding a no-op error handler ensures there's always a listener for any error.\n const ws = writeStream as fs.WriteStream & { destroy?: () => void };\n writeStream.on('error', () => {});\n if (typeof ws.destroy === 'function') ws.destroy();\n cb(streamErr);\n });\n\n // Pipe and listen for write stream completion/errors\n stream.pipe(writeStream);\n oo(writeStream, ['error', 'close', 'finish'], cb);\n } catch (pipeErr) {\n cb(pipeErr);\n }\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n if (this.stream) {\n // Use destroy() to prevent decompression (our stream has custom destroy that sets destroyed flag)\n // Fallback to resume() for older Node versions without destroy()\n const s = this.stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') {\n s.destroy();\n }\n this.stream = null;\n }\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["SevenZipFileEntry","attributes","stream","lock","canStream","retain","_canStream","create","dest","options","callback","FileEntry","prototype","call","err","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","Error","cb","once","waitForAccess","writeStream","fs","createWriteStream","on","streamErr","ws","destroy","pipe","oo","pipeErr","s"],"mappings":"AAAA;;;;;CAKC;;;;;;;eAQoBA;;;iEANJ;mCAC8E;yDAChF;4DACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGA,IAAA,AAAMA,kCAAN;;cAAMA;aAAAA,kBASPC,UAA0B,EAAEC,MAA6B,EAAEC,IAAU,EAAEC,SAAkB;gCATlFJ;;gBAUjB,kBAViBA;YAUXC;;QACN,MAAKC,MAAM,GAAGA;QACd,MAAKC,IAAI,GAAGA;QACZ,MAAKA,IAAI,CAACE,MAAM;QAChB,MAAKC,UAAU,GAAGF;;;iBAdDJ;IAoBnBO,OAAAA,MAkBC,GAlBDA,SAAAA,OAAOC,IAAY,EAAEC,OAA0C,EAAEC,QAA0B;;QACzFA,WAAW,OAAOD,YAAY,aAAaA,UAAUC;QACrDD,UAAU,OAAOA,YAAY,aAAa,CAAC,IAAMA,WAAW,CAAC;QAE7D,IAAI,OAAOC,aAAa,YAAY;YAClC,OAAOC,8BAAS,CAACC,SAAS,CAACL,MAAM,CAACM,IAAI,CAAC,IAAI,EAAEL,MAAMC,SAAS,SAACK;gBAC3DJ,SAASI;gBACT,IAAI,MAAKX,IAAI,EAAE;oBACb,MAAKA,IAAI,CAACY,OAAO;oBACjB,MAAKZ,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIa,QAAQ,SAACC,SAASC;mBAC3B,MAAKX,MAAM,CAACC,MAAMC,SAAS,SAACK,KAAaK;gBACvCL,MAAMI,OAAOJ,OAAOG,QAAQE;YAC9B;;IAEJ;IAEAC,OAAAA,UAqCC,GArCDA,SAAAA,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB;QAC9E,IAAI,CAAC,IAAI,CAACR,MAAM,EAAE;YAChBQ,SAAS,IAAIa,MAAM;YACnB;QACF;QAEA,IAAMrB,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAI,CAACA,MAAM,GAAG,MAAM,gBAAgB;QAEpC,oDAAoD;QACpD,IAAMsB,KAAKC,IAAAA,mBAAI,EAAC,SAACX;YACfA,MAAMJ,SAASI,OAAOY,IAAAA,kCAAa,EAACL,UAAUX;QAChD;QAEA,IAAI;YACF,IAAMiB,cAAcC,WAAE,CAACC,iBAAiB,CAACR;YAEzC,2EAA2E;YAC3EnB,OAAO4B,EAAE,CAAC,SAAS,SAACC;gBAClB,4CAA4C;gBAC5C,iFAAiF;gBACjF,kFAAkF;gBAClF,mFAAmF;gBACnF,mEAAmE;gBACnE,gFAAgF;gBAChF,IAAMC,KAAKL;gBACXA,YAAYG,EAAE,CAAC,SAAS,YAAO;gBAC/B,IAAI,OAAOE,GAAGC,OAAO,KAAK,YAAYD,GAAGC,OAAO;gBAChDT,GAAGO;YACL;YAEA,qDAAqD;YACrD7B,OAAOgC,IAAI,CAACP;YACZQ,IAAAA,cAAE,EAACR,aAAa;gBAAC;gBAAS;gBAAS;aAAS,EAAEH;QAChD,EAAE,OAAOY,SAAS;YAChBZ,GAAGY;QACL;IACF;IAEAH,OAAAA,OAeC,GAfDA,SAAAA;QACEtB,8BAAS,CAACC,SAAS,CAACqB,OAAO,CAACpB,IAAI,CAAC,IAAI;QACrC,IAAI,IAAI,CAACX,MAAM,EAAE;YACf,kGAAkG;YAClG,iEAAiE;YACjE,IAAMmC,IAAI,IAAI,CAACnC,MAAM;YACrB,IAAI,OAAOmC,EAAEJ,OAAO,KAAK,YAAY;gBACnCI,EAAEJ,OAAO;YACX;YACA,IAAI,CAAC/B,MAAM,GAAG;QAChB;QACA,IAAI,IAAI,CAACC,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACY,OAAO;YACjB,IAAI,CAACZ,IAAI,GAAG;QACd;IACF;WA9FmBH;EAA0BW,8BAAS"}
@@ -224,14 +224,19 @@ var SevenZipIterator = /*#__PURE__*/ function(BaseIterator) {
  queue.defer(function(cb) {
    if (_this.done || cancelled) return;
    if (!archiveSource) return cb(new Error('No archive source'));
-   try {
-     var parser = new _SevenZipParserts.SevenZipParser(archiveSource);
-     parser.parse();
-     _this._iterator = new EntryIterator(parser);
-     cb();
-   } catch (parseErr) {
-     cb(parseErr);
-   }
+   var parser = new _SevenZipParserts.SevenZipParser(archiveSource);
+   parser.parse(function(parseErr) {
+     if (parseErr) {
+       cb(parseErr);
+       return;
+     }
+     try {
+       _this._iterator = new EntryIterator(parser);
+       cb();
+     } catch (err) {
+       cb(err);
+     }
+   });
  });
  // start processing
  queue.await(function(err) {
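In 2.0.4 the hunk above constructed the parser and called parse() synchronously inside a try/catch; in 2.1.0 parse() takes a completion callback, so the entry iterator is only built once header parsing has finished (or failed). A minimal TypeScript sketch of adapting the new callback form to a Promise, reusing the SevenZipParser and ArchiveSource names and the internal source path visible in this package's source maps; whether the package itself exposes a Promise-based parse is not shown in this diff:

    import { type ArchiveSource, SevenZipParser } from './sevenz/SevenZipParser.ts';

    // Sketch only: wrap the callback-based parse() added in 2.1.0 in a Promise.
    function parseArchive(source: ArchiveSource): Promise<SevenZipParser> {
      return new Promise((resolve, reject) => {
        const parser = new SevenZipParser(source);
        // parse() now completes asynchronously and reports failure via its callback.
        parser.parse((parseErr) => (parseErr ? reject(parseErr) : resolve(parser)));
      });
    }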
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/SevenZipIterator.ts"],"sourcesContent":["import BaseIterator, { Lock } from 'extract-base-iterator';\nimport fs from 'fs';\nimport { rmSync } from 'fs-remove-compat';\nimport path from 'path';\nimport Queue from 'queue-cb';\nimport shortHash from 'short-hash';\nimport tempSuffix from 'temp-suffix';\nimport { tmpdir } from './compat.ts';\nimport streamToSource, { type SourceResult } from './lib/streamToSource.ts';\nimport nextEntry from './nextEntry.ts';\nimport { setPassword } from './sevenz/codecs/index.ts';\nimport { type ArchiveSource, FileSource, type SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\n\nimport type { Entry, ExtractOptions } from './types.ts';\n\n/**\n * Internal iterator interface for SevenZipParser entries\n * @internal\n */\ninterface SevenZipFileIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n/**\n * Iterator wrapper around SevenZipParser entries\n */\nclass EntryIterator implements SevenZipFileIterator {\n private parser: SevenZipParser;\n private entries: SevenZipEntry[];\n private index = 0;\n\n constructor(parser: SevenZipParser) {\n this.parser = parser;\n this.entries = parser.getEntries();\n }\n\n next(): SevenZipEntry | null {\n if (this.index >= this.entries.length) {\n return null;\n }\n return this.entries[this.index++];\n }\n\n getParser(): SevenZipParser {\n return this.parser;\n }\n}\n\nexport default class SevenZipIterator extends BaseIterator<Entry> {\n lock: Lock | null;\n /** @internal - Do not use directly */\n _iterator: unknown;\n\n constructor(source: string | NodeJS.ReadableStream, options: ExtractOptions = {}) {\n super(options);\n this.lock = new Lock();\n this.lock.onDestroy = (err) => BaseIterator.prototype.end.call(this, err);\n const queue = new Queue(1);\n let cancelled = false;\n let archiveSource: ArchiveSource | null = null;\n const setup = (): void => {\n cancelled = true;\n };\n this.processing.push(setup);\n\n // Set password (or clear if not provided)\n setPassword(options.password || null);\n\n if (typeof source === 'string') {\n // File path input - use FileSource directly\n queue.defer((cb: (err?: Error) => void) => {\n fs.stat(source, (statErr, stats) => {\n if (this.done || cancelled) return;\n if (statErr) return cb(statErr);\n\n fs.open(source, 'r', (err, fd) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n\n archiveSource = new FileSource(fd, stats.size);\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(fd);\n });\n cb();\n });\n });\n });\n } else {\n // Stream input - write to temp file for random access\n // Register cleanup for source stream\n const stream = source as NodeJS.ReadableStream;\n this.lock.registerCleanup(() => {\n const s = stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') s.destroy();\n });\n\n const tempPath = path.join(tmpdir(), '7z-iterator', shortHash(process.cwd()), tempSuffix('tmp.7z'));\n queue.defer((cb: (err?: Error) => void) => {\n streamToSource(source, { tempPath }, (err?: Error, result?: SourceResult) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n if (!result) return cb(new Error('No result from streamToSource'));\n\n archiveSource = result.source;\n\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(result.fd);\n });\n\n // Register cleanup for temp file\n 
this.lock.registerCleanup(() => {\n try {\n rmSync(result.tempPath);\n } catch (_e) {\n /* ignore */\n }\n });\n\n cb();\n });\n });\n }\n\n // Parse and build iterator\n queue.defer((cb: (err?: Error) => void) => {\n if (this.done || cancelled) return;\n if (!archiveSource) return cb(new Error('No archive source'));\n\n try {\n const parser = new SevenZipParser(archiveSource);\n parser.parse();\n this._iterator = new EntryIterator(parser);\n cb();\n } catch (parseErr) {\n cb(parseErr as Error);\n }\n });\n\n // start processing\n queue.await((err?: Error) => {\n this.processing.remove(setup);\n if (this.done || cancelled) return;\n err ? this.end(err) : this.push(nextEntry);\n });\n }\n\n end(err?: Error) {\n if (this.lock) {\n const lock = this.lock;\n this.lock = null; // Clear before release to prevent re-entrancy\n lock.err = err;\n lock.release();\n }\n // Don't call base end here - Lock.__destroy() handles it\n this._iterator = null;\n }\n\n /**\n * Check if streaming extraction is available for any folder in this archive.\n * Streaming is possible when folders use codecs like BZip2, Deflate, or Copy\n * that can decompress incrementally without buffering the entire input.\n *\n * @returns true if at least one folder supports streaming\n */\n canStream(): boolean {\n if (!this._iterator) return false;\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return false;\n\n const entries = parser.getEntries();\n const checkedFolders: { [key: number]: boolean } = {};\n\n for (let i = 0; i < entries.length; i++) {\n const folderIndex = entries[i]._folderIndex;\n if (folderIndex >= 0 && checkedFolders[folderIndex] === undefined) {\n checkedFolders[folderIndex] = parser.canStreamFolder(folderIndex);\n if (checkedFolders[folderIndex]) {\n return true;\n }\n }\n }\n\n return false;\n }\n\n /**\n * Get entries sorted for optimal streaming extraction.\n *\n * Entries are sorted by:\n * 1. Folder index (process one folder at a time)\n * 2. 
Stream index within folder (for solid block streaming)\n *\n * This ordering allows multi-file solid folders to stream with\n * O(largest file) memory instead of O(folder size).\n *\n * @returns Array of entries in streaming order\n */\n getStreamingOrder(): SevenZipEntry[] {\n if (!this._iterator) return [];\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return [];\n\n const entries = parser.getEntries();\n\n // Create a copy and sort for streaming order\n const sorted: SevenZipEntry[] = [];\n for (let i = 0; i < entries.length; i++) {\n sorted.push(entries[i]);\n }\n\n sorted.sort((a, b) => {\n // First by folder index\n if (a._folderIndex !== b._folderIndex) {\n return a._folderIndex - b._folderIndex;\n }\n // Then by stream index within folder\n return a._streamIndexInFolder - b._streamIndexInFolder;\n });\n\n return sorted;\n }\n}\n"],"names":["SevenZipIterator","EntryIterator","parser","index","entries","getEntries","next","length","getParser","source","options","lock","Lock","onDestroy","err","BaseIterator","prototype","end","call","queue","Queue","cancelled","archiveSource","setup","processing","push","setPassword","password","defer","cb","fs","stat","statErr","stats","done","open","fd","FileSource","size","registerCleanup","closeSync","stream","s","destroy","tempPath","path","join","tmpdir","shortHash","process","cwd","tempSuffix","streamToSource","result","Error","rmSync","_e","SevenZipParser","parse","_iterator","parseErr","await","remove","nextEntry","release","canStream","checkedFolders","i","folderIndex","_folderIndex","undefined","canStreamFolder","getStreamingOrder","sorted","sort","a","b","_streamIndexInFolder"],"mappings":";;;;;;;eAiDqBA;;;2EAjDc;yDACpB;8BACQ;2DACN;8DACC;gEACI;iEACC;wBACA;uEAC2B;kEAC5B;uBACM;gCACuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAanF;;CAEC,GACD,IAAA,AAAMC,8BAAN;;aAAMA,cAKQC,MAAsB;gCAL9BD;aAGIE,QAAQ;QAGd,IAAI,CAACD,MAAM,GAAGA;QACd,IAAI,CAACE,OAAO,GAAGF,OAAOG,UAAU;;iBAP9BJ;IAUJK,OAAAA,IAKC,GALDA,SAAAA;QACE,IAAI,IAAI,CAACH,KAAK,IAAI,IAAI,CAACC,OAAO,CAACG,MAAM,EAAE;YACrC,OAAO;QACT;QACA,OAAO,IAAI,CAACH,OAAO,CAAC,IAAI,CAACD,KAAK,GAAG;IACnC;IAEAK,OAAAA,SAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACN,MAAM;IACpB;WAnBID;;AAsBS,IAAA,AAAMD,iCAAN;;cAAMA;aAAAA,iBAKPS,MAAsC;YAAEC,UAAAA,iEAA0B,CAAC;gCAL5DV;;gBAMjB,kBANiBA;YAMXU;;QACN,MAAKC,IAAI,GAAG,IAAIC,yBAAI;QACpB,MAAKD,IAAI,CAACE,SAAS,GAAG,SAACC;mBAAQC,4BAAY,CAACC,SAAS,CAACC,GAAG,CAACC,IAAI,QAAOJ;;QACrE,IAAMK,QAAQ,IAAIC,gBAAK,CAAC;QACxB,IAAIC,YAAY;QAChB,IAAIC,gBAAsC;QAC1C,IAAMC,QAAQ;YACZF,YAAY;QACd;QACA,MAAKG,UAAU,CAACC,IAAI,CAACF;QAErB,0CAA0C;QAC1CG,IAAAA,oBAAW,EAAChB,QAAQiB,QAAQ,IAAI;QAEhC,IAAI,OAAOlB,WAAW,UAAU;YAC9B,4CAA4C;YAC5CU,MAAMS,KAAK,CAAC,SAACC;gBACXC,WAAE,CAACC,IAAI,CAACtB,QAAQ,SAACuB,SAASC;oBACxB,IAAI,MAAKC,IAAI,IAAIb,WAAW;oBAC5B,IAAIW,SAAS,OAAOH,GAAGG;oBAEvBF,WAAE,CAACK,IAAI,CAAC1B,QAAQ,KAAK,SAACK,KAAKsB;wBACzB,IAAI,MAAKF,IAAI,IAAIb,WAAW;wBAC5B,IAAIP,KAAK,OAAOe,GAAGf;wBAEnBQ,gBAAgB,IAAIe,4BAAU,CAACD,IAAIH,MAAMK,IAAI;wBAC7C,uCAAuC;wBACvC,MAAK3B,IAAI,CAAC4B,eAAe,CAAC;4BACxBT,WAAE,CAACU,SAAS,CAACJ;wBACf;wBACAP;oBACF;gBACF;YACF;QACF,OAAO;YACL,sDAAsD;YACtD,qCAAqC;YACrC,IAAMY,SAAShC;YACf,MAAKE,IAAI,CAAC4B,eAAe,CAAC;gBACxB,IAAMG,IAAID;gBACV,IAAI,OAAOC,EAAEC,OAAO,KAAK,YAAYD,EAAEC,OAAO;YAChD;YAEA,IAAMC,WAAWC,aAAI,CAACC,IAAI,CAACC,IAAAA,gBAAM,KAAI,eAAeC,IAAAA,kBAAS,EAACC,QAAQC,GAAG,KAAKC,IAAAA,mBAAU,EAAC;YACzFhC,MAAMS,KAAK,CAAC,SAACC;gBACXuB,IAAAA,yBAAc,EAAC3C,QAAQ;oBAAEmC,UAAAA;gBAAS,GAAG,SAAC9B,KAAauC;oBACjD,IA
AI,MAAKnB,IAAI,IAAIb,WAAW;oBAC5B,IAAIP,KAAK,OAAOe,GAAGf;oBACnB,IAAI,CAACuC,QAAQ,OAAOxB,GAAG,IAAIyB,MAAM;oBAEjChC,gBAAgB+B,OAAO5C,MAAM;oBAE7B,uCAAuC;oBACvC,MAAKE,IAAI,CAAC4B,eAAe,CAAC;wBACxBT,WAAE,CAACU,SAAS,CAACa,OAAOjB,EAAE;oBACxB;oBAEA,iCAAiC;oBACjC,MAAKzB,IAAI,CAAC4B,eAAe,CAAC;wBACxB,IAAI;4BACFgB,IAAAA,sBAAM,EAACF,OAAOT,QAAQ;wBACxB,EAAE,OAAOY,IAAI;wBACX,UAAU,GACZ;oBACF;oBAEA3B;gBACF;YACF;QACF;QAEA,2BAA2B;QAC3BV,MAAMS,KAAK,CAAC,SAACC;YACX,IAAI,MAAKK,IAAI,IAAIb,WAAW;YAC5B,IAAI,CAACC,eAAe,OAAOO,GAAG,IAAIyB,MAAM;YAExC,IAAI;gBACF,IAAMpD,SAAS,IAAIuD,gCAAc,CAACnC;gBAClCpB,OAAOwD,KAAK;gBACZ,MAAKC,SAAS,GAAG,IAAI1D,cAAcC;gBACnC2B;YACF,EAAE,OAAO+B,UAAU;gBACjB/B,GAAG+B;YACL;QACF;QAEA,mBAAmB;QACnBzC,MAAM0C,KAAK,CAAC,SAAC/C;YACX,MAAKU,UAAU,CAACsC,MAAM,CAACvC;YACvB,IAAI,MAAKW,IAAI,IAAIb,WAAW;YAC5BP,MAAM,MAAKG,GAAG,CAACH,OAAO,MAAKW,IAAI,CAACsC,oBAAS;QAC3C;;;iBAjGiB/D;IAoGnBiB,OAAAA,GASC,GATDA,SAAAA,IAAIH,GAAW;QACb,IAAI,IAAI,CAACH,IAAI,EAAE;YACb,IAAMA,OAAO,IAAI,CAACA,IAAI;YACtB,IAAI,CAACA,IAAI,GAAG,MAAM,8CAA8C;YAChEA,KAAKG,GAAG,GAAGA;YACXH,KAAKqD,OAAO;QACd;QACA,yDAAyD;QACzD,IAAI,CAACL,SAAS,GAAG;IACnB;IAEA;;;;;;GAMC,GACDM,OAAAA,SAmBC,GAnBDA,SAAAA;QACE,IAAI,CAAC,IAAI,CAACN,SAAS,EAAE,OAAO;QAC5B,IAAMzD,SAAS,AAAC,IAAI,CAACyD,SAAS,CAA0BnD,SAAS;QACjE,IAAI,CAACN,QAAQ,OAAO;QAEpB,IAAME,UAAUF,OAAOG,UAAU;QACjC,IAAM6D,iBAA6C,CAAC;QAEpD,IAAK,IAAIC,IAAI,GAAGA,IAAI/D,QAAQG,MAAM,EAAE4D,IAAK;YACvC,IAAMC,cAAchE,OAAO,CAAC+D,EAAE,CAACE,YAAY;YAC3C,IAAID,eAAe,KAAKF,cAAc,CAACE,YAAY,KAAKE,WAAW;gBACjEJ,cAAc,CAACE,YAAY,GAAGlE,OAAOqE,eAAe,CAACH;gBACrD,IAAIF,cAAc,CAACE,YAAY,EAAE;oBAC/B,OAAO;gBACT;YACF;QACF;QAEA,OAAO;IACT;IAEA;;;;;;;;;;;GAWC,GACDI,OAAAA,iBAuBC,GAvBDA,SAAAA;QACE,IAAI,CAAC,IAAI,CAACb,SAAS,EAAE,OAAO,EAAE;QAC9B,IAAMzD,SAAS,AAAC,IAAI,CAACyD,SAAS,CAA0BnD,SAAS;QACjE,IAAI,CAACN,QAAQ,OAAO,EAAE;QAEtB,IAAME,UAAUF,OAAOG,UAAU;QAEjC,6CAA6C;QAC7C,IAAMoE,SAA0B,EAAE;QAClC,IAAK,IAAIN,IAAI,GAAGA,IAAI/D,QAAQG,MAAM,EAAE4D,IAAK;YACvCM,OAAOhD,IAAI,CAACrB,OAAO,CAAC+D,EAAE;QACxB;QAEAM,OAAOC,IAAI,CAAC,SAACC,GAAGC;YACd,wBAAwB;YACxB,IAAID,EAAEN,YAAY,KAAKO,EAAEP,YAAY,EAAE;gBACrC,OAAOM,EAAEN,YAAY,GAAGO,EAAEP,YAAY;YACxC;YACA,qCAAqC;YACrC,OAAOM,EAAEE,oBAAoB,GAAGD,EAAEC,oBAAoB;QACxD;QAEA,OAAOJ;IACT;WA9KmBzE;EAAyBe,4BAAY"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/SevenZipIterator.ts"],"sourcesContent":["import BaseIterator, { Lock } from 'extract-base-iterator';\nimport fs from 'fs';\nimport { rmSync } from 'fs-remove-compat';\nimport path from 'path';\nimport Queue from 'queue-cb';\nimport shortHash from 'short-hash';\nimport tempSuffix from 'temp-suffix';\nimport { tmpdir } from './compat.ts';\nimport streamToSource, { type SourceResult } from './lib/streamToSource.ts';\nimport nextEntry from './nextEntry.ts';\nimport { setPassword } from './sevenz/codecs/index.ts';\nimport { type ArchiveSource, FileSource, type SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\n\nimport type { Entry, ExtractOptions } from './types.ts';\n\n/**\n * Internal iterator interface for SevenZipParser entries\n * @internal\n */\ninterface SevenZipFileIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n/**\n * Iterator wrapper around SevenZipParser entries\n */\nclass EntryIterator implements SevenZipFileIterator {\n private parser: SevenZipParser;\n private entries: SevenZipEntry[];\n private index = 0;\n\n constructor(parser: SevenZipParser) {\n this.parser = parser;\n this.entries = parser.getEntries();\n }\n\n next(): SevenZipEntry | null {\n if (this.index >= this.entries.length) {\n return null;\n }\n return this.entries[this.index++];\n }\n\n getParser(): SevenZipParser {\n return this.parser;\n }\n}\n\nexport default class SevenZipIterator extends BaseIterator<Entry> {\n lock: Lock | null;\n /** @internal - Do not use directly */\n _iterator: unknown;\n\n constructor(source: string | NodeJS.ReadableStream, options: ExtractOptions = {}) {\n super(options);\n this.lock = new Lock();\n this.lock.onDestroy = (err) => BaseIterator.prototype.end.call(this, err);\n const queue = new Queue(1);\n let cancelled = false;\n let archiveSource: ArchiveSource | null = null;\n const setup = (): void => {\n cancelled = true;\n };\n this.processing.push(setup);\n\n // Set password (or clear if not provided)\n setPassword(options.password || null);\n\n if (typeof source === 'string') {\n // File path input - use FileSource directly\n queue.defer((cb: (err?: Error) => void) => {\n fs.stat(source, (statErr, stats) => {\n if (this.done || cancelled) return;\n if (statErr) return cb(statErr);\n\n fs.open(source, 'r', (err, fd) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n\n archiveSource = new FileSource(fd, stats.size);\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(fd);\n });\n cb();\n });\n });\n });\n } else {\n // Stream input - write to temp file for random access\n // Register cleanup for source stream\n const stream = source as NodeJS.ReadableStream;\n this.lock.registerCleanup(() => {\n const s = stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') s.destroy();\n });\n\n const tempPath = path.join(tmpdir(), '7z-iterator', shortHash(process.cwd()), tempSuffix('tmp.7z'));\n queue.defer((cb: (err?: Error) => void) => {\n streamToSource(source, { tempPath }, (err?: Error, result?: SourceResult) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n if (!result) return cb(new Error('No result from streamToSource'));\n\n archiveSource = result.source;\n\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(result.fd);\n });\n\n // Register cleanup for temp file\n this.lock.registerCleanup(() => {\n 
try {\n rmSync(result.tempPath);\n } catch (_e) {\n /* ignore */\n }\n });\n\n cb();\n });\n });\n }\n\n // Parse and build iterator\n queue.defer((cb: (err?: Error) => void) => {\n if (this.done || cancelled) return;\n if (!archiveSource) return cb(new Error('No archive source'));\n\n const parser = new SevenZipParser(archiveSource);\n parser.parse((parseErr) => {\n if (parseErr) {\n cb(parseErr);\n return;\n }\n try {\n this._iterator = new EntryIterator(parser);\n cb();\n } catch (err) {\n cb(err as Error);\n }\n });\n });\n\n // start processing\n queue.await((err?: Error) => {\n this.processing.remove(setup);\n if (this.done || cancelled) return;\n err ? this.end(err) : this.push(nextEntry);\n });\n }\n\n end(err?: Error) {\n if (this.lock) {\n const lock = this.lock;\n this.lock = null; // Clear before release to prevent re-entrancy\n lock.err = err;\n lock.release();\n }\n // Don't call base end here - Lock.__destroy() handles it\n this._iterator = null;\n }\n\n /**\n * Check if streaming extraction is available for any folder in this archive.\n * Streaming is possible when folders use codecs like BZip2, Deflate, or Copy\n * that can decompress incrementally without buffering the entire input.\n *\n * @returns true if at least one folder supports streaming\n */\n canStream(): boolean {\n if (!this._iterator) return false;\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return false;\n\n const entries = parser.getEntries();\n const checkedFolders: { [key: number]: boolean } = {};\n\n for (let i = 0; i < entries.length; i++) {\n const folderIndex = entries[i]._folderIndex;\n if (folderIndex >= 0 && checkedFolders[folderIndex] === undefined) {\n checkedFolders[folderIndex] = parser.canStreamFolder(folderIndex);\n if (checkedFolders[folderIndex]) {\n return true;\n }\n }\n }\n\n return false;\n }\n\n /**\n * Get entries sorted for optimal streaming extraction.\n *\n * Entries are sorted by:\n * 1. Folder index (process one folder at a time)\n * 2. 
Stream index within folder (for solid block streaming)\n *\n * This ordering allows multi-file solid folders to stream with\n * O(largest file) memory instead of O(folder size).\n *\n * @returns Array of entries in streaming order\n */\n getStreamingOrder(): SevenZipEntry[] {\n if (!this._iterator) return [];\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return [];\n\n const entries = parser.getEntries();\n\n // Create a copy and sort for streaming order\n const sorted: SevenZipEntry[] = [];\n for (let i = 0; i < entries.length; i++) {\n sorted.push(entries[i]);\n }\n\n sorted.sort((a, b) => {\n // First by folder index\n if (a._folderIndex !== b._folderIndex) {\n return a._folderIndex - b._folderIndex;\n }\n // Then by stream index within folder\n return a._streamIndexInFolder - b._streamIndexInFolder;\n });\n\n return sorted;\n }\n}\n"],"names":["SevenZipIterator","EntryIterator","parser","index","entries","getEntries","next","length","getParser","source","options","lock","Lock","onDestroy","err","BaseIterator","prototype","end","call","queue","Queue","cancelled","archiveSource","setup","processing","push","setPassword","password","defer","cb","fs","stat","statErr","stats","done","open","fd","FileSource","size","registerCleanup","closeSync","stream","s","destroy","tempPath","path","join","tmpdir","shortHash","process","cwd","tempSuffix","streamToSource","result","Error","rmSync","_e","SevenZipParser","parse","parseErr","_iterator","await","remove","nextEntry","release","canStream","checkedFolders","i","folderIndex","_folderIndex","undefined","canStreamFolder","getStreamingOrder","sorted","sort","a","b","_streamIndexInFolder"],"mappings":";;;;;;;eAiDqBA;;;2EAjDc;yDACpB;8BACQ;2DACN;8DACC;gEACI;iEACC;wBACA;uEAC2B;kEAC5B;uBACM;gCACuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAanF;;CAEC,GACD,IAAA,AAAMC,8BAAN;;aAAMA,cAKQC,MAAsB;gCAL9BD;aAGIE,QAAQ;QAGd,IAAI,CAACD,MAAM,GAAGA;QACd,IAAI,CAACE,OAAO,GAAGF,OAAOG,UAAU;;iBAP9BJ;IAUJK,OAAAA,IAKC,GALDA,SAAAA;QACE,IAAI,IAAI,CAACH,KAAK,IAAI,IAAI,CAACC,OAAO,CAACG,MAAM,EAAE;YACrC,OAAO;QACT;QACA,OAAO,IAAI,CAACH,OAAO,CAAC,IAAI,CAACD,KAAK,GAAG;IACnC;IAEAK,OAAAA,SAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACN,MAAM;IACpB;WAnBID;;AAsBS,IAAA,AAAMD,iCAAN;;cAAMA;aAAAA,iBAKPS,MAAsC;YAAEC,UAAAA,iEAA0B,CAAC;gCAL5DV;;gBAMjB,kBANiBA;YAMXU;;QACN,MAAKC,IAAI,GAAG,IAAIC,yBAAI;QACpB,MAAKD,IAAI,CAACE,SAAS,GAAG,SAACC;mBAAQC,4BAAY,CAACC,SAAS,CAACC,GAAG,CAACC,IAAI,QAAOJ;;QACrE,IAAMK,QAAQ,IAAIC,gBAAK,CAAC;QACxB,IAAIC,YAAY;QAChB,IAAIC,gBAAsC;QAC1C,IAAMC,QAAQ;YACZF,YAAY;QACd;QACA,MAAKG,UAAU,CAACC,IAAI,CAACF;QAErB,0CAA0C;QAC1CG,IAAAA,oBAAW,EAAChB,QAAQiB,QAAQ,IAAI;QAEhC,IAAI,OAAOlB,WAAW,UAAU;YAC9B,4CAA4C;YAC5CU,MAAMS,KAAK,CAAC,SAACC;gBACXC,WAAE,CAACC,IAAI,CAACtB,QAAQ,SAACuB,SAASC;oBACxB,IAAI,MAAKC,IAAI,IAAIb,WAAW;oBAC5B,IAAIW,SAAS,OAAOH,GAAGG;oBAEvBF,WAAE,CAACK,IAAI,CAAC1B,QAAQ,KAAK,SAACK,KAAKsB;wBACzB,IAAI,MAAKF,IAAI,IAAIb,WAAW;wBAC5B,IAAIP,KAAK,OAAOe,GAAGf;wBAEnBQ,gBAAgB,IAAIe,4BAAU,CAACD,IAAIH,MAAMK,IAAI;wBAC7C,uCAAuC;wBACvC,MAAK3B,IAAI,CAAC4B,eAAe,CAAC;4BACxBT,WAAE,CAACU,SAAS,CAACJ;wBACf;wBACAP;oBACF;gBACF;YACF;QACF,OAAO;YACL,sDAAsD;YACtD,qCAAqC;YACrC,IAAMY,SAAShC;YACf,MAAKE,IAAI,CAAC4B,eAAe,CAAC;gBACxB,IAAMG,IAAID;gBACV,IAAI,OAAOC,EAAEC,OAAO,KAAK,YAAYD,EAAEC,OAAO;YAChD;YAEA,IAAMC,WAAWC,aAAI,CAACC,IAAI,CAACC,IAAAA,gBAAM,KAAI,eAAeC,IAAAA,kBAAS,EAACC,QAAQC,GAAG,KAAKC,IAAAA,mBAAU,EAAC;YACzFhC,MAAMS,KAAK,CAAC,SAACC;gBACXuB,IAAAA,yBAAc,EAAC3C,QAAQ;oBAAEmC,UAAAA;gBAAS,GAAG,SAAC9B,KAAauC;oBACjD,IA
AI,MAAKnB,IAAI,IAAIb,WAAW;oBAC5B,IAAIP,KAAK,OAAOe,GAAGf;oBACnB,IAAI,CAACuC,QAAQ,OAAOxB,GAAG,IAAIyB,MAAM;oBAEjChC,gBAAgB+B,OAAO5C,MAAM;oBAE7B,uCAAuC;oBACvC,MAAKE,IAAI,CAAC4B,eAAe,CAAC;wBACxBT,WAAE,CAACU,SAAS,CAACa,OAAOjB,EAAE;oBACxB;oBAEA,iCAAiC;oBACjC,MAAKzB,IAAI,CAAC4B,eAAe,CAAC;wBACxB,IAAI;4BACFgB,IAAAA,sBAAM,EAACF,OAAOT,QAAQ;wBACxB,EAAE,OAAOY,IAAI;wBACX,UAAU,GACZ;oBACF;oBAEA3B;gBACF;YACF;QACF;QAEA,2BAA2B;QAC3BV,MAAMS,KAAK,CAAC,SAACC;YACX,IAAI,MAAKK,IAAI,IAAIb,WAAW;YAC5B,IAAI,CAACC,eAAe,OAAOO,GAAG,IAAIyB,MAAM;YAExC,IAAMpD,SAAS,IAAIuD,gCAAc,CAACnC;YAClCpB,OAAOwD,KAAK,CAAC,SAACC;gBACZ,IAAIA,UAAU;oBACZ9B,GAAG8B;oBACH;gBACF;gBACA,IAAI;oBACF,MAAKC,SAAS,GAAG,IAAI3D,cAAcC;oBACnC2B;gBACF,EAAE,OAAOf,KAAK;oBACZe,GAAGf;gBACL;YACF;QACF;QAEA,mBAAmB;QACnBK,MAAM0C,KAAK,CAAC,SAAC/C;YACX,MAAKU,UAAU,CAACsC,MAAM,CAACvC;YACvB,IAAI,MAAKW,IAAI,IAAIb,WAAW;YAC5BP,MAAM,MAAKG,GAAG,CAACH,OAAO,MAAKW,IAAI,CAACsC,oBAAS;QAC3C;;;iBAtGiB/D;IAyGnBiB,OAAAA,GASC,GATDA,SAAAA,IAAIH,GAAW;QACb,IAAI,IAAI,CAACH,IAAI,EAAE;YACb,IAAMA,OAAO,IAAI,CAACA,IAAI;YACtB,IAAI,CAACA,IAAI,GAAG,MAAM,8CAA8C;YAChEA,KAAKG,GAAG,GAAGA;YACXH,KAAKqD,OAAO;QACd;QACA,yDAAyD;QACzD,IAAI,CAACJ,SAAS,GAAG;IACnB;IAEA;;;;;;GAMC,GACDK,OAAAA,SAmBC,GAnBDA,SAAAA;QACE,IAAI,CAAC,IAAI,CAACL,SAAS,EAAE,OAAO;QAC5B,IAAM1D,SAAS,AAAC,IAAI,CAAC0D,SAAS,CAA0BpD,SAAS;QACjE,IAAI,CAACN,QAAQ,OAAO;QAEpB,IAAME,UAAUF,OAAOG,UAAU;QACjC,IAAM6D,iBAA6C,CAAC;QAEpD,IAAK,IAAIC,IAAI,GAAGA,IAAI/D,QAAQG,MAAM,EAAE4D,IAAK;YACvC,IAAMC,cAAchE,OAAO,CAAC+D,EAAE,CAACE,YAAY;YAC3C,IAAID,eAAe,KAAKF,cAAc,CAACE,YAAY,KAAKE,WAAW;gBACjEJ,cAAc,CAACE,YAAY,GAAGlE,OAAOqE,eAAe,CAACH;gBACrD,IAAIF,cAAc,CAACE,YAAY,EAAE;oBAC/B,OAAO;gBACT;YACF;QACF;QAEA,OAAO;IACT;IAEA;;;;;;;;;;;GAWC,GACDI,OAAAA,iBAuBC,GAvBDA,SAAAA;QACE,IAAI,CAAC,IAAI,CAACZ,SAAS,EAAE,OAAO,EAAE;QAC9B,IAAM1D,SAAS,AAAC,IAAI,CAAC0D,SAAS,CAA0BpD,SAAS;QACjE,IAAI,CAACN,QAAQ,OAAO,EAAE;QAEtB,IAAME,UAAUF,OAAOG,UAAU;QAEjC,6CAA6C;QAC7C,IAAMoE,SAA0B,EAAE;QAClC,IAAK,IAAIN,IAAI,GAAGA,IAAI/D,QAAQG,MAAM,EAAE4D,IAAK;YACvCM,OAAOhD,IAAI,CAACrB,OAAO,CAAC+D,EAAE;QACxB;QAEAM,OAAOC,IAAI,CAAC,SAACC,GAAGC;YACd,wBAAwB;YACxB,IAAID,EAAEN,YAAY,KAAKO,EAAEP,YAAY,EAAE;gBACrC,OAAOM,EAAEN,YAAY,GAAGO,EAAEP,YAAY;YACxC;YACA,qCAAqC;YACrC,OAAOM,EAAEE,oBAAoB,GAAGD,EAAEC,oBAAoB;QACxD;QAEA,OAAOJ;IACT;WAnLmBzE;EAAyBe,4BAAY"}
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/compat.ts"],"sourcesContent":["/**\n * Compatibility Layer for Node.js 0.8+\n * Local to this package - contains only needed functions.\n */\nimport os from 'os';\n\nexport function tmpdir(): string {\n return typeof os.tmpdir === 'function' ? os.tmpdir() : require('os-shim').tmpdir();\n}\n"],"names":["tmpdir","os","require"],"mappings":"AAAA;;;CAGC;;;;+BAGeA;;;eAAAA;;;yDAFD;;;;;;AAER,SAASA;IACd,OAAO,OAAOC,WAAE,CAACD,MAAM,KAAK,aAAaC,WAAE,CAACD,MAAM,KAAKE,QAAQ,WAAWF,MAAM;AAClF"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/compat.ts"],"sourcesContent":["/**\n * Compatibility Layer for Node.js 0.8+\n * Local to this package - contains only needed functions.\n */\nimport os from 'os';\n\nexport function tmpdir(): string {\n return typeof os.tmpdir === 'function' ? os.tmpdir() : require('os-shim').tmpdir();\n}\n"],"names":["tmpdir","os","require"],"mappings":"AAAA;;;CAGC;;;;+BAGeA;;;eAAAA;;;yDAFD;;;;;;AAER,SAASA;IACd,OAAO,OAAOC,WAAE,CAACD,MAAM,KAAK,aAAaC,WAAE,CAACD,MAAM,KAAKE,QAAQ,WAAWF,MAAM;AAClF"}
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/index.ts"],"sourcesContent":["// 7z-iterator - high-level 7z archive iterator\n\nexport * from './FileEntry.ts';\nexport * from './nextEntry.ts';\nexport { default } from './SevenZipIterator.ts';\nexport * from './sevenz/index.ts';\nexport * from './types.ts';\n"],"names":["default"],"mappings":"AAAA,+CAA+C;;;;;+BAItCA;;;eAAAA,2BAAO;;;qBAFF;qBACA;yEACU;qBACV;qBACA"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/index.ts"],"sourcesContent":["// 7z-iterator - high-level 7z archive iterator\n\nexport * from './FileEntry.ts';\nexport * from './nextEntry.ts';\nexport { default } from './SevenZipIterator.ts';\nexport * from './sevenz/index.ts';\nexport * from './types.ts';\n"],"names":["default"],"mappings":"AAAA,+CAA+C;;;;;+BAItCA;;;eAAAA,2BAAO;;;qBAFF;qBACA;yEACU;qBACV;qBACA"}
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lib/defer.ts"],"sourcesContent":["// Yield to I/O: runs after pending I/O callbacks complete\n// setImmediate (Node 0.10+) or setTimeout fallback (Node 0.8)\n// Use this when other code may have scheduled I/O that must run first\n// For \"avoid Zalgo\" (just need async), use process.nextTick instead\nexport const defer: (fn: () => void) => void = typeof setImmediate !== 'undefined' ? setImmediate : (fn) => setTimeout(fn, 0);\n"],"names":["defer","setImmediate","fn","setTimeout"],"mappings":"AAAA,0DAA0D;AAC1D,8DAA8D;AAC9D,sEAAsE;AACtE,oEAAoE;;;;;+BACvDA;;;eAAAA;;;AAAN,IAAMA,QAAkC,OAAOC,iBAAiB,cAAcA,eAAe,SAACC;WAAOC,WAAWD,IAAI"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/lib/defer.ts"],"sourcesContent":["// Yield to I/O: runs after pending I/O callbacks complete\n// setImmediate (Node 0.10+) or setTimeout fallback (Node 0.8)\n// Use this when other code may have scheduled I/O that must run first\n// For \"avoid Zalgo\" (just need async), use process.nextTick instead\nexport const defer: (fn: () => void) => void = typeof setImmediate !== 'undefined' ? setImmediate : (fn) => setTimeout(fn, 0);\n"],"names":["defer","setImmediate","fn","setTimeout"],"mappings":"AAAA,0DAA0D;AAC1D,8DAA8D;AAC9D,sEAAsE;AACtE,oEAAoE;;;;;+BACvDA;;;eAAAA;;;AAAN,IAAMA,QAAkC,OAAOC,iBAAiB,cAAcA,eAAe,SAACC;WAAOC,WAAWD,IAAI"}
@@ -0,0 +1,5 @@
+ export type DecodeCallback<T = Buffer> = (error: Error | null, result?: T) => void;
+ type Executor<T> = (callback: DecodeCallback<T>) => void;
+ export declare function runDecode<T>(executor: Executor<T>, callback?: DecodeCallback<T>): Promise<T> | void;
+ export declare function runSync<T>(fn: () => T, callback: DecodeCallback<T>): void;
+ export {};
@@ -0,0 +1,5 @@
+ export type DecodeCallback<T = Buffer> = (error: Error | null, result?: T) => void;
+ type Executor<T> = (callback: DecodeCallback<T>) => void;
+ export declare function runDecode<T>(executor: Executor<T>, callback?: DecodeCallback<T>): Promise<T> | void;
+ export declare function runSync<T>(fn: () => T, callback: DecodeCallback<T>): void;
+ export {};
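The runDecode helper declared above (and implemented in the CJS/ESM builds below) takes an executor and either forwards to a caller-supplied error-first callback or, when no callback is given, returns a Promise, throwing on runtimes without Promise support. A small usage sketch based only on these declarations; decodeBlock is a hypothetical executor written for illustration, and the relative import path mirrors the internal source layout shown in the source maps rather than a documented public entry point:

    import { runDecode, type DecodeCallback } from './lib/runDecode.ts';

    // Hypothetical executor: produces a Buffer and reports it through the callback.
    const decodeBlock = (cb: DecodeCallback<Buffer>) => {
      cb(null, Buffer.from('decoded bytes'));
    };

    // Callback style - no Promise required, suitable for very old Node versions.
    runDecode(decodeBlock, (err, buf) => {
      if (err) return console.error(err);
      console.log('callback result:', buf && buf.length);
    });

    // Promise style - callback omitted, so runDecode returns a Promise.
    (runDecode(decodeBlock) as Promise<Buffer>).then((buf) => {
      console.log('promise result:', buf.length);
    });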
@@ -0,0 +1,55 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ function _export(target, all) {
+     for(var name in all)Object.defineProperty(target, name, {
+         enumerable: true,
+         get: Object.getOwnPropertyDescriptor(all, name).get
+     });
+ }
+ _export(exports, {
+     get runDecode () {
+         return runDecode;
+     },
+     get runSync () {
+         return runSync;
+     }
+ });
+ var _calloncefn = /*#__PURE__*/ _interop_require_default(require("call-once-fn"));
+ function _interop_require_default(obj) {
+     return obj && obj.__esModule ? obj : {
+         default: obj
+     };
+ }
+ var schedule = typeof setImmediate === 'function' ? setImmediate : function(fn) {
+     return process.nextTick(fn);
+ };
+ function runDecode(executor, callback) {
+     if (typeof callback === 'function') {
+         executor((0, _calloncefn.default)(callback));
+         return;
+     }
+     if (typeof Promise === 'undefined') {
+         throw new Error('Promises are not available in this runtime. Please provide a callback.');
+     }
+     return new Promise(function(resolve, reject) {
+         executor((0, _calloncefn.default)(function(err, value) {
+             if (err) {
+                 reject(err);
+                 return;
+             }
+             resolve(value);
+         }));
+     });
+ }
+ function runSync(fn, callback) {
+     schedule(function() {
+         try {
+             callback(null, fn());
+         } catch (err) {
+             callback(err);
+         }
+     });
+ }
+ /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
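runSync, shown above, defers a synchronous producer onto setImmediate (falling back to process.nextTick on runtimes that lack it), so both the result and any thrown error reach the caller asynchronously through the same error-first callback shape as runDecode. A brief illustration, assuming runSync from the file above is in scope; zlib.inflateRawSync stands in for an arbitrary synchronous decode step, and compressed is a Buffer assumed to be supplied by the caller:

    import zlib from 'zlib';
    import { runSync } from './lib/runDecode.ts';

    declare const compressed: Buffer; // assumed input for this sketch

    // The synchronous call runs on the next immediate/tick; a throw becomes err.
    runSync(() => zlib.inflateRawSync(compressed), (err, decoded) => {
      if (err) return console.error('decode failed:', err);
      console.log('decoded length:', decoded && decoded.length);
    });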
@@ -0,0 +1 @@
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/lib/runDecode.ts"],"sourcesContent":["import once from 'call-once-fn';\n\nexport type DecodeCallback<T = Buffer> = (error: Error | null, result?: T) => void;\n\ntype Executor<T> = (callback: DecodeCallback<T>) => void;\n\nconst schedule = typeof setImmediate === 'function' ? setImmediate : (fn: () => void) => process.nextTick(fn);\n\nexport function runDecode<T>(executor: Executor<T>, callback?: DecodeCallback<T>): Promise<T> | void {\n if (typeof callback === 'function') {\n executor(once(callback));\n return;\n }\n\n if (typeof Promise === 'undefined') {\n throw new Error('Promises are not available in this runtime. Please provide a callback.');\n }\n\n return new Promise<T>((resolve, reject) => {\n executor(\n once((err, value) => {\n if (err) {\n reject(err);\n return;\n }\n resolve(value as T);\n })\n );\n });\n}\n\nexport function runSync<T>(fn: () => T, callback: DecodeCallback<T>): void {\n schedule(() => {\n try {\n callback(null, fn());\n } catch (err) {\n callback(err as Error);\n }\n });\n}\n"],"names":["runDecode","runSync","schedule","setImmediate","fn","process","nextTick","executor","callback","once","Promise","Error","resolve","reject","err","value"],"mappings":";;;;;;;;;;;QAQgBA;eAAAA;;QAuBAC;eAAAA;;;iEA/BC;;;;;;AAMjB,IAAMC,WAAW,OAAOC,iBAAiB,aAAaA,eAAe,SAACC;WAAmBC,QAAQC,QAAQ,CAACF;;AAEnG,SAASJ,UAAaO,QAAqB,EAAEC,QAA4B;IAC9E,IAAI,OAAOA,aAAa,YAAY;QAClCD,SAASE,IAAAA,mBAAI,EAACD;QACd;IACF;IAEA,IAAI,OAAOE,YAAY,aAAa;QAClC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAO,IAAID,QAAW,SAACE,SAASC;QAC9BN,SACEE,IAAAA,mBAAI,EAAC,SAACK,KAAKC;YACT,IAAID,KAAK;gBACPD,OAAOC;gBACP;YACF;YACAF,QAAQG;QACV;IAEJ;AACF;AAEO,SAASd,QAAWG,EAAW,EAAEI,QAA2B;IACjEN,SAAS;QACP,IAAI;YACFM,SAAS,MAAMJ;QACjB,EAAE,OAAOU,KAAK;YACZN,SAASM;QACX;IACF;AACF"}
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lib/streamToSource.ts"],"sourcesContent":["// Stream to source conversion: writes stream to temp file for random access\nimport once from 'call-once-fn';\nimport { bufferFrom } from 'extract-base-iterator';\nimport fs from 'fs';\nimport mkdirp from 'mkdirp-classic';\nimport oo from 'on-one';\nimport path from 'path';\nimport { FileSource } from '../sevenz/SevenZipParser.ts';\n\nexport interface StreamToSourceOptions {\n tempPath: string;\n}\n\nexport interface SourceResult {\n source: FileSource;\n fd: number; // Caller must close\n tempPath: string; // Caller must clean up\n}\n\nexport type Callback = (error?: Error, result?: SourceResult) => void;\n\n/**\n * Convert a stream to a FileSource by writing to temp file\n *\n * 7z format requires random access for header parsing, so temp file is necessary for streams.\n * Writes directly to temp file for predictable O(1) memory usage during stream consumption.\n */\nexport default function streamToSource(stream: NodeJS.ReadableStream, options: StreamToSourceOptions, callback: Callback): void {\n const tempPath = options.tempPath;\n\n const end = once(callback);\n\n mkdirp.sync(path.dirname(tempPath));\n const writeStream = fs.createWriteStream(tempPath);\n\n function onData(chunk: Buffer | string): void {\n const buf = typeof chunk === 'string' ? bufferFrom(chunk) : chunk;\n writeStream.write(buf);\n }\n\n function onEnd(): void {\n writeStream.end(() => {\n fs.open(tempPath, 'r', (err, fd) => {\n if (err) return end(err);\n fs.stat(tempPath, (statErr, stats) => {\n if (statErr) {\n fs.closeSync(fd);\n return end(statErr);\n }\n end(null, {\n source: new FileSource(fd, stats.size),\n fd: fd,\n tempPath: tempPath,\n });\n });\n });\n });\n }\n\n function onError(err: Error): void {\n writeStream.end();\n end(err);\n }\n\n stream.on('data', onData);\n oo(stream, ['error'], onError);\n oo(stream, ['end', 'close', 'finish'], onEnd);\n}\n"],"names":["streamToSource","stream","options","callback","tempPath","end","once","mkdirp","sync","path","dirname","writeStream","fs","createWriteStream","onData","chunk","buf","bufferFrom","write","onEnd","open","err","fd","stat","statErr","stats","closeSync","source","FileSource","size","onError","on","oo"],"mappings":"AAAA,4EAA4E;;;;;+BAqB5E;;;;;CAKC,GACD;;;eAAwBA;;;iEA1BP;mCACU;yDACZ;oEACI;4DACJ;2DACE;gCACU;;;;;;AAoBZ,SAASA,eAAeC,MAA6B,EAAEC,OAA8B,EAAEC,QAAkB;IACtH,IAAMC,WAAWF,QAAQE,QAAQ;IAEjC,IAAMC,MAAMC,IAAAA,mBAAI,EAACH;IAEjBI,sBAAM,CAACC,IAAI,CAACC,aAAI,CAACC,OAAO,CAACN;IACzB,IAAMO,cAAcC,WAAE,CAACC,iBAAiB,CAACT;IAEzC,SAASU,OAAOC,KAAsB;QACpC,IAAMC,MAAM,OAAOD,UAAU,WAAWE,IAAAA,+BAAU,EAACF,SAASA;QAC5DJ,YAAYO,KAAK,CAACF;IACpB;IAEA,SAASG;QACPR,YAAYN,GAAG,CAAC;YACdO,WAAE,CAACQ,IAAI,CAAChB,UAAU,KAAK,SAACiB,KAAKC;gBAC3B,IAAID,KAAK,OAAOhB,IAAIgB;gBACpBT,WAAE,CAACW,IAAI,CAACnB,UAAU,SAACoB,SAASC;oBAC1B,IAAID,SAAS;wBACXZ,WAAE,CAACc,SAAS,CAACJ;wBACb,OAAOjB,IAAImB;oBACb;oBACAnB,IAAI,MAAM;wBACRsB,QAAQ,IAAIC,4BAAU,CAACN,IAAIG,MAAMI,IAAI;wBACrCP,IAAIA;wBACJlB,UAAUA;oBACZ;gBACF;YACF;QACF;IACF;IAEA,SAAS0B,QAAQT,GAAU;QACzBV,YAAYN,GAAG;QACfA,IAAIgB;IACN;IAEApB,OAAO8B,EAAE,CAAC,QAAQjB;IAClBkB,IAAAA,cAAE,EAAC/B,QAAQ;QAAC;KAAQ,EAAE6B;IACtBE,IAAAA,cAAE,EAAC/B,QAAQ;QAAC;QAAO;QAAS;KAAS,EAAEkB;AACzC"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/lib/streamToSource.ts"],"sourcesContent":["// Stream to source conversion: writes stream to temp file for random access\nimport once from 'call-once-fn';\nimport { bufferFrom } from 'extract-base-iterator';\nimport fs from 'fs';\nimport mkdirp from 'mkdirp-classic';\nimport oo from 'on-one';\nimport path from 'path';\nimport { FileSource } from '../sevenz/SevenZipParser.ts';\n\nexport interface StreamToSourceOptions {\n tempPath: string;\n}\n\nexport interface SourceResult {\n source: FileSource;\n fd: number; // Caller must close\n tempPath: string; // Caller must clean up\n}\n\nexport type Callback = (error?: Error, result?: SourceResult) => void;\n\n/**\n * Convert a stream to a FileSource by writing to temp file\n *\n * 7z format requires random access for header parsing, so temp file is necessary for streams.\n * Writes directly to temp file for predictable O(1) memory usage during stream consumption.\n */\nexport default function streamToSource(stream: NodeJS.ReadableStream, options: StreamToSourceOptions, callback: Callback): void {\n const tempPath = options.tempPath;\n\n const end = once(callback);\n\n mkdirp.sync(path.dirname(tempPath));\n const writeStream = fs.createWriteStream(tempPath);\n\n function onData(chunk: Buffer | string): void {\n const buf = typeof chunk === 'string' ? bufferFrom(chunk) : chunk;\n writeStream.write(buf);\n }\n\n function onEnd(): void {\n writeStream.end(() => {\n fs.open(tempPath, 'r', (err, fd) => {\n if (err) return end(err);\n fs.stat(tempPath, (statErr, stats) => {\n if (statErr) {\n fs.closeSync(fd);\n return end(statErr);\n }\n end(null, {\n source: new FileSource(fd, stats.size),\n fd: fd,\n tempPath: tempPath,\n });\n });\n });\n });\n }\n\n function onError(err: Error): void {\n writeStream.end();\n end(err);\n }\n\n stream.on('data', onData);\n oo(stream, ['error'], onError);\n oo(stream, ['end', 'close', 'finish'], onEnd);\n}\n"],"names":["streamToSource","stream","options","callback","tempPath","end","once","mkdirp","sync","path","dirname","writeStream","fs","createWriteStream","onData","chunk","buf","bufferFrom","write","onEnd","open","err","fd","stat","statErr","stats","closeSync","source","FileSource","size","onError","on","oo"],"mappings":"AAAA,4EAA4E;;;;;+BAqB5E;;;;;CAKC,GACD;;;eAAwBA;;;iEA1BP;mCACU;yDACZ;oEACI;4DACJ;2DACE;gCACU;;;;;;AAoBZ,SAASA,eAAeC,MAA6B,EAAEC,OAA8B,EAAEC,QAAkB;IACtH,IAAMC,WAAWF,QAAQE,QAAQ;IAEjC,IAAMC,MAAMC,IAAAA,mBAAI,EAACH;IAEjBI,sBAAM,CAACC,IAAI,CAACC,aAAI,CAACC,OAAO,CAACN;IACzB,IAAMO,cAAcC,WAAE,CAACC,iBAAiB,CAACT;IAEzC,SAASU,OAAOC,KAAsB;QACpC,IAAMC,MAAM,OAAOD,UAAU,WAAWE,IAAAA,+BAAU,EAACF,SAASA;QAC5DJ,YAAYO,KAAK,CAACF;IACpB;IAEA,SAASG;QACPR,YAAYN,GAAG,CAAC;YACdO,WAAE,CAACQ,IAAI,CAAChB,UAAU,KAAK,SAACiB,KAAKC;gBAC3B,IAAID,KAAK,OAAOhB,IAAIgB;gBACpBT,WAAE,CAACW,IAAI,CAACnB,UAAU,SAACoB,SAASC;oBAC1B,IAAID,SAAS;wBACXZ,WAAE,CAACc,SAAS,CAACJ;wBACb,OAAOjB,IAAImB;oBACb;oBACAnB,IAAI,MAAM;wBACRsB,QAAQ,IAAIC,4BAAU,CAACN,IAAIG,MAAMI,IAAI;wBACrCP,IAAIA;wBACJlB,UAAUA;oBACZ;gBACF;YACF;QACF;IACF;IAEA,SAAS0B,QAAQT,GAAU;QACzBV,YAAYN,GAAG;QACfA,IAAIgB;IACN;IAEApB,OAAO8B,EAAE,CAAC,QAAQjB;IAClBkB,IAAAA,cAAE,EAAC/B,QAAQ;QAAC;KAAQ,EAAE6B;IACtBE,IAAAA,cAAE,EAAC/B,QAAQ;QAAC;QAAO;QAAS;KAAS,EAAEkB;AACzC"}
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n/** @internal */\ninterface InternalIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n const internalIter = iterator._iterator as InternalIterator | null;\n if (!internalIter) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = internalIter.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: entry.path.split(path.sep).filter(Boolean).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = internalIter.getParser();\n const stream = parser.getEntryStream(entry);\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser = internalIter.getParser();\n\n const stream = parser.getEntryStream(entry);\n return nextCallback(null, new FileEntry(attributes as FileAttributes, stream, iterator.lock, entry._canStream));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","internalIter","_iterator","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","split","sep","filter","Boolean","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","stream","getEntryStream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","FileEntry","_canStream"],"mappings":";;;;+BA2BA;;;eAAwBA;;;iEA3BP;mCACqG;2DACrG;kEACK;;;;;;AAwBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAMC,eAAeF,SAASG,SAAS;IACvC,IAAI,CAACD,cAAc;QACjBD,SAAS,IAAIG,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,aAAaI,IAAI;IAEzB,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOL,SAASU,IAAI,CAACX;QACzBU,MAAMR,SAASQ,OAAOR,SAAS,MAAMI,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACZ,SAASa,IAAI,IAAIb,SAASc,MAAM,MAAM,CAACT,OAAO,OAAOJ,SAAS,MAAM;QAAEU,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBf,SAASU,IAAI,CAACX;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMe,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMjB,MAAMiB,IAAI,CAACC,KAAK,CAACD,aAAI,CAACE,GAAG,EAAEC,MAAM,CAACC,SAASC,IAAI,CAACL,aAAI,CAACE,GAAG;QAC9DI,UAAUvB,MAAMwB,IAAI;QACpBV,OAAOD,UAAUY,OAAO;QACxBC,MAAM1B,MAAM0B,IAAI,KAAKC,YAAY3B,MAAM0B,IAAI,GAAGd;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAI0B,mCAAc,CAACZ;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMa,SAAShC,aAAaiC,SAAS;gBACrC,IAAMC,SAASF,OAAOG,cAAc,CAAChC;gBAErC,IAAMiC,SAAmB,EAAE;gBAE3BF,OAAOG,EAAE,CAAC,QAAQ,SAACC;oBACjBF,OAAO5B,IAAI,CAAC8B;gBACd;gBACAJ,OAAOG,EAAE,CAAC,OAAO;oBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;oBAEhD,IAAMC,iBAAiC;wBACrCvB,MAAMD,WAAWC,IAAI;wBACrBH,OAAOE,WAAWF,KAAK;wBACvBY,MAAMV,WAAWU,IAAI;wBACrBU,UAAUA;oBACZ;oBAEAlC,aAAa,MAAM,IAAIuC,sCAAiB,CAACD;gBAC3C;gBACAT,OAAOG,EAAE,CAAC,SAAS,SAACQ;oBAClBxC,aAAawC;gBACf;gBACA;YACF;QAEA,KAAK;YAAQ;gBACX1B,WAAWL,IAAI,GAAG;gBAClBK,WAAW2B,IAAI,GAAG3C,MAAM2C,IAAI;gBAC5B,IA
AMd,UAAShC,aAAaiC,SAAS;gBAErC,IAAMC,UAASF,QAAOG,cAAc,CAAChC;gBACrC,OAAOE,aAAa,MAAM,IAAI0C,oBAAS,CAAC5B,YAA8Be,SAAQpC,SAASa,IAAI,EAAER,MAAM6C,UAAU;YAC/G;IACF;IAEA,OAAOjD,SAAS,IAAIG,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n/** @internal */\ninterface InternalIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n const internalIter = iterator._iterator as InternalIterator | null;\n if (!internalIter) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = internalIter.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: entry.path.split(path.sep).filter(Boolean).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = internalIter.getParser();\n const stream = parser.getEntryStream(entry);\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser = internalIter.getParser();\n\n const stream = parser.getEntryStream(entry);\n return nextCallback(null, new FileEntry(attributes as FileAttributes, stream, iterator.lock, entry._canStream));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","internalIter","_iterator","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","split","sep","filter","Boolean","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","stream","getEntryStream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","FileEntry","_canStream"],"mappings":";;;;+BA2BA;;;eAAwBA;;;iEA3BP;mCACqG;2DACrG;kEACK;;;;;;AAwBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAMC,eAAeF,SAASG,SAAS;IACvC,IAAI,CAACD,cAAc;QACjBD,SAAS,IAAIG,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,aAAaI,IAAI;IAEzB,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOL,SAASU,IAAI,CAACX;QACzBU,MAAMR,SAASQ,OAAOR,SAAS,MAAMI,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACZ,SAASa,IAAI,IAAIb,SAASc,MAAM,MAAM,CAACT,OAAO,OAAOJ,SAAS,MAAM;QAAEU,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBf,SAASU,IAAI,CAACX;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMe,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMjB,MAAMiB,IAAI,CAACC,KAAK,CAACD,aAAI,CAACE,GAAG,EAAEC,MAAM,CAACC,SAASC,IAAI,CAACL,aAAI,CAACE,GAAG;QAC9DI,UAAUvB,MAAMwB,IAAI;QACpBV,OAAOD,UAAUY,OAAO;QACxBC,MAAM1B,MAAM0B,IAAI,KAAKC,YAAY3B,MAAM0B,IAAI,GAAGd;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAI0B,mCAAc,CAACZ;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMa,SAAShC,aAAaiC,SAAS;gBACrC,IAAMC,SAASF,OAAOG,cAAc,CAAChC;gBAErC,IAAMiC,SAAmB,EAAE;gBAE3BF,OAAOG,EAAE,CAAC,QAAQ,SAACC;oBACjBF,OAAO5B,IAAI,CAAC8B;gBACd;gBACAJ,OAAOG,EAAE,CAAC,OAAO;oBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;oBAEhD,IAAMC,iBAAiC;wBACrCvB,MAAMD,WAAWC,IAAI;wBACrBH,OAAOE,WAAWF,KAAK;wBACvBY,MAAMV,WAAWU,IAAI;wBACrBU,UAAUA;oBACZ;oBAEAlC,aAAa,MAAM,IAAIuC,sCAAiB,CAACD;gBAC3C;gBACAT,OAAOG,EAAE,CAAC,SAAS,SAACQ;oBAClBxC,aAAawC;gBACf;gBACA;YACF;QAEA,KAAK;YAAQ;gBACX1B,WAAWL,IAAI,GAAG;gBAClBK,WAAW2B,IAAI,GAAG3C,MAAM2C,IAAI;gBAC5B,IA
AMd,UAAShC,aAAaiC,SAAS;gBAErC,IAAMC,UAASF,QAAOG,cAAc,CAAChC;gBACrC,OAAOE,aAAa,MAAM,IAAI0C,oBAAS,CAAC5B,YAA8Be,SAAQpC,SAASa,IAAI,EAAER,MAAM6C,UAAU;YAC/G;IACF;IAEA,OAAOjD,SAAS,IAAIG,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
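The `nextEntry` source embedded above handles symlinks by reading the entry's data stream, whose bytes are the link target, and wrapping the result in a `SymbolicLinkEntry`. A compact sketch of that branch follows, assuming the entry's stream and its computed attributes are supplied as in the embedded code; the `readLinkEntry` helper name is hypothetical.

```ts
import { SymbolicLinkEntry, type LinkAttributes } from 'extract-base-iterator';

type Done = (err?: Error | null, link?: SymbolicLinkEntry) => void;

// For a 'link' entry, the decompressed file content IS the symlink target path.
function readLinkEntry(
  stream: NodeJS.ReadableStream,
  attributes: { path: string; mtime: number; mode: number },
  done: Done
): void {
  const chunks: Buffer[] = [];
  stream.on('data', (chunk: Buffer) => chunks.push(chunk));
  stream.on('error', (err: Error) => done(err));
  stream.on('end', () => {
    const linkAttributes: LinkAttributes = {
      path: attributes.path,
      mtime: attributes.mtime,
      mode: attributes.mode,
      linkpath: Buffer.concat(chunks).toString('utf8'), // the target path
    };
    done(null, new SymbolicLinkEntry(linkAttributes));
  });
}
```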
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n // Handle large reads by chunking to fit 32-bit signed int limit\n const MAX_INT32 = 0x7fffffff; // 2,147,483,647 bytes (~2GB)\n\n if (length <= MAX_INT32) {\n return this.readChunk(position, length);\n }\n\n // For large reads, split into multiple chunks\n const chunks: Buffer[] = [];\n let totalBytesRead = 0;\n let currentPos = position;\n\n while (totalBytesRead < length) {\n const remaining = length - totalBytesRead;\n const chunkSize = Math.min(remaining, MAX_INT32);\n const chunk = this.readChunk(currentPos, chunkSize);\n\n chunks.push(chunk);\n totalBytesRead += chunk.length;\n currentPos += chunk.length;\n\n if (chunk.length < chunkSize) {\n // EOF reached\n break;\n }\n }\n\n return Buffer.concat(chunks);\n }\n\n private readChunk(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n 
}\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n }\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","MAX_INT32","readChunk","chunks","totalBytesRead","remaining","Buffer","concat","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjFyB;yDACvB;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IA6BC,GA7BDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,gEAAgE;QAChE,IAAMgB,YAAY,YAAY,6BAA6B;QAE3D,IAAIhB,UAAUgB,WAAW;YACvB,OAAO,IAAI,CAACC,SAAS,CAAClB,UAAUC;QAClC;QAEA,8CAA8C;QAC9C,IAAMkB,SAAmB,EAAE;QAC3B,IAAIC,iBAAiB;QACrB,IAAIV,aAAaV;QAEjB,MAAOoB,iBAAiBnB,OAAQ;YAC9B,IAAMoB,YAAYpB,SAASmB;YAC3B,IAAMT,YAAYH,KAAKC,GAAG,CAACY,WAAWJ;YACtC,IAAMH,QAAQ,IAAI,CAACI,SAAS,CAACR,YAAYC;YAEzCQ,OAAOP,IAAI,CAACE;YACZM,kBAAkBN,MAAMb,MAAM;YAC9BS,cAAcI,MAAMb,MAAM;YAE1B,IAAIa,MAAMb,MAAM,GAAGU,WAAW;gBAE5B;YACF;QACF;QAEA,OAAOW,OAAOC,MAAM,CAACJ;IACvB;IAEA,OAAQD,SAOP,GAPD,SAAQA,UAAUlB,QAAgB,EAAEC,MAAc;QAChD,IAAMuB,MAAMC,IAAAA,gCAAW,EAACxB;QACxB,IAAMyB,YAAYC,WAAE,CAACC,QAAQ,CAAC
,IAAI,CAACb,EAAE,EAAES,KAAK,GAAGvB,QAAQD;QACvD,IAAI0B,YAAYzB,QAAQ;YACtB,OAAOuB,IAAItB,KAAK,CAAC,GAAGwB;QACtB;QACA,OAAOF;IACT;IAEArB,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFuB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACd,EAAE;QACtB,EAAE,OAAOe,IAAI;QACX,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDzB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAIW,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMrB,YAAY,OAAO,cAAc;QACvC,IAAIsB,aAAqC;QAEzC,IAAMtC,SAASF,qBAAqB,SAACyC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMnB,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASyB;YAC5C,IAAIb,UAAU,GAAG;gBACfmB,WAAW;gBACXE,EAAEtB,IAAI,CAAC;gBACP;YACF;YAEAmB,UAAU;YACV,IAAMjC,SAAS2B,IAAAA,gCAAW,EAACZ;YAC3B,IAAMsB,gBAAgB7B,SAASoB;YAE/BC,WAAE,CAAC5B,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQsB,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEtB,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIyB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEtB,IAAI,CAAC;gBACT,OAAO;oBACLc,aAAaW;oBACbH,EAAEtB,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAGmC;gBACzB;YACF;QACF;QAEA,OAAO1C;IACT;WA9GWH"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n // Handle large reads by chunking to fit 32-bit signed int limit\n const MAX_INT32 = 0x7fffffff; // 2,147,483,647 bytes (~2GB)\n\n if (length <= MAX_INT32) {\n return this.readChunk(position, length);\n }\n\n // For large reads, split into multiple chunks\n const chunks: Buffer[] = [];\n let totalBytesRead = 0;\n let currentPos = position;\n\n while (totalBytesRead < length) {\n const remaining = length - totalBytesRead;\n const chunkSize = Math.min(remaining, MAX_INT32);\n const chunk = this.readChunk(currentPos, chunkSize);\n\n chunks.push(chunk);\n totalBytesRead += chunk.length;\n currentPos += chunk.length;\n\n if (chunk.length < chunkSize) {\n // EOF reached\n break;\n }\n }\n\n return Buffer.concat(chunks);\n }\n\n private readChunk(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n }\n return 
buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n }\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","MAX_INT32","readChunk","chunks","totalBytesRead","remaining","Buffer","concat","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjFyB;yDACvB;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IA6BC,GA7BDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,gEAAgE;QAChE,IAAMgB,YAAY,YAAY,6BAA6B;QAE3D,IAAIhB,UAAUgB,WAAW;YACvB,OAAO,IAAI,CAACC,SAAS,CAAClB,UAAUC;QAClC;QAEA,8CAA8C;QAC9C,IAAMkB,SAAmB,EAAE;QAC3B,IAAIC,iBAAiB;QACrB,IAAIV,aAAaV;QAEjB,MAAOoB,iBAAiBnB,OAAQ;YAC9B,IAAMoB,YAAYpB,SAASmB;YAC3B,IAAMT,YAAYH,KAAKC,GAAG,CAACY,WAAWJ;YACtC,IAAMH,QAAQ,IAAI,CAACI,SAAS,CAACR,YAAYC;YAEzCQ,OAAOP,IAAI,CAACE;YACZM,kBAAkBN,MAAMb,MAAM;YAC9BS,cAAcI,MAAMb,MAAM;YAE1B,IAAIa,MAAMb,MAAM,GAAGU,WAAW;gBAE5B;YACF;QACF;QAEA,OAAOW,OAAOC,MAAM,CAACJ;IACvB;IAEA,OAAQD,SAOP,GAPD,SAAQA,UAAUlB,QAAgB,EAAEC,MAAc;QAChD,IAAMuB,MAAMC,IAAAA,gCAAW,EAACxB;QACxB,IAAMyB,YAAYC,WAAE,CAACC,QAAQ,CAAC,IAAI,CAACb
,EAAE,EAAES,KAAK,GAAGvB,QAAQD;QACvD,IAAI0B,YAAYzB,QAAQ;YACtB,OAAOuB,IAAItB,KAAK,CAAC,GAAGwB;QACtB;QACA,OAAOF;IACT;IAEArB,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFuB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACd,EAAE;QACtB,EAAE,OAAOe,IAAI;QACX,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDzB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAIW,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMrB,YAAY,OAAO,cAAc;QACvC,IAAIsB,aAAqC;QAEzC,IAAMtC,SAASF,qBAAqB,SAACyC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMnB,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASyB;YAC5C,IAAIb,UAAU,GAAG;gBACfmB,WAAW;gBACXE,EAAEtB,IAAI,CAAC;gBACP;YACF;YAEAmB,UAAU;YACV,IAAMjC,SAAS2B,IAAAA,gCAAW,EAACZ;YAC3B,IAAMsB,gBAAgB7B,SAASoB;YAE/BC,WAAE,CAAC5B,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQsB,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEtB,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIyB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEtB,IAAI,CAAC;gBACT,OAAO;oBACLc,aAAaW;oBACbH,EAAEtB,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAGmC;gBACzB;YACF;QACF;QAEA,OAAO1C;IACT;WA9GWH"}
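Both `BufferSource` and `FileSource` in the map above implement the same `ArchiveSource` surface (`read`, `getSize`, `close`, `createReadStream`), so header parsing can stay agnostic of whether the archive lives in memory or on disk. A minimal sketch, assuming the classes are importable from the module path recorded in this map and using hypothetical file names:

```ts
import fs from 'fs';
// Hypothetical relative path; the map records src/sevenz/ArchiveSource.ts.
import { BufferSource, FileSource, type ArchiveSource } from './sevenz/ArchiveSource.ts';

// The 7z signature occupies the first 6 bytes of an archive.
function readSignature(source: ArchiveSource): Buffer {
  return source.read(0, Math.min(6, source.getSize()));
}

// In-memory archive: the entire file is already a Buffer.
const memSource = new BufferSource(fs.readFileSync('archive.7z')); // hypothetical file
console.log(readSignature(memSource));

// File-descriptor archive: better memory profile for large files.
const fd = fs.openSync('archive.7z', 'r');
const fdSource = new FileSource(fd, fs.fstatSync(fd).size);
console.log(readSignature(fdSource));
fdSource.close(); // closes the underlying fd
```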
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/FolderStreamSplitter.ts"],"sourcesContent":["/**\n * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams\n *\n * For multi-file solid archives, the folder is decompressed as a single stream.\n * This class splits that stream into individual file streams based on known file boundaries.\n *\n * Features:\n * - Lazy stream creation (streams created on first access)\n * - Backpressure propagation (returns false when downstream is full)\n * - Running CRC verification per file\n * - Automatic cleanup of completed streams\n */\n\nimport { crc32, PassThrough } from 'extract-base-iterator';\nimport type Stream from 'stream';\n\nexport interface FolderStreamSplitterOptions {\n /** Sizes of each file in the folder (in order) */\n fileSizes: number[];\n /** Whether to verify CRC for each file */\n verifyCrc?: boolean;\n /** Expected CRCs for each file (parallel to fileSizes) */\n expectedCrcs?: (number | undefined)[];\n}\n\n/**\n * Splits a decompressed folder stream into individual file streams.\n *\n * Usage:\n * ```\n * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });\n *\n * decompressStream.on('data', (chunk) => {\n * if (!splitter.write(chunk)) {\n * decompressStream.pause();\n * splitter.onDrain(() => decompressStream.resume());\n * }\n * });\n * decompressStream.on('end', () => splitter.end());\n *\n * // Get stream for file at index 1 (created lazily)\n * const fileStream = splitter.getFileStream(1);\n * ```\n */\nexport class FolderStreamSplitter {\n private fileBoundaries: number[]; // Cumulative offsets [0, size1, size1+size2, ...]\n private fileStreams: (Stream.PassThrough | null)[]; // Lazy-created, null after completion\n private fileCrcs: number[]; // Running CRC per file\n private currentFileIndex: number;\n private bytesWritten: number;\n private currentFileEnd: number;\n private verifyCrc: boolean;\n private expectedCrcs: (number | undefined)[];\n private finished: boolean;\n private error: Error | null;\n private drainCallbacks: (() => void)[];\n private _needsDrain: boolean;\n\n constructor(options: FolderStreamSplitterOptions) {\n const fileSizes = options.fileSizes;\n const verifyCrc = options.verifyCrc !== undefined ? options.verifyCrc : true;\n const expectedCrcs = options.expectedCrcs || [];\n\n this.verifyCrc = verifyCrc;\n this.expectedCrcs = expectedCrcs;\n this.currentFileIndex = 0;\n this.bytesWritten = 0;\n this.finished = false;\n this.error = null;\n this.drainCallbacks = [];\n this._needsDrain = false;\n\n // Calculate cumulative boundaries\n this.fileBoundaries = [0];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileBoundaries.push(this.fileBoundaries[this.fileBoundaries.length - 1] + fileSizes[i]);\n }\n\n // Initialize streams array (lazy creation - all null initially)\n this.fileStreams = [];\n this.fileCrcs = [];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileStreams.push(null);\n this.fileCrcs.push(0);\n }\n\n // Set first file boundary\n this.currentFileEnd = this.fileBoundaries[1] || 0;\n }\n\n /**\n * Write decompressed data chunk. 
Data is routed to appropriate file stream(s).\n * Returns false if backpressure should be applied (downstream is full).\n */\n write(chunk: Buffer): boolean {\n if (this.finished || this.error) return true;\n\n let offset = 0;\n let canContinue = true;\n\n while (offset < chunk.length && this.currentFileIndex < this.fileStreams.length) {\n const remaining = chunk.length - offset;\n const neededForFile = this.currentFileEnd - this.bytesWritten;\n const toWrite = Math.min(remaining, neededForFile);\n\n if (toWrite > 0) {\n const fileChunk = chunk.slice(offset, offset + toWrite);\n\n // Ensure stream exists (lazy creation)\n const fileStream = this.ensureFileStream(this.currentFileIndex);\n\n // Update CRC\n if (this.verifyCrc) {\n this.fileCrcs[this.currentFileIndex] = crc32(fileChunk, this.fileCrcs[this.currentFileIndex]);\n }\n\n // Write to file stream, track backpressure\n if (!fileStream.write(fileChunk)) {\n canContinue = false;\n this._needsDrain = true;\n fileStream.once('drain', () => {\n this._needsDrain = false;\n this.notifyDrain();\n });\n }\n }\n\n this.bytesWritten += toWrite;\n offset += toWrite;\n\n // Check if current file is complete\n if (this.bytesWritten >= this.currentFileEnd) {\n this.finishCurrentFile();\n }\n }\n\n return canContinue;\n }\n\n /**\n * Ensure stream exists for file index (lazy creation)\n */\n private ensureFileStream(fileIndex: number): Stream.PassThrough {\n let stream = this.fileStreams[fileIndex];\n if (!stream) {\n stream = new PassThrough();\n this.fileStreams[fileIndex] = stream;\n }\n return stream;\n }\n\n /**\n * Complete current file and move to next\n */\n private finishCurrentFile(): void {\n const fileStream = this.fileStreams[this.currentFileIndex];\n\n // Verify CRC if enabled\n if (this.verifyCrc) {\n const expectedCrc = this.expectedCrcs[this.currentFileIndex];\n if (expectedCrc !== undefined && this.fileCrcs[this.currentFileIndex] !== expectedCrc) {\n const err = new Error(`CRC mismatch for file ${this.currentFileIndex}: expected ${expectedCrc.toString(16)}, got ${this.fileCrcs[this.currentFileIndex].toString(16)}`);\n this.emitError(err);\n return;\n }\n }\n\n // End this file's stream\n if (fileStream) {\n fileStream.end();\n }\n\n // Release reference for GC\n this.fileStreams[this.currentFileIndex] = null;\n\n // Move to next file\n this.currentFileIndex++;\n if (this.currentFileIndex < this.fileBoundaries.length - 1) {\n this.currentFileEnd = this.fileBoundaries[this.currentFileIndex + 1];\n }\n }\n\n /**\n * Signal end of decompressed data\n */\n end(): void {\n if (this.finished) return;\n this.finished = true;\n\n // End any remaining streams\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Emit error to all pending file streams\n */\n private emitError(err: Error): void {\n this.error = err;\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.emit('error', err);\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Get the stream for a specific file by index.\n * Stream is created lazily on first access.\n */\n getFileStream(fileIndex: number): Stream.PassThrough {\n if (fileIndex < 0 || fileIndex >= this.fileBoundaries.length - 1) {\n throw new Error(`Invalid file index: ${fileIndex}`);\n }\n\n // Check if file already completed\n if (fileIndex < this.currentFileIndex) {\n 
throw new Error(`File ${fileIndex} already completed - streams must be accessed in order`);\n }\n\n return this.ensureFileStream(fileIndex);\n }\n\n /**\n * Register callback for when backpressure clears\n */\n onDrain(callback: () => void): void {\n if (!this._needsDrain) {\n callback();\n } else {\n this.drainCallbacks.push(callback);\n }\n }\n\n /**\n * Notify all drain callbacks\n */\n private notifyDrain(): void {\n const callbacks = this.drainCallbacks;\n this.drainCallbacks = [];\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i]();\n }\n }\n\n /**\n * Check if a specific file's stream has been fully written\n */\n isFileComplete(fileIndex: number): boolean {\n return fileIndex < this.currentFileIndex;\n }\n\n /**\n * Get total number of files in this folder\n */\n get fileCount(): number {\n return this.fileBoundaries.length - 1;\n }\n\n /**\n * Check if splitter has encountered an error\n */\n getError(): Error | null {\n return this.error;\n }\n}\n"],"names":["FolderStreamSplitter","options","fileSizes","verifyCrc","undefined","expectedCrcs","currentFileIndex","bytesWritten","finished","error","drainCallbacks","_needsDrain","fileBoundaries","i","length","push","fileStreams","fileCrcs","currentFileEnd","write","chunk","offset","canContinue","remaining","neededForFile","toWrite","Math","min","fileChunk","slice","fileStream","ensureFileStream","crc32","once","notifyDrain","finishCurrentFile","fileIndex","stream","PassThrough","expectedCrc","err","Error","toString","emitError","end","emit","getFileStream","onDrain","callback","callbacks","isFileComplete","getError","fileCount"],"mappings":"AAAA;;;;;;;;;;;CAWC;;;;+BAiCYA;;;eAAAA;;;mCA/BsB;;;;;;;;;;;;;;;;;;;;AA+B5B,IAAA,AAAMA,qCAAN;;aAAMA,qBAcCC,OAAoC;gCAdrCD;QAeT,IAAME,YAAYD,QAAQC,SAAS;QACnC,IAAMC,YAAYF,QAAQE,SAAS,KAAKC,YAAYH,QAAQE,SAAS,GAAG;QACxE,IAAME,eAAeJ,QAAQI,YAAY,IAAI,EAAE;QAE/C,IAAI,CAACF,SAAS,GAAGA;QACjB,IAAI,CAACE,YAAY,GAAGA;QACpB,IAAI,CAACC,gBAAgB,GAAG;QACxB,IAAI,CAACC,YAAY,GAAG;QACpB,IAAI,CAACC,QAAQ,GAAG;QAChB,IAAI,CAACC,KAAK,GAAG;QACb,IAAI,CAACC,cAAc,GAAG,EAAE;QACxB,IAAI,CAACC,WAAW,GAAG;QAEnB,kCAAkC;QAClC,IAAI,CAACC,cAAc,GAAG;YAAC;SAAE;QACzB,IAAK,IAAIC,IAAI,GAAGA,IAAIX,UAAUY,MAAM,EAAED,IAAK;YACzC,IAAI,CAACD,cAAc,CAACG,IAAI,CAAC,IAAI,CAACH,cAAc,CAAC,IAAI,CAACA,cAAc,CAACE,MAAM,GAAG,EAAE,GAAGZ,SAAS,CAACW,EAAE;QAC7F;QAEA,gEAAgE;QAChE,IAAI,CAACG,WAAW,GAAG,EAAE;QACrB,IAAI,CAACC,QAAQ,GAAG,EAAE;QAClB,IAAK,IAAIJ,KAAI,GAAGA,KAAIX,UAAUY,MAAM,EAAED,KAAK;YACzC,IAAI,CAACG,WAAW,CAACD,IAAI,CAAC;YACtB,IAAI,CAACE,QAAQ,CAACF,IAAI,CAAC;QACrB;QAEA,0BAA0B;QAC1B,IAAI,CAACG,cAAc,GAAG,IAAI,CAACN,cAAc,CAAC,EAAE,IAAI;;iBA3CvCZ;IA8CX;;;GAGC,GACDmB,OAAAA,KA2CC,GA3CDA,SAAAA,MAAMC,KAAa;;QACjB,IAAI,IAAI,CAACZ,QAAQ,IAAI,IAAI,CAACC,KAAK,EAAE,OAAO;QAExC,IAAIY,SAAS;QACb,IAAIC,cAAc;QAElB,MAAOD,SAASD,MAAMN,MAAM,IAAI,IAAI,CAACR,gBAAgB,GAAG,IAAI,CAACU,WAAW,CAACF,MAAM,CAAE;YAC/E,IAAMS,YAAYH,MAAMN,MAAM,GAAGO;YACjC,IAAMG,gBAAgB,IAAI,CAACN,cAAc,GAAG,IAAI,CAACX,YAAY;YAC7D,IAAMkB,UAAUC,KAAKC,GAAG,CAACJ,WAAWC;YAEpC,IAAIC,UAAU,GAAG;gBACf,IAAMG,YAAYR,MAAMS,KAAK,CAACR,QAAQA,SAASI;gBAE/C,uCAAuC;gBACvC,IAAMK,aAAa,IAAI,CAACC,gBAAgB,CAAC,IAAI,CAACzB,gBAAgB;gBAE9D,aAAa;gBACb,IAAI,IAAI,CAACH,SAAS,EAAE;oBAClB,IAAI,CAACc,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,GAAG0B,IAAAA,0BAAK,EAACJ,WAAW,IAAI,CAACX,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC;gBAC9F;gBAEA,2CAA2C;gBAC3C,IAAI,CAACwB,WAAWX,KAAK,CAACS,YAAY;oBAChCN,cAAc;oBACd,IAAI,CAACX,WAAW,GAAG;oBACnBmB,WAAWG,IAAI,CAAC,SAAS;wBACvB,MAAKtB,WAAW,GAAG;wBACnB,MAAKuB,WAAW;oBAClB;gBACF;YACF;YAEA,IAAI,CAAC3B,YAAY,IAAIkB;YACrBJ,UAAUI;YAEV,oCAAoC;Y
ACpC,IAAI,IAAI,CAAClB,YAAY,IAAI,IAAI,CAACW,cAAc,EAAE;gBAC5C,IAAI,CAACiB,iBAAiB;YACxB;QACF;QAEA,OAAOb;IACT;IAEA;;GAEC,GACD,OAAQS,gBAOP,GAPD,SAAQA,iBAAiBK,SAAiB;QACxC,IAAIC,SAAS,IAAI,CAACrB,WAAW,CAACoB,UAAU;QACxC,IAAI,CAACC,QAAQ;YACXA,SAAS,IAAIC,gCAAW;YACxB,IAAI,CAACtB,WAAW,CAACoB,UAAU,GAAGC;QAChC;QACA,OAAOA;IACT;IAEA;;GAEC,GACD,OAAQF,iBA0BP,GA1BD,SAAQA;QACN,IAAML,aAAa,IAAI,CAACd,WAAW,CAAC,IAAI,CAACV,gBAAgB,CAAC;QAE1D,wBAAwB;QACxB,IAAI,IAAI,CAACH,SAAS,EAAE;YAClB,IAAMoC,cAAc,IAAI,CAAClC,YAAY,CAAC,IAAI,CAACC,gBAAgB,CAAC;YAC5D,IAAIiC,gBAAgBnC,aAAa,IAAI,CAACa,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,KAAKiC,aAAa;gBACrF,IAAMC,MAAM,IAAIC,MAAM,AAAC,yBAA2DF,OAAnC,IAAI,CAACjC,gBAAgB,EAAC,eAA8C,OAAjCiC,YAAYG,QAAQ,CAAC,KAAI,UAA0D,OAAlD,IAAI,CAACzB,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,CAACoC,QAAQ,CAAC;gBACjK,IAAI,CAACC,SAAS,CAACH;gBACf;YACF;QACF;QAEA,yBAAyB;QACzB,IAAIV,YAAY;YACdA,WAAWc,GAAG;QAChB;QAEA,2BAA2B;QAC3B,IAAI,CAAC5B,WAAW,CAAC,IAAI,CAACV,gBAAgB,CAAC,GAAG;QAE1C,oBAAoB;QACpB,IAAI,CAACA,gBAAgB;QACrB,IAAI,IAAI,CAACA,gBAAgB,GAAG,IAAI,CAACM,cAAc,CAACE,MAAM,GAAG,GAAG;YAC1D,IAAI,CAACI,cAAc,GAAG,IAAI,CAACN,cAAc,CAAC,IAAI,CAACN,gBAAgB,GAAG,EAAE;QACtE;IACF;IAEA;;GAEC,GACDsC,OAAAA,GAYC,GAZDA,SAAAA;QACE,IAAI,IAAI,CAACpC,QAAQ,EAAE;QACnB,IAAI,CAACA,QAAQ,GAAG;QAEhB,4BAA4B;QAC5B,IAAK,IAAIK,IAAI,IAAI,CAACP,gBAAgB,EAAEO,IAAI,IAAI,CAACG,WAAW,CAACF,MAAM,EAAED,IAAK;YACpE,IAAMwB,SAAS,IAAI,CAACrB,WAAW,CAACH,EAAE;YAClC,IAAIwB,QAAQ;gBACVA,OAAOO,GAAG;YACZ;YACA,IAAI,CAAC5B,WAAW,CAACH,EAAE,GAAG;QACxB;IACF;IAEA;;GAEC,GACD,OAAQ8B,SAUP,GAVD,SAAQA,UAAUH,GAAU;QAC1B,IAAI,CAAC/B,KAAK,GAAG+B;QACb,IAAK,IAAI3B,IAAI,IAAI,CAACP,gBAAgB,EAAEO,IAAI,IAAI,CAACG,WAAW,CAACF,MAAM,EAAED,IAAK;YACpE,IAAMwB,SAAS,IAAI,CAACrB,WAAW,CAACH,EAAE;YAClC,IAAIwB,QAAQ;gBACVA,OAAOQ,IAAI,CAAC,SAASL;gBACrBH,OAAOO,GAAG;YACZ;YACA,IAAI,CAAC5B,WAAW,CAACH,EAAE,GAAG;QACxB;IACF;IAEA;;;GAGC,GACDiC,OAAAA,aAWC,GAXDA,SAAAA,cAAcV,SAAiB;QAC7B,IAAIA,YAAY,KAAKA,aAAa,IAAI,CAACxB,cAAc,CAACE,MAAM,GAAG,GAAG;YAChE,MAAM,IAAI2B,MAAM,AAAC,uBAAgC,OAAVL;QACzC;QAEA,kCAAkC;QAClC,IAAIA,YAAY,IAAI,CAAC9B,gBAAgB,EAAE;YACrC,MAAM,IAAImC,MAAM,AAAC,QAAiB,OAAVL,WAAU;QACpC;QAEA,OAAO,IAAI,CAACL,gBAAgB,CAACK;IAC/B;IAEA;;GAEC,GACDW,OAAAA,OAMC,GANDA,SAAAA,QAAQC,QAAoB;QAC1B,IAAI,CAAC,IAAI,CAACrC,WAAW,EAAE;YACrBqC;QACF,OAAO;YACL,IAAI,CAACtC,cAAc,CAACK,IAAI,CAACiC;QAC3B;IACF;IAEA;;GAEC,GACD,OAAQd,WAMP,GAND,SAAQA;QACN,IAAMe,YAAY,IAAI,CAACvC,cAAc;QACrC,IAAI,CAACA,cAAc,GAAG,EAAE;QACxB,IAAK,IAAIG,IAAI,GAAGA,IAAIoC,UAAUnC,MAAM,EAAED,IAAK;YACzCoC,SAAS,CAACpC,EAAE;QACd;IACF;IAEA;;GAEC,GACDqC,OAAAA,cAEC,GAFDA,SAAAA,eAAed,SAAiB;QAC9B,OAAOA,YAAY,IAAI,CAAC9B,gBAAgB;IAC1C;IASA;;GAEC,GACD6C,OAAAA,QAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAAC1C,KAAK;IACnB;kBApOWT;;YA2NPoD,KAAAA;iBAAJ,AAHA;;GAEC,GACD;gBACE,OAAO,IAAI,CAACxC,cAAc,CAACE,MAAM,GAAG;YACtC;;;WA7NWd"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/sevenz/FolderStreamSplitter.ts"],"sourcesContent":["/**\n * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams\n *\n * For multi-file solid archives, the folder is decompressed as a single stream.\n * This class splits that stream into individual file streams based on known file boundaries.\n *\n * Features:\n * - Lazy stream creation (streams created on first access)\n * - Backpressure propagation (returns false when downstream is full)\n * - Running CRC verification per file\n * - Automatic cleanup of completed streams\n */\n\nimport { crc32, PassThrough } from 'extract-base-iterator';\nimport type Stream from 'stream';\n\nexport interface FolderStreamSplitterOptions {\n /** Sizes of each file in the folder (in order) */\n fileSizes: number[];\n /** Whether to verify CRC for each file */\n verifyCrc?: boolean;\n /** Expected CRCs for each file (parallel to fileSizes) */\n expectedCrcs?: (number | undefined)[];\n}\n\n/**\n * Splits a decompressed folder stream into individual file streams.\n *\n * Usage:\n * ```\n * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });\n *\n * decompressStream.on('data', (chunk) => {\n * if (!splitter.write(chunk)) {\n * decompressStream.pause();\n * splitter.onDrain(() => decompressStream.resume());\n * }\n * });\n * decompressStream.on('end', () => splitter.end());\n *\n * // Get stream for file at index 1 (created lazily)\n * const fileStream = splitter.getFileStream(1);\n * ```\n */\nexport class FolderStreamSplitter {\n private fileBoundaries: number[]; // Cumulative offsets [0, size1, size1+size2, ...]\n private fileStreams: (Stream.PassThrough | null)[]; // Lazy-created, null after completion\n private fileCrcs: number[]; // Running CRC per file\n private currentFileIndex: number;\n private bytesWritten: number;\n private currentFileEnd: number;\n private verifyCrc: boolean;\n private expectedCrcs: (number | undefined)[];\n private finished: boolean;\n private error: Error | null;\n private drainCallbacks: (() => void)[];\n private _needsDrain: boolean;\n\n constructor(options: FolderStreamSplitterOptions) {\n const fileSizes = options.fileSizes;\n const verifyCrc = options.verifyCrc !== undefined ? options.verifyCrc : true;\n const expectedCrcs = options.expectedCrcs || [];\n\n this.verifyCrc = verifyCrc;\n this.expectedCrcs = expectedCrcs;\n this.currentFileIndex = 0;\n this.bytesWritten = 0;\n this.finished = false;\n this.error = null;\n this.drainCallbacks = [];\n this._needsDrain = false;\n\n // Calculate cumulative boundaries\n this.fileBoundaries = [0];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileBoundaries.push(this.fileBoundaries[this.fileBoundaries.length - 1] + fileSizes[i]);\n }\n\n // Initialize streams array (lazy creation - all null initially)\n this.fileStreams = [];\n this.fileCrcs = [];\n for (let i = 0; i < fileSizes.length; i++) {\n this.fileStreams.push(null);\n this.fileCrcs.push(0);\n }\n\n // Set first file boundary\n this.currentFileEnd = this.fileBoundaries[1] || 0;\n }\n\n /**\n * Write decompressed data chunk. 
Data is routed to appropriate file stream(s).\n * Returns false if backpressure should be applied (downstream is full).\n */\n write(chunk: Buffer): boolean {\n if (this.finished || this.error) return true;\n\n let offset = 0;\n let canContinue = true;\n\n while (offset < chunk.length && this.currentFileIndex < this.fileStreams.length) {\n const remaining = chunk.length - offset;\n const neededForFile = this.currentFileEnd - this.bytesWritten;\n const toWrite = Math.min(remaining, neededForFile);\n\n if (toWrite > 0) {\n const fileChunk = chunk.slice(offset, offset + toWrite);\n\n // Ensure stream exists (lazy creation)\n const fileStream = this.ensureFileStream(this.currentFileIndex);\n\n // Update CRC\n if (this.verifyCrc) {\n this.fileCrcs[this.currentFileIndex] = crc32(fileChunk, this.fileCrcs[this.currentFileIndex]);\n }\n\n // Write to file stream, track backpressure\n if (!fileStream.write(fileChunk)) {\n canContinue = false;\n this._needsDrain = true;\n fileStream.once('drain', () => {\n this._needsDrain = false;\n this.notifyDrain();\n });\n }\n }\n\n this.bytesWritten += toWrite;\n offset += toWrite;\n\n // Check if current file is complete\n if (this.bytesWritten >= this.currentFileEnd) {\n this.finishCurrentFile();\n }\n }\n\n return canContinue;\n }\n\n /**\n * Ensure stream exists for file index (lazy creation)\n */\n private ensureFileStream(fileIndex: number): Stream.PassThrough {\n let stream = this.fileStreams[fileIndex];\n if (!stream) {\n stream = new PassThrough();\n this.fileStreams[fileIndex] = stream;\n }\n return stream;\n }\n\n /**\n * Complete current file and move to next\n */\n private finishCurrentFile(): void {\n const fileStream = this.fileStreams[this.currentFileIndex];\n\n // Verify CRC if enabled\n if (this.verifyCrc) {\n const expectedCrc = this.expectedCrcs[this.currentFileIndex];\n if (expectedCrc !== undefined && this.fileCrcs[this.currentFileIndex] !== expectedCrc) {\n const err = new Error(`CRC mismatch for file ${this.currentFileIndex}: expected ${expectedCrc.toString(16)}, got ${this.fileCrcs[this.currentFileIndex].toString(16)}`);\n this.emitError(err);\n return;\n }\n }\n\n // End this file's stream\n if (fileStream) {\n fileStream.end();\n }\n\n // Release reference for GC\n this.fileStreams[this.currentFileIndex] = null;\n\n // Move to next file\n this.currentFileIndex++;\n if (this.currentFileIndex < this.fileBoundaries.length - 1) {\n this.currentFileEnd = this.fileBoundaries[this.currentFileIndex + 1];\n }\n }\n\n /**\n * Signal end of decompressed data\n */\n end(): void {\n if (this.finished) return;\n this.finished = true;\n\n // End any remaining streams\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Emit error to all pending file streams\n */\n private emitError(err: Error): void {\n this.error = err;\n for (let i = this.currentFileIndex; i < this.fileStreams.length; i++) {\n const stream = this.fileStreams[i];\n if (stream) {\n stream.emit('error', err);\n stream.end();\n }\n this.fileStreams[i] = null;\n }\n }\n\n /**\n * Get the stream for a specific file by index.\n * Stream is created lazily on first access.\n */\n getFileStream(fileIndex: number): Stream.PassThrough {\n if (fileIndex < 0 || fileIndex >= this.fileBoundaries.length - 1) {\n throw new Error(`Invalid file index: ${fileIndex}`);\n }\n\n // Check if file already completed\n if (fileIndex < this.currentFileIndex) {\n 
throw new Error(`File ${fileIndex} already completed - streams must be accessed in order`);\n }\n\n return this.ensureFileStream(fileIndex);\n }\n\n /**\n * Register callback for when backpressure clears\n */\n onDrain(callback: () => void): void {\n if (!this._needsDrain) {\n callback();\n } else {\n this.drainCallbacks.push(callback);\n }\n }\n\n /**\n * Notify all drain callbacks\n */\n private notifyDrain(): void {\n const callbacks = this.drainCallbacks;\n this.drainCallbacks = [];\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i]();\n }\n }\n\n /**\n * Check if a specific file's stream has been fully written\n */\n isFileComplete(fileIndex: number): boolean {\n return fileIndex < this.currentFileIndex;\n }\n\n /**\n * Get total number of files in this folder\n */\n get fileCount(): number {\n return this.fileBoundaries.length - 1;\n }\n\n /**\n * Check if splitter has encountered an error\n */\n getError(): Error | null {\n return this.error;\n }\n}\n"],"names":["FolderStreamSplitter","options","fileSizes","verifyCrc","undefined","expectedCrcs","currentFileIndex","bytesWritten","finished","error","drainCallbacks","_needsDrain","fileBoundaries","i","length","push","fileStreams","fileCrcs","currentFileEnd","write","chunk","offset","canContinue","remaining","neededForFile","toWrite","Math","min","fileChunk","slice","fileStream","ensureFileStream","crc32","once","notifyDrain","finishCurrentFile","fileIndex","stream","PassThrough","expectedCrc","err","Error","toString","emitError","end","emit","getFileStream","onDrain","callback","callbacks","isFileComplete","getError","fileCount"],"mappings":"AAAA;;;;;;;;;;;CAWC;;;;+BAiCYA;;;eAAAA;;;mCA/BsB;;;;;;;;;;;;;;;;;;;;AA+B5B,IAAA,AAAMA,qCAAN;;aAAMA,qBAcCC,OAAoC;gCAdrCD;QAeT,IAAME,YAAYD,QAAQC,SAAS;QACnC,IAAMC,YAAYF,QAAQE,SAAS,KAAKC,YAAYH,QAAQE,SAAS,GAAG;QACxE,IAAME,eAAeJ,QAAQI,YAAY,IAAI,EAAE;QAE/C,IAAI,CAACF,SAAS,GAAGA;QACjB,IAAI,CAACE,YAAY,GAAGA;QACpB,IAAI,CAACC,gBAAgB,GAAG;QACxB,IAAI,CAACC,YAAY,GAAG;QACpB,IAAI,CAACC,QAAQ,GAAG;QAChB,IAAI,CAACC,KAAK,GAAG;QACb,IAAI,CAACC,cAAc,GAAG,EAAE;QACxB,IAAI,CAACC,WAAW,GAAG;QAEnB,kCAAkC;QAClC,IAAI,CAACC,cAAc,GAAG;YAAC;SAAE;QACzB,IAAK,IAAIC,IAAI,GAAGA,IAAIX,UAAUY,MAAM,EAAED,IAAK;YACzC,IAAI,CAACD,cAAc,CAACG,IAAI,CAAC,IAAI,CAACH,cAAc,CAAC,IAAI,CAACA,cAAc,CAACE,MAAM,GAAG,EAAE,GAAGZ,SAAS,CAACW,EAAE;QAC7F;QAEA,gEAAgE;QAChE,IAAI,CAACG,WAAW,GAAG,EAAE;QACrB,IAAI,CAACC,QAAQ,GAAG,EAAE;QAClB,IAAK,IAAIJ,KAAI,GAAGA,KAAIX,UAAUY,MAAM,EAAED,KAAK;YACzC,IAAI,CAACG,WAAW,CAACD,IAAI,CAAC;YACtB,IAAI,CAACE,QAAQ,CAACF,IAAI,CAAC;QACrB;QAEA,0BAA0B;QAC1B,IAAI,CAACG,cAAc,GAAG,IAAI,CAACN,cAAc,CAAC,EAAE,IAAI;;iBA3CvCZ;IA8CX;;;GAGC,GACDmB,OAAAA,KA2CC,GA3CDA,SAAAA,MAAMC,KAAa;;QACjB,IAAI,IAAI,CAACZ,QAAQ,IAAI,IAAI,CAACC,KAAK,EAAE,OAAO;QAExC,IAAIY,SAAS;QACb,IAAIC,cAAc;QAElB,MAAOD,SAASD,MAAMN,MAAM,IAAI,IAAI,CAACR,gBAAgB,GAAG,IAAI,CAACU,WAAW,CAACF,MAAM,CAAE;YAC/E,IAAMS,YAAYH,MAAMN,MAAM,GAAGO;YACjC,IAAMG,gBAAgB,IAAI,CAACN,cAAc,GAAG,IAAI,CAACX,YAAY;YAC7D,IAAMkB,UAAUC,KAAKC,GAAG,CAACJ,WAAWC;YAEpC,IAAIC,UAAU,GAAG;gBACf,IAAMG,YAAYR,MAAMS,KAAK,CAACR,QAAQA,SAASI;gBAE/C,uCAAuC;gBACvC,IAAMK,aAAa,IAAI,CAACC,gBAAgB,CAAC,IAAI,CAACzB,gBAAgB;gBAE9D,aAAa;gBACb,IAAI,IAAI,CAACH,SAAS,EAAE;oBAClB,IAAI,CAACc,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,GAAG0B,IAAAA,0BAAK,EAACJ,WAAW,IAAI,CAACX,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC;gBAC9F;gBAEA,2CAA2C;gBAC3C,IAAI,CAACwB,WAAWX,KAAK,CAACS,YAAY;oBAChCN,cAAc;oBACd,IAAI,CAACX,WAAW,GAAG;oBACnBmB,WAAWG,IAAI,CAAC,SAAS;wBACvB,MAAKtB,WAAW,GAAG;wBACnB,MAAKuB,WAAW;oBAClB;gBACF;YACF;YAEA,IAAI,CAAC3B,YAAY,IAAIkB;YACrBJ,UAAUI;YAEV,oCAAoC;Y
ACpC,IAAI,IAAI,CAAClB,YAAY,IAAI,IAAI,CAACW,cAAc,EAAE;gBAC5C,IAAI,CAACiB,iBAAiB;YACxB;QACF;QAEA,OAAOb;IACT;IAEA;;GAEC,GACD,OAAQS,gBAOP,GAPD,SAAQA,iBAAiBK,SAAiB;QACxC,IAAIC,SAAS,IAAI,CAACrB,WAAW,CAACoB,UAAU;QACxC,IAAI,CAACC,QAAQ;YACXA,SAAS,IAAIC,gCAAW;YACxB,IAAI,CAACtB,WAAW,CAACoB,UAAU,GAAGC;QAChC;QACA,OAAOA;IACT;IAEA;;GAEC,GACD,OAAQF,iBA0BP,GA1BD,SAAQA;QACN,IAAML,aAAa,IAAI,CAACd,WAAW,CAAC,IAAI,CAACV,gBAAgB,CAAC;QAE1D,wBAAwB;QACxB,IAAI,IAAI,CAACH,SAAS,EAAE;YAClB,IAAMoC,cAAc,IAAI,CAAClC,YAAY,CAAC,IAAI,CAACC,gBAAgB,CAAC;YAC5D,IAAIiC,gBAAgBnC,aAAa,IAAI,CAACa,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,KAAKiC,aAAa;gBACrF,IAAMC,MAAM,IAAIC,MAAM,AAAC,yBAA2DF,OAAnC,IAAI,CAACjC,gBAAgB,EAAC,eAA8C,OAAjCiC,YAAYG,QAAQ,CAAC,KAAI,UAA0D,OAAlD,IAAI,CAACzB,QAAQ,CAAC,IAAI,CAACX,gBAAgB,CAAC,CAACoC,QAAQ,CAAC;gBACjK,IAAI,CAACC,SAAS,CAACH;gBACf;YACF;QACF;QAEA,yBAAyB;QACzB,IAAIV,YAAY;YACdA,WAAWc,GAAG;QAChB;QAEA,2BAA2B;QAC3B,IAAI,CAAC5B,WAAW,CAAC,IAAI,CAACV,gBAAgB,CAAC,GAAG;QAE1C,oBAAoB;QACpB,IAAI,CAACA,gBAAgB;QACrB,IAAI,IAAI,CAACA,gBAAgB,GAAG,IAAI,CAACM,cAAc,CAACE,MAAM,GAAG,GAAG;YAC1D,IAAI,CAACI,cAAc,GAAG,IAAI,CAACN,cAAc,CAAC,IAAI,CAACN,gBAAgB,GAAG,EAAE;QACtE;IACF;IAEA;;GAEC,GACDsC,OAAAA,GAYC,GAZDA,SAAAA;QACE,IAAI,IAAI,CAACpC,QAAQ,EAAE;QACnB,IAAI,CAACA,QAAQ,GAAG;QAEhB,4BAA4B;QAC5B,IAAK,IAAIK,IAAI,IAAI,CAACP,gBAAgB,EAAEO,IAAI,IAAI,CAACG,WAAW,CAACF,MAAM,EAAED,IAAK;YACpE,IAAMwB,SAAS,IAAI,CAACrB,WAAW,CAACH,EAAE;YAClC,IAAIwB,QAAQ;gBACVA,OAAOO,GAAG;YACZ;YACA,IAAI,CAAC5B,WAAW,CAACH,EAAE,GAAG;QACxB;IACF;IAEA;;GAEC,GACD,OAAQ8B,SAUP,GAVD,SAAQA,UAAUH,GAAU;QAC1B,IAAI,CAAC/B,KAAK,GAAG+B;QACb,IAAK,IAAI3B,IAAI,IAAI,CAACP,gBAAgB,EAAEO,IAAI,IAAI,CAACG,WAAW,CAACF,MAAM,EAAED,IAAK;YACpE,IAAMwB,SAAS,IAAI,CAACrB,WAAW,CAACH,EAAE;YAClC,IAAIwB,QAAQ;gBACVA,OAAOQ,IAAI,CAAC,SAASL;gBACrBH,OAAOO,GAAG;YACZ;YACA,IAAI,CAAC5B,WAAW,CAACH,EAAE,GAAG;QACxB;IACF;IAEA;;;GAGC,GACDiC,OAAAA,aAWC,GAXDA,SAAAA,cAAcV,SAAiB;QAC7B,IAAIA,YAAY,KAAKA,aAAa,IAAI,CAACxB,cAAc,CAACE,MAAM,GAAG,GAAG;YAChE,MAAM,IAAI2B,MAAM,AAAC,uBAAgC,OAAVL;QACzC;QAEA,kCAAkC;QAClC,IAAIA,YAAY,IAAI,CAAC9B,gBAAgB,EAAE;YACrC,MAAM,IAAImC,MAAM,AAAC,QAAiB,OAAVL,WAAU;QACpC;QAEA,OAAO,IAAI,CAACL,gBAAgB,CAACK;IAC/B;IAEA;;GAEC,GACDW,OAAAA,OAMC,GANDA,SAAAA,QAAQC,QAAoB;QAC1B,IAAI,CAAC,IAAI,CAACrC,WAAW,EAAE;YACrBqC;QACF,OAAO;YACL,IAAI,CAACtC,cAAc,CAACK,IAAI,CAACiC;QAC3B;IACF;IAEA;;GAEC,GACD,OAAQd,WAMP,GAND,SAAQA;QACN,IAAMe,YAAY,IAAI,CAACvC,cAAc;QACrC,IAAI,CAACA,cAAc,GAAG,EAAE;QACxB,IAAK,IAAIG,IAAI,GAAGA,IAAIoC,UAAUnC,MAAM,EAAED,IAAK;YACzCoC,SAAS,CAACpC,EAAE;QACd;IACF;IAEA;;GAEC,GACDqC,OAAAA,cAEC,GAFDA,SAAAA,eAAed,SAAiB;QAC9B,OAAOA,YAAY,IAAI,CAAC9B,gBAAgB;IAC1C;IASA;;GAEC,GACD6C,OAAAA,QAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAAC1C,KAAK;IACnB;kBApOWT;;YA2NPoD,KAAAA;iBAAJ,AAHA;;GAEC,GACD;gBACE,OAAO,IAAI,CAACxC,cAAc,CAACE,MAAM,GAAG;YACtC;;;WA7NWd"}
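The `FolderStreamSplitter` docstring embedded above sketches its intended wiring: feed it decompressed folder bytes, respect the boolean returned by `write()` for backpressure, and pull per-file streams lazily. A self-contained version of that wiring, with the import path as an assumption and `splitFolder` as a hypothetical helper:

```ts
import type Stream from 'stream';
// Hypothetical relative path; the map records src/sevenz/FolderStreamSplitter.ts.
import { FolderStreamSplitter } from './sevenz/FolderStreamSplitter.ts';

function splitFolder(
  decompressStream: Stream.Readable,
  fileSizes: number[],
  expectedCrcs: (number | undefined)[]
): FolderStreamSplitter {
  const splitter = new FolderStreamSplitter({ fileSizes, verifyCrc: true, expectedCrcs });

  decompressStream.on('data', (chunk: Buffer) => {
    // write() returns false when a downstream file stream is backed up
    if (!splitter.write(chunk)) {
      decompressStream.pause();
      splitter.onDrain(() => decompressStream.resume());
    }
  });
  decompressStream.on('end', () => splitter.end());

  return splitter;
}

// Per-file streams are created lazily and must be consumed in order, e.g.:
// splitFolder(decoded, [1000, 2000, 500], [undefined, undefined, undefined]).getFileStream(0);
```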
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/NumberCodec.ts"],"sourcesContent":["// Variable-length integer encoding for 7z format\n// Reference: https://py7zr.readthedocs.io/en/latest/archive_format.html\n//\n// 7z uses a space-efficient encoding where the first byte determines length:\n// 0xxxxxxx -> 1 byte (0-127)\n// 10xxxxxx xxxxxxxx -> 2 bytes (0-16383)\n// 110xxxxx + 2 bytes -> 3 bytes\n// 1110xxxx + 3 bytes -> 4 bytes\n// 11110xxx + 4 bytes -> 5 bytes\n// 111110xx + 5 bytes -> 6 bytes\n// 1111110x + 6 bytes -> 7 bytes\n// 11111110 + 7 bytes -> 8 bytes\n// 11111111 + 8 bytes -> 9 bytes (full 64-bit)\n//\n// NOTE: Returns JavaScript number which is accurate up to 2^53 - 1 (~9 PB).\n// This covers all practical file sizes.\n\nimport { readUInt64LE } from 'extract-base-iterator';\n\nexport interface NumberReadResult {\n value: number;\n bytesRead: number;\n}\n\n/**\n * Read a variable-length encoded number from a buffer\n * Uses 7z's variable-length uint64 encoding where the first byte indicates\n * how many additional bytes follow based on its value:\n * - 0x00-0x7F: 0 extra bytes (7 bits of data)\n * - 0x80-0xBF: 1 extra byte (14 bits of data)\n * - 0xC0-0xDF: 2 extra bytes (21 bits of data)\n * - 0xE0-0xEF: 3 extra bytes (28 bits of data)\n * - etc.\n * - 0xFF: 8 extra bytes (full 64-bit value)\n *\n * @param buf - Buffer containing encoded number\n * @param offset - Offset to start reading from\n * @returns Object with value and number of bytes consumed\n */\nexport function readNumber(buf: Buffer, offset: number): NumberReadResult {\n const firstByte = buf[offset];\n\n // Special case: 0xFF means 8 extra bytes (full 64-bit value)\n if (firstByte === 0xff) {\n return {\n value: readUInt64LE(buf, offset + 1),\n bytesRead: 9,\n };\n }\n\n // Determine number of extra bytes based on first byte value thresholds\n // This matches the 7z format specification\n let extraBytes = 0;\n let mask = 0x80;\n\n if (firstByte <= 0x7f) {\n extraBytes = 0;\n mask = 0x80;\n } else if (firstByte <= 0xbf) {\n extraBytes = 1;\n mask = 0x40;\n } else if (firstByte <= 0xdf) {\n extraBytes = 2;\n mask = 0x20;\n } else if (firstByte <= 0xef) {\n extraBytes = 3;\n mask = 0x10;\n } else if (firstByte <= 0xf7) {\n extraBytes = 4;\n mask = 0x08;\n } else if (firstByte <= 0xfb) {\n extraBytes = 5;\n mask = 0x04;\n } else if (firstByte <= 0xfd) {\n extraBytes = 6;\n mask = 0x02;\n } else {\n // 0xFE\n extraBytes = 7;\n mask = 0x01;\n }\n\n // Get high part from first byte (bits below the length indicator)\n const highPart = firstByte & (mask - 1);\n\n // Read extra bytes as LITTLE-ENDIAN\n let value = 0;\n for (let i = 0; i < extraBytes; i++) {\n value += buf[offset + 1 + i] * 256 ** i;\n }\n\n // Combine: value + (highPart << (extraBytes * 8))\n value += highPart * 256 ** extraBytes;\n\n return {\n value: value,\n bytesRead: 1 + extraBytes,\n };\n}\n\n/**\n * Read a raw 64-bit little-endian number (used in some fixed-size fields)\n * @param buf - Buffer containing the number\n * @param offset - Offset to start reading from\n * @returns The number value\n */\nexport function readRawNumber(buf: Buffer, offset: number): number {\n return readUInt64LE(buf, offset);\n}\n\n/**\n * Calculate the encoded size of a number\n * @param value - The number to encode\n * @returns Number of bytes needed to encode the value\n */\nexport function encodedSize(value: number): number {\n if (value < 0x80) return 1; // 7 bits\n if (value < 0x4000) return 2; // 14 bits\n if (value < 0x200000) 
return 3; // 21 bits\n if (value < 0x10000000) return 4; // 28 bits\n if (value < 0x800000000) return 5; // 35 bits\n if (value < 0x40000000000) return 6; // 42 bits\n if (value < 0x2000000000000) return 7; // 49 bits\n // 2^56 = 72057594037927936 (use calculated value to avoid precision loss)\n if (value < 72057594037927936) return 8; // 56 bits\n return 9; // 64 bits\n}\n\n/**\n * Read a boolean encoded as a single byte\n * @param buf - Buffer to read from\n * @param offset - Offset to read from\n * @returns true if byte is non-zero\n */\nexport function readBoolean(buf: Buffer, offset: number): boolean {\n return buf[offset] !== 0;\n}\n\n/**\n * Read a \"defined\" bitmask for an array of items.\n * Used when some items in a list have optional values.\n *\n * Format: If \"allDefined\" byte is 0, a bitmask follows indicating which items have values.\n * If \"allDefined\" byte is non-zero, all items are defined.\n *\n * @param buf - Buffer to read from\n * @param offset - Offset to start reading\n * @param count - Number of items\n * @returns Object with defined array and bytes consumed\n */\nexport function readDefinedVector(buf: Buffer, offset: number, count: number): { defined: boolean[]; bytesRead: number } {\n const allDefined = buf[offset] !== 0;\n let bytesRead = 1;\n const defined: boolean[] = [];\n\n if (allDefined) {\n // All items are defined\n for (let i = 0; i < count; i++) {\n defined.push(true);\n }\n } else {\n // Read bitmask\n const bitsNeeded = count;\n const bytesNeeded = Math.ceil(bitsNeeded / 8);\n\n for (let byteIdx = 0; byteIdx < bytesNeeded; byteIdx++) {\n const byte = buf[offset + 1 + byteIdx];\n for (let bit = 7; bit >= 0 && defined.length < count; bit--) {\n defined.push((byte & (1 << bit)) !== 0);\n }\n }\n bytesRead += bytesNeeded;\n }\n\n return { defined: defined, bytesRead: bytesRead };\n}\n\n/**\n * Read an array of variable-length numbers\n * @param buf - Buffer to read from\n * @param offset - Offset to start reading\n * @param count - Number of items to read\n * @returns Object with values array and bytes consumed\n */\nexport function readNumberArray(buf: Buffer, offset: number, count: number): { values: number[]; bytesRead: number } {\n const values: number[] = [];\n let totalBytesRead = 0;\n\n for (let i = 0; i < count; i++) {\n const result = readNumber(buf, offset + totalBytesRead);\n values.push(result.value);\n totalBytesRead += result.bytesRead;\n }\n\n return { values: values, bytesRead: totalBytesRead 
};\n}\n"],"names":["encodedSize","readBoolean","readDefinedVector","readNumber","readNumberArray","readRawNumber","buf","offset","firstByte","value","readUInt64LE","bytesRead","extraBytes","mask","highPart","i","count","allDefined","defined","push","bitsNeeded","bytesNeeded","Math","ceil","byteIdx","byte","bit","length","values","totalBytesRead","result"],"mappings":"AAAA,iDAAiD;AACjD,wEAAwE;AACxE,EAAE;AACF,6EAA6E;AAC7E,iDAAiD;AACjD,mDAAmD;AACnD,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,uDAAuD;AACvD,EAAE;AACF,4EAA4E;AAC5E,wCAAwC;;;;;;;;;;;;QAoGxBA;eAAAA;;QAmBAC;eAAAA;;QAgBAC;eAAAA;;QA/GAC;eAAAA;;QAiJAC;eAAAA;;QA9EAC;eAAAA;;;mCAzFa;AAsBtB,SAASF,WAAWG,GAAW,EAAEC,MAAc;IACpD,IAAMC,YAAYF,GAAG,CAACC,OAAO;IAE7B,6DAA6D;IAC7D,IAAIC,cAAc,MAAM;QACtB,OAAO;YACLC,OAAOC,IAAAA,iCAAY,EAACJ,KAAKC,SAAS;YAClCI,WAAW;QACb;IACF;IAEA,uEAAuE;IACvE,2CAA2C;IAC3C,IAAIC,aAAa;IACjB,IAAIC,OAAO;IAEX,IAAIL,aAAa,MAAM;QACrBI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO;QACL,OAAO;QACPD,aAAa;QACbC,OAAO;IACT;IAEA,kEAAkE;IAClE,IAAMC,WAAWN,YAAaK,OAAO;IAErC,oCAAoC;IACpC,IAAIJ,QAAQ;IACZ,IAAK,IAAIM,IAAI,GAAGA,IAAIH,YAAYG,IAAK;QACnCN,SAASH,GAAG,CAACC,SAAS,IAAIQ,EAAE,YAAG,KAAOA;IACxC;IAEA,kDAAkD;IAClDN,SAASK,oBAAW,KAAOF;IAE3B,OAAO;QACLH,OAAOA;QACPE,WAAW,IAAIC;IACjB;AACF;AAQO,SAASP,cAAcC,GAAW,EAAEC,MAAc;IACvD,OAAOG,IAAAA,iCAAY,EAACJ,KAAKC;AAC3B;AAOO,SAASP,YAAYS,KAAa;IACvC,IAAIA,QAAQ,MAAM,OAAO,GAAG,SAAS;IACrC,IAAIA,QAAQ,QAAQ,OAAO,GAAG,UAAU;IACxC,IAAIA,QAAQ,UAAU,OAAO,GAAG,UAAU;IAC1C,IAAIA,QAAQ,YAAY,OAAO,GAAG,UAAU;IAC5C,IAAIA,QAAQ,aAAa,OAAO,GAAG,UAAU;IAC7C,IAAIA,QAAQ,eAAe,OAAO,GAAG,UAAU;IAC/C,IAAIA,QAAQ,iBAAiB,OAAO,GAAG,UAAU;IACjD,0EAA0E;IAC1E,IAAIA,QAAQ,mBAAmB,OAAO,GAAG,UAAU;IACnD,OAAO,GAAG,UAAU;AACtB;AAQO,SAASR,YAAYK,GAAW,EAAEC,MAAc;IACrD,OAAOD,GAAG,CAACC,OAAO,KAAK;AACzB;AAcO,SAASL,kBAAkBI,GAAW,EAAEC,MAAc,EAAES,KAAa;IAC1E,IAAMC,aAAaX,GAAG,CAACC,OAAO,KAAK;IACnC,IAAII,YAAY;IAChB,IAAMO,UAAqB,EAAE;IAE7B,IAAID,YAAY;QACd,wBAAwB;QACxB,IAAK,IAAIF,IAAI,GAAGA,IAAIC,OAAOD,IAAK;YAC9BG,QAAQC,IAAI,CAAC;QACf;IACF,OAAO;QACL,eAAe;QACf,IAAMC,aAAaJ;QACnB,IAAMK,cAAcC,KAAKC,IAAI,CAACH,aAAa;QAE3C,IAAK,IAAII,UAAU,GAAGA,UAAUH,aAAaG,UAAW;YACtD,IAAMC,OAAOnB,GAAG,CAACC,SAAS,IAAIiB,QAAQ;YACtC,IAAK,IAAIE,MAAM,GAAGA,OAAO,KAAKR,QAAQS,MAAM,GAAGX,OAAOU,MAAO;gBAC3DR,QAAQC,IAAI,CAAC,AAACM,CAAAA,OAAQ,KAAKC,GAAG,MAAO;YACvC;QACF;QACAf,aAAaU;IACf;IAEA,OAAO;QAAEH,SAASA;QAASP,WAAWA;IAAU;AAClD;AASO,SAASP,gBAAgBE,GAAW,EAAEC,MAAc,EAAES,KAAa;IACxE,IAAMY,SAAmB,EAAE;IAC3B,IAAIC,iBAAiB;IAErB,IAAK,IAAId,IAAI,GAAGA,IAAIC,OAAOD,IAAK;QAC9B,IAAMe,SAAS3B,WAAWG,KAAKC,SAASsB;QACxCD,OAAOT,IAAI,CAACW,OAAOrB,KAAK;QACxBoB,kBAAkBC,OAAOnB,SAAS;IACpC;IAEA,OAAO;QAAEiB,QAAQA;QAAQjB,WAAWkB;IAAe;AACrD"}
+ {"version":3,"sources":["/Users/kevin/Dev/iterators/7z-iterator/src/sevenz/NumberCodec.ts"],"sourcesContent":["// Variable-length integer encoding for 7z format\n// Reference: https://py7zr.readthedocs.io/en/latest/archive_format.html\n//\n// 7z uses a space-efficient encoding where the first byte determines length:\n// 0xxxxxxx -> 1 byte (0-127)\n// 10xxxxxx xxxxxxxx -> 2 bytes (0-16383)\n// 110xxxxx + 2 bytes -> 3 bytes\n// 1110xxxx + 3 bytes -> 4 bytes\n// 11110xxx + 4 bytes -> 5 bytes\n// 111110xx + 5 bytes -> 6 bytes\n// 1111110x + 6 bytes -> 7 bytes\n// 11111110 + 7 bytes -> 8 bytes\n// 11111111 + 8 bytes -> 9 bytes (full 64-bit)\n//\n// NOTE: Returns JavaScript number which is accurate up to 2^53 - 1 (~9 PB).\n// This covers all practical file sizes.\n\nimport { readUInt64LE } from 'extract-base-iterator';\n\nexport interface NumberReadResult {\n value: number;\n bytesRead: number;\n}\n\n/**\n * Read a variable-length encoded number from a buffer\n * Uses 7z's variable-length uint64 encoding where the first byte indicates\n * how many additional bytes follow based on its value:\n * - 0x00-0x7F: 0 extra bytes (7 bits of data)\n * - 0x80-0xBF: 1 extra byte (14 bits of data)\n * - 0xC0-0xDF: 2 extra bytes (21 bits of data)\n * - 0xE0-0xEF: 3 extra bytes (28 bits of data)\n * - etc.\n * - 0xFF: 8 extra bytes (full 64-bit value)\n *\n * @param buf - Buffer containing encoded number\n * @param offset - Offset to start reading from\n * @returns Object with value and number of bytes consumed\n */\nexport function readNumber(buf: Buffer, offset: number): NumberReadResult {\n const firstByte = buf[offset];\n\n // Special case: 0xFF means 8 extra bytes (full 64-bit value)\n if (firstByte === 0xff) {\n return {\n value: readUInt64LE(buf, offset + 1),\n bytesRead: 9,\n };\n }\n\n // Determine number of extra bytes based on first byte value thresholds\n // This matches the 7z format specification\n let extraBytes = 0;\n let mask = 0x80;\n\n if (firstByte <= 0x7f) {\n extraBytes = 0;\n mask = 0x80;\n } else if (firstByte <= 0xbf) {\n extraBytes = 1;\n mask = 0x40;\n } else if (firstByte <= 0xdf) {\n extraBytes = 2;\n mask = 0x20;\n } else if (firstByte <= 0xef) {\n extraBytes = 3;\n mask = 0x10;\n } else if (firstByte <= 0xf7) {\n extraBytes = 4;\n mask = 0x08;\n } else if (firstByte <= 0xfb) {\n extraBytes = 5;\n mask = 0x04;\n } else if (firstByte <= 0xfd) {\n extraBytes = 6;\n mask = 0x02;\n } else {\n // 0xFE\n extraBytes = 7;\n mask = 0x01;\n }\n\n // Get high part from first byte (bits below the length indicator)\n const highPart = firstByte & (mask - 1);\n\n // Read extra bytes as LITTLE-ENDIAN\n let value = 0;\n for (let i = 0; i < extraBytes; i++) {\n value += buf[offset + 1 + i] * 256 ** i;\n }\n\n // Combine: value + (highPart << (extraBytes * 8))\n value += highPart * 256 ** extraBytes;\n\n return {\n value: value,\n bytesRead: 1 + extraBytes,\n };\n}\n\n/**\n * Read a raw 64-bit little-endian number (used in some fixed-size fields)\n * @param buf - Buffer containing the number\n * @param offset - Offset to start reading from\n * @returns The number value\n */\nexport function readRawNumber(buf: Buffer, offset: number): number {\n return readUInt64LE(buf, offset);\n}\n\n/**\n * Calculate the encoded size of a number\n * @param value - The number to encode\n * @returns Number of bytes needed to encode the value\n */\nexport function encodedSize(value: number): number {\n if (value < 0x80) return 1; // 7 bits\n if (value < 0x4000) return 2; // 14 bits\n if (value < 0x200000) return 3; 
// 21 bits\n if (value < 0x10000000) return 4; // 28 bits\n if (value < 0x800000000) return 5; // 35 bits\n if (value < 0x40000000000) return 6; // 42 bits\n if (value < 0x2000000000000) return 7; // 49 bits\n // 2^56 = 72057594037927936 (use calculated value to avoid precision loss)\n if (value < 72057594037927936) return 8; // 56 bits\n return 9; // 64 bits\n}\n\n/**\n * Read a boolean encoded as a single byte\n * @param buf - Buffer to read from\n * @param offset - Offset to read from\n * @returns true if byte is non-zero\n */\nexport function readBoolean(buf: Buffer, offset: number): boolean {\n return buf[offset] !== 0;\n}\n\n/**\n * Read a \"defined\" bitmask for an array of items.\n * Used when some items in a list have optional values.\n *\n * Format: If \"allDefined\" byte is 0, a bitmask follows indicating which items have values.\n * If \"allDefined\" byte is non-zero, all items are defined.\n *\n * @param buf - Buffer to read from\n * @param offset - Offset to start reading\n * @param count - Number of items\n * @returns Object with defined array and bytes consumed\n */\nexport function readDefinedVector(buf: Buffer, offset: number, count: number): { defined: boolean[]; bytesRead: number } {\n const allDefined = buf[offset] !== 0;\n let bytesRead = 1;\n const defined: boolean[] = [];\n\n if (allDefined) {\n // All items are defined\n for (let i = 0; i < count; i++) {\n defined.push(true);\n }\n } else {\n // Read bitmask\n const bitsNeeded = count;\n const bytesNeeded = Math.ceil(bitsNeeded / 8);\n\n for (let byteIdx = 0; byteIdx < bytesNeeded; byteIdx++) {\n const byte = buf[offset + 1 + byteIdx];\n for (let bit = 7; bit >= 0 && defined.length < count; bit--) {\n defined.push((byte & (1 << bit)) !== 0);\n }\n }\n bytesRead += bytesNeeded;\n }\n\n return { defined: defined, bytesRead: bytesRead };\n}\n\n/**\n * Read an array of variable-length numbers\n * @param buf - Buffer to read from\n * @param offset - Offset to start reading\n * @param count - Number of items to read\n * @returns Object with values array and bytes consumed\n */\nexport function readNumberArray(buf: Buffer, offset: number, count: number): { values: number[]; bytesRead: number } {\n const values: number[] = [];\n let totalBytesRead = 0;\n\n for (let i = 0; i < count; i++) {\n const result = readNumber(buf, offset + totalBytesRead);\n values.push(result.value);\n totalBytesRead += result.bytesRead;\n }\n\n return { values: values, bytesRead: totalBytesRead 
};\n}\n"],"names":["encodedSize","readBoolean","readDefinedVector","readNumber","readNumberArray","readRawNumber","buf","offset","firstByte","value","readUInt64LE","bytesRead","extraBytes","mask","highPart","i","count","allDefined","defined","push","bitsNeeded","bytesNeeded","Math","ceil","byteIdx","byte","bit","length","values","totalBytesRead","result"],"mappings":"AAAA,iDAAiD;AACjD,wEAAwE;AACxE,EAAE;AACF,6EAA6E;AAC7E,iDAAiD;AACjD,mDAAmD;AACnD,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,yCAAyC;AACzC,uDAAuD;AACvD,EAAE;AACF,4EAA4E;AAC5E,wCAAwC;;;;;;;;;;;;QAoGxBA;eAAAA;;QAmBAC;eAAAA;;QAgBAC;eAAAA;;QA/GAC;eAAAA;;QAiJAC;eAAAA;;QA9EAC;eAAAA;;;mCAzFa;AAsBtB,SAASF,WAAWG,GAAW,EAAEC,MAAc;IACpD,IAAMC,YAAYF,GAAG,CAACC,OAAO;IAE7B,6DAA6D;IAC7D,IAAIC,cAAc,MAAM;QACtB,OAAO;YACLC,OAAOC,IAAAA,iCAAY,EAACJ,KAAKC,SAAS;YAClCI,WAAW;QACb;IACF;IAEA,uEAAuE;IACvE,2CAA2C;IAC3C,IAAIC,aAAa;IACjB,IAAIC,OAAO;IAEX,IAAIL,aAAa,MAAM;QACrBI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO,IAAIL,aAAa,MAAM;QAC5BI,aAAa;QACbC,OAAO;IACT,OAAO;QACL,OAAO;QACPD,aAAa;QACbC,OAAO;IACT;IAEA,kEAAkE;IAClE,IAAMC,WAAWN,YAAaK,OAAO;IAErC,oCAAoC;IACpC,IAAIJ,QAAQ;IACZ,IAAK,IAAIM,IAAI,GAAGA,IAAIH,YAAYG,IAAK;QACnCN,SAASH,GAAG,CAACC,SAAS,IAAIQ,EAAE,YAAG,KAAOA;IACxC;IAEA,kDAAkD;IAClDN,SAASK,oBAAW,KAAOF;IAE3B,OAAO;QACLH,OAAOA;QACPE,WAAW,IAAIC;IACjB;AACF;AAQO,SAASP,cAAcC,GAAW,EAAEC,MAAc;IACvD,OAAOG,IAAAA,iCAAY,EAACJ,KAAKC;AAC3B;AAOO,SAASP,YAAYS,KAAa;IACvC,IAAIA,QAAQ,MAAM,OAAO,GAAG,SAAS;IACrC,IAAIA,QAAQ,QAAQ,OAAO,GAAG,UAAU;IACxC,IAAIA,QAAQ,UAAU,OAAO,GAAG,UAAU;IAC1C,IAAIA,QAAQ,YAAY,OAAO,GAAG,UAAU;IAC5C,IAAIA,QAAQ,aAAa,OAAO,GAAG,UAAU;IAC7C,IAAIA,QAAQ,eAAe,OAAO,GAAG,UAAU;IAC/C,IAAIA,QAAQ,iBAAiB,OAAO,GAAG,UAAU;IACjD,0EAA0E;IAC1E,IAAIA,QAAQ,mBAAmB,OAAO,GAAG,UAAU;IACnD,OAAO,GAAG,UAAU;AACtB;AAQO,SAASR,YAAYK,GAAW,EAAEC,MAAc;IACrD,OAAOD,GAAG,CAACC,OAAO,KAAK;AACzB;AAcO,SAASL,kBAAkBI,GAAW,EAAEC,MAAc,EAAES,KAAa;IAC1E,IAAMC,aAAaX,GAAG,CAACC,OAAO,KAAK;IACnC,IAAII,YAAY;IAChB,IAAMO,UAAqB,EAAE;IAE7B,IAAID,YAAY;QACd,wBAAwB;QACxB,IAAK,IAAIF,IAAI,GAAGA,IAAIC,OAAOD,IAAK;YAC9BG,QAAQC,IAAI,CAAC;QACf;IACF,OAAO;QACL,eAAe;QACf,IAAMC,aAAaJ;QACnB,IAAMK,cAAcC,KAAKC,IAAI,CAACH,aAAa;QAE3C,IAAK,IAAII,UAAU,GAAGA,UAAUH,aAAaG,UAAW;YACtD,IAAMC,OAAOnB,GAAG,CAACC,SAAS,IAAIiB,QAAQ;YACtC,IAAK,IAAIE,MAAM,GAAGA,OAAO,KAAKR,QAAQS,MAAM,GAAGX,OAAOU,MAAO;gBAC3DR,QAAQC,IAAI,CAAC,AAACM,CAAAA,OAAQ,KAAKC,GAAG,MAAO;YACvC;QACF;QACAf,aAAaU;IACf;IAEA,OAAO;QAAEH,SAASA;QAASP,WAAWA;IAAU;AAClD;AASO,SAASP,gBAAgBE,GAAW,EAAEC,MAAc,EAAES,KAAa;IACxE,IAAMY,SAAmB,EAAE;IAC3B,IAAIC,iBAAiB;IAErB,IAAK,IAAId,IAAI,GAAGA,IAAIC,OAAOD,IAAK;QAC9B,IAAMe,SAAS3B,WAAWG,KAAKC,SAASsB;QACxCD,OAAOT,IAAI,CAACW,OAAOrB,KAAK;QACxBoB,kBAAkBC,OAAOnB,SAAS;IACpC;IAEA,OAAO;QAAEiB,QAAQA;QAAQjB,WAAWkB;IAAe;AACrD"}
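The NumberCodec source embedded in the map above documents the 7z variable-length integer layout: the count of leading 1-bits in the first byte gives the number of extra little-endian bytes, and the remaining low bits of the first byte become the high part of the value. The standalone TypeScript sketch below re-derives that rule on a few hand-checked byte sequences; it is illustrative only and is not an import from the package, whose actual implementation lives in src/sevenz/NumberCodec.ts.

```ts
// Standalone sketch of the decoding rule documented above (table of bit
// patterns 0xxxxxxx, 10xxxxxx, 110xxxxx, ...). Counts leading 1-bits
// instead of the threshold chain, but the arithmetic is the same.
function decode7zNumber(buf: Buffer, offset = 0): { value: number; bytesRead: number } {
  const first = buf[offset];
  if (first === 0xff) {
    // 0xFF: full 64-bit little-endian payload; Number stays exact up to 2^53 - 1
    return { value: Number(buf.readBigUInt64LE(offset + 1)), bytesRead: 9 };
  }
  // Count leading 1-bits of the first byte -> number of extra bytes that follow
  let extraBytes = 0;
  while (first & (0x80 >> extraBytes)) extraBytes++;
  const mask = 0x80 >> extraBytes;
  const highPart = first & (mask - 1); // data bits below the length indicator
  let value = 0;
  for (let i = 0; i < extraBytes; i++) {
    value += buf[offset + 1 + i] * 256 ** i; // extra bytes are little-endian
  }
  value += highPart * 256 ** extraBytes; // high part sits above the extra bytes
  return { value, bytesRead: 1 + extraBytes };
}

// 0x7F           -> { value: 127,   bytesRead: 1 }  (single-byte form)
// 0x80 0xC8      -> { value: 200,   bytesRead: 2 }  (high bits 0, one extra byte)
// 0xC1 0x34 0x12 -> { value: 70196, bytesRead: 3 }  (0x1 << 16 | 0x1234)
console.log(decode7zNumber(Buffer.from([0x7f])));
console.log(decode7zNumber(Buffer.from([0x80, 0xc8])));
console.log(decode7zNumber(Buffer.from([0xc1, 0x34, 0x12])));
```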
@@ -38,6 +38,8 @@ export interface SevenZipEntry {
  _crc?: number;
  _canStream: boolean;
  }
+ /** Callback for operations that don't return data */
+ export type VoidCallback = (error: Error | null) => void;
  /**
  * SevenZipParser - parses 7z archives and provides entry iteration
  */
@@ -52,16 +54,20 @@ export declare class SevenZipParser {
  private filesPerFolder;
  private extractedPerFolder;
  private folderSplitters;
+ private pendingFolders;
  constructor(source: ArchiveSource);
+ private decodeWithCodec;
  /**
  * Parse the archive structure
  * Must be called before iterating entries
  */
- parse(): void;
+ parse(callback?: VoidCallback): Promise<void> | void;
+ private parseInternal;
  /**
  * Handle compressed header (kEncodedHeader)
  */
  private handleCompressedHeader;
+ private parseDecompressedHeader;
  /**
  * Parse streams info from encoded header block
  * This is a simplified parser for the header's own compression info
@@ -104,11 +110,16 @@ export declare class SevenZipParser {
  * Only caches when multiple files share a block, releases when last file extracted
  */
  private getDecompressedFolder;
+ private shouldCacheFolder;
+ private decodeFolderData;
+ private readPackedData;
+ private decodeFolderCoders;
  /**
  * Decompress a BCJ2 folder with multi-stream handling
  * BCJ2 uses 4 input streams: main, call, jump, range coder
  */
  private decompressBcj2Folder;
+ private finishBcj2Decode;
  /**
  * Get processing order for coders (dependency order)
  */
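The declaration changes above give parse() a dual calling convention: a VoidCallback reports completion through the callback, and omitting it presumably yields the Promise branch of the Promise<void> | void return type. The sketch below shows both styles against a minimal structural type copied from the declaration; how a SevenZipParser instance and its ArchiveSource are constructed is not part of this diff and is left abstract.

```ts
// Usage sketch for the dual callback/promise parse() signature added above.
// ParserLike mirrors only the declared method; it is not the package's API surface.
type VoidCallback = (error: Error | null) => void;

interface ParserLike {
  parse(callback?: VoidCallback): Promise<void> | void;
}

async function parseWithPromise(parser: ParserLike): Promise<void> {
  // No callback: await the returned promise (assumed to be returned in this branch).
  await parser.parse();
}

function parseWithCallback(parser: ParserLike): void {
  // Callback supplied: completion and errors arrive through the VoidCallback.
  parser.parse((error) => {
    if (error) {
      console.error('parse failed:', error);
      return;
    }
    // archive structure parsed; entry iteration can begin
  });
}
```

Supporting both forms presumably lets callers of the previous synchronous parse(): void migrate incrementally while new code awaits the promise.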
@@ -38,6 +38,8 @@ export interface SevenZipEntry {
  _crc?: number;
  _canStream: boolean;
  }
+ /** Callback for operations that don't return data */
+ export type VoidCallback = (error: Error | null) => void;
  /**
  * SevenZipParser - parses 7z archives and provides entry iteration
  */
@@ -52,16 +54,20 @@ export declare class SevenZipParser {
  private filesPerFolder;
  private extractedPerFolder;
  private folderSplitters;
+ private pendingFolders;
  constructor(source: ArchiveSource);
+ private decodeWithCodec;
  /**
  * Parse the archive structure
  * Must be called before iterating entries
  */
- parse(): void;
+ parse(callback?: VoidCallback): Promise<void> | void;
+ private parseInternal;
  /**
  * Handle compressed header (kEncodedHeader)
  */
  private handleCompressedHeader;
+ private parseDecompressedHeader;
  /**
  * Parse streams info from encoded header block
  * This is a simplified parser for the header's own compression info
@@ -104,11 +110,16 @@ export declare class SevenZipParser {
  * Only caches when multiple files share a block, releases when last file extracted
  */
  private getDecompressedFolder;
+ private shouldCacheFolder;
+ private decodeFolderData;
+ private readPackedData;
+ private decodeFolderCoders;
  /**
  * Decompress a BCJ2 folder with multi-stream handling
  * BCJ2 uses 4 input streams: main, call, jump, range coder
  */
  private decompressBcj2Folder;
+ private finishBcj2Decode;
  /**
  * Get processing order for coders (dependency order)
  */
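The getDecompressedFolder comment above describes a reference-counted caching policy: a decoded block is kept only while multiple files share it and is released once the last of them has been extracted. The sketch below illustrates that policy using the filesPerFolder / extractedPerFolder counters named in the declaration; the Map-based cache and every method name other than shouldCacheFolder are assumptions for illustration, not the package's actual implementation.

```ts
// Illustrative sketch of the described caching policy (not the real class).
class FolderCacheSketch {
  private cache = new Map<number, Buffer>(); // assumed cache shape

  constructor(
    private filesPerFolder: number[],     // entries contained in each folder (block)
    private extractedPerFolder: number[], // entries already extracted from each folder
  ) {}

  shouldCacheFolder(folderIndex: number): boolean {
    // Caching only pays off when more than one file shares the decoded block.
    return this.filesPerFolder[folderIndex] > 1;
  }

  onFolderDecoded(folderIndex: number, data: Buffer): void {
    if (this.shouldCacheFolder(folderIndex)) this.cache.set(folderIndex, data);
  }

  onFileExtracted(folderIndex: number): void {
    this.extractedPerFolder[folderIndex]++;
    // Release the block once the last file that needs it has been extracted.
    if (this.extractedPerFolder[folderIndex] >= this.filesPerFolder[folderIndex]) {
      this.cache.delete(folderIndex);
    }
  }
}
```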