7z-iterator 1.1.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (184) hide show
  1. package/dist/cjs/FileEntry.d.cts +12 -4
  2. package/dist/cjs/FileEntry.d.ts +12 -4
  3. package/dist/cjs/FileEntry.js +52 -24
  4. package/dist/cjs/FileEntry.js.map +1 -1
  5. package/dist/cjs/SevenZipIterator.d.cts +25 -2
  6. package/dist/cjs/SevenZipIterator.d.ts +25 -2
  7. package/dist/cjs/SevenZipIterator.js +68 -21
  8. package/dist/cjs/SevenZipIterator.js.map +1 -1
  9. package/dist/cjs/index.d.cts +0 -2
  10. package/dist/cjs/index.d.ts +0 -2
  11. package/dist/cjs/index.js +3 -12
  12. package/dist/cjs/index.js.map +1 -1
  13. package/dist/cjs/lib/streamToSource.d.cts +8 -11
  14. package/dist/cjs/lib/streamToSource.d.ts +8 -11
  15. package/dist/cjs/lib/streamToSource.js +21 -67
  16. package/dist/cjs/lib/streamToSource.js.map +1 -1
  17. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  18. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  19. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  20. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  21. package/dist/cjs/lzma/index.d.cts +13 -0
  22. package/dist/cjs/lzma/index.d.ts +13 -0
  23. package/dist/cjs/lzma/index.js +63 -0
  24. package/dist/cjs/lzma/index.js.map +1 -0
  25. package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
  26. package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
  27. package/dist/cjs/lzma/stream/transforms.js +149 -0
  28. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  29. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
  30. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
  31. package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
  32. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  33. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
  34. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
  35. package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
  36. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  37. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  38. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  39. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  40. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  41. package/dist/cjs/lzma/types.d.cts +110 -0
  42. package/dist/cjs/lzma/types.d.ts +110 -0
  43. package/dist/cjs/lzma/types.js +264 -0
  44. package/dist/cjs/lzma/types.js.map +1 -0
  45. package/dist/cjs/nextEntry.js +24 -26
  46. package/dist/cjs/nextEntry.js.map +1 -1
  47. package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
  48. package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
  49. package/dist/cjs/sevenz/ArchiveSource.js +69 -0
  50. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  51. package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
  52. package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
  53. package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
  54. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
  55. package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
  56. package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
  57. package/dist/cjs/sevenz/SevenZipParser.js +574 -203
  58. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  59. package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
  60. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  61. package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
  62. package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
  63. package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
  64. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  65. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
  66. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
  67. package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
  68. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  69. package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
  70. package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
  71. package/dist/cjs/sevenz/codecs/Copy.js +2 -15
  72. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  73. package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
  74. package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
  75. package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
  76. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  77. package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
  78. package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
  79. package/dist/cjs/sevenz/codecs/Delta.js +29 -10
  80. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  81. package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
  82. package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
  83. package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
  84. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  85. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
  86. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
  87. package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
  88. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  89. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
  90. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  91. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
  92. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  93. package/dist/cjs/types.d.cts +2 -16
  94. package/dist/cjs/types.d.ts +2 -16
  95. package/dist/cjs/types.js.map +1 -1
  96. package/dist/esm/FileEntry.d.ts +12 -4
  97. package/dist/esm/FileEntry.js +52 -26
  98. package/dist/esm/FileEntry.js.map +1 -1
  99. package/dist/esm/SevenZipIterator.d.ts +25 -2
  100. package/dist/esm/SevenZipIterator.js +69 -22
  101. package/dist/esm/SevenZipIterator.js.map +1 -1
  102. package/dist/esm/index.d.ts +0 -2
  103. package/dist/esm/index.js +0 -1
  104. package/dist/esm/index.js.map +1 -1
  105. package/dist/esm/lib/streamToSource.d.ts +8 -11
  106. package/dist/esm/lib/streamToSource.js +22 -68
  107. package/dist/esm/lib/streamToSource.js.map +1 -1
  108. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  109. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  110. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  111. package/dist/esm/lzma/index.d.ts +13 -0
  112. package/dist/esm/lzma/index.js +15 -0
  113. package/dist/esm/lzma/index.js.map +1 -0
  114. package/dist/esm/lzma/stream/transforms.d.ts +38 -0
  115. package/dist/esm/lzma/stream/transforms.js +150 -0
  116. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  117. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
  118. package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
  119. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  120. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
  121. package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
  122. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  123. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  124. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  125. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  126. package/dist/esm/lzma/types.d.ts +110 -0
  127. package/dist/esm/lzma/types.js +154 -0
  128. package/dist/esm/lzma/types.js.map +1 -0
  129. package/dist/esm/nextEntry.js +24 -26
  130. package/dist/esm/nextEntry.js.map +1 -1
  131. package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
  132. package/dist/esm/sevenz/ArchiveSource.js +70 -1
  133. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  134. package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
  135. package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
  136. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
  137. package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
  138. package/dist/esm/sevenz/SevenZipParser.js +414 -198
  139. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  140. package/dist/esm/sevenz/codecs/BZip2.js +2 -1
  141. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  142. package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
  143. package/dist/esm/sevenz/codecs/Bcj.js +106 -6
  144. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  145. package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
  146. package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
  147. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  148. package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
  149. package/dist/esm/sevenz/codecs/Copy.js +1 -9
  150. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  151. package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
  152. package/dist/esm/sevenz/codecs/Deflate.js +9 -7
  153. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  154. package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
  155. package/dist/esm/sevenz/codecs/Delta.js +33 -8
  156. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  157. package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
  158. package/dist/esm/sevenz/codecs/Lzma.js +17 -24
  159. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  160. package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
  161. package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
  162. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  163. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  164. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
  165. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  166. package/dist/esm/types.d.ts +2 -16
  167. package/dist/esm/types.js.map +1 -1
  168. package/package.json +3 -3
  169. package/assets/lzma-purejs/LICENSE +0 -11
  170. package/assets/lzma-purejs/index.js +0 -19
  171. package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
  172. package/assets/lzma-purejs/lib/LZ.js +0 -6
  173. package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
  174. package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
  175. package/assets/lzma-purejs/lib/LZMA.js +0 -6
  176. package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
  177. package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
  178. package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
  179. package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
  180. package/assets/lzma-purejs/lib/Stream.js +0 -41
  181. package/assets/lzma-purejs/lib/Util.js +0 -114
  182. package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
  183. package/assets/lzma-purejs/package-lock.json +0 -13
  184. package/assets/lzma-purejs/package.json +0 -8
@@ -19,12 +19,13 @@ function _interop_require_default(obj) {
19
19
  };
20
20
  }
21
21
  function nextEntry(iterator, callback) {
22
- if (!iterator.iterator) {
22
+ var internalIter = iterator._iterator;
23
+ if (!internalIter) {
23
24
  callback(new Error('iterator missing'));
24
25
  return;
25
26
  }
26
27
  var entry = null;
27
- entry = iterator.iterator.next();
28
+ entry = internalIter.next();
28
29
  var nextCallback = (0, _calloncefn.default)(function(err, entry) {
29
30
  // keep processing
30
31
  if (entry) iterator.push(nextEntry);
@@ -68,28 +69,24 @@ function nextEntry(iterator, callback) {
68
69
  {
69
70
  // For symlinks, the file content IS the symlink target path
70
71
  // Read the content to get the linkpath for SymbolicLinkEntry
71
- var parser = iterator.iterator.getParser();
72
- // Use callback-based async decompression
73
- parser.getEntryStreamAsync(entry, function(err, stream) {
74
- if (err) return nextCallback(err);
75
- if (!stream) return nextCallback(new Error('No stream returned'));
76
- var chunks = [];
77
- stream.on('data', function(chunk) {
78
- chunks.push(chunk);
79
- });
80
- stream.on('end', function() {
81
- var linkpath = Buffer.concat(chunks).toString('utf8');
82
- var linkAttributes = {
83
- path: attributes.path,
84
- mtime: attributes.mtime,
85
- mode: attributes.mode,
86
- linkpath: linkpath
87
- };
88
- nextCallback(null, new _extractbaseiterator.SymbolicLinkEntry(linkAttributes));
89
- });
90
- stream.on('error', function(streamErr) {
91
- nextCallback(streamErr);
92
- });
72
+ var parser = internalIter.getParser();
73
+ var stream = parser.getEntryStream(entry);
74
+ var chunks = [];
75
+ stream.on('data', function(chunk) {
76
+ chunks.push(chunk);
77
+ });
78
+ stream.on('end', function() {
79
+ var linkpath = Buffer.concat(chunks).toString('utf8');
80
+ var linkAttributes = {
81
+ path: attributes.path,
82
+ mtime: attributes.mtime,
83
+ mode: attributes.mode,
84
+ linkpath: linkpath
85
+ };
86
+ nextCallback(null, new _extractbaseiterator.SymbolicLinkEntry(linkAttributes));
87
+ });
88
+ stream.on('error', function(streamErr) {
89
+ nextCallback(streamErr);
93
90
  });
94
91
  return;
95
92
  }
@@ -97,8 +94,9 @@ function nextEntry(iterator, callback) {
97
94
  {
98
95
  attributes.type = 'file';
99
96
  attributes.size = entry.size;
100
- var parser2 = iterator.iterator.getParser();
101
- return nextCallback(null, new _FileEntryts.default(attributes, entry, parser2, iterator.lock));
97
+ var parser1 = internalIter.getParser();
98
+ var stream1 = parser1.getEntryStream(entry);
99
+ return nextCallback(null, new _FileEntryts.default(attributes, stream1, iterator.lock, entry._canStream));
102
100
  }
103
101
  }
104
102
  return callback(new Error("Unrecognized entry type: ".concat(type)));
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n if (!iterator.iterator) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = iterator.iterator.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? 
{ done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = iterator.iterator.getParser();\n\n // Use callback-based async decompression\n parser.getEntryStreamAsync(entry, (err, stream) => {\n if (err) return nextCallback(err);\n if (!stream) return nextCallback(new Error('No stream returned'));\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', 
(streamErr: Error) => {\n nextCallback(streamErr);\n });\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser2 = iterator.iterator.getParser();\n return nextCallback(null, new FileEntry(attributes as FileAttributes, entry, parser2, iterator.lock));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","compact","split","sep","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","getEntryStreamAsync","stream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","parser2","FileEntry"],"mappings":";;;;+BAsBA;;;eAAwBA;;;iEAtBP;mCACqG;oEAClG;2DACH;kEACK;;;;;;AAkBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAI,CAACD,SAASA,QAAQ,EAAE;QACtBC,SAAS,IAAIC,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,SAASA,QAAQ,CAACI,IAAI;IAE9B,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOH,SAASQ,IAAI,CAACT;QACzBQ,MAAMN,SAASM,OAAON,SAAS,MAAME,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACV,SAASW,IAAI,IAAIX,SAASY,MAAM,MAAM,CAACT,OAAO,OAAOF,SAAS,MAAM;QAAEQ,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBb,SAASQ,IAAI,CAACT;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMa,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMC,IAAAA,sBAAO,EAAClB,MAAMiB,IAAI,CAACE,KAAK,CAACF,aAAI,CAACG,GAAG,GAAGC,IAAI,CAACJ,aAAI,CAACG,GAAG;QACvDE,UAAUtB,MAAMuB,IAAI;QACpBT,OAAOD,UAAUW,OAAO;QACxBC,MAAMzB,MAAMyB,IAAI,KAAKC,YAAY1B,MAAMyB,IAAI,GAAGb;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MA
AM,IAAIyB,mCAAc,CAACX;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMY,SAAS/B,SAASA,QAAQ,CAACgC,SAAS;gBAE1C,yCAAyC;gBACzCD,OAAOE,mBAAmB,CAAC9B,OAAO,SAACI,KAAK2B;oBACtC,IAAI3B,KAAK,OAAOF,aAAaE;oBAC7B,IAAI,CAAC2B,QAAQ,OAAO7B,aAAa,IAAIH,MAAM;oBAE3C,IAAMiC,SAAmB,EAAE;oBAE3BD,OAAOE,EAAE,CAAC,QAAQ,SAACC;wBACjBF,OAAO3B,IAAI,CAAC6B;oBACd;oBACAH,OAAOE,EAAE,CAAC,OAAO;wBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;wBAEhD,IAAMC,iBAAiC;4BACrCtB,MAAMD,WAAWC,IAAI;4BACrBH,OAAOE,WAAWF,KAAK;4BACvBW,MAAMT,WAAWS,IAAI;4BACrBU,UAAUA;wBACZ;wBAEAjC,aAAa,MAAM,IAAIsC,sCAAiB,CAACD;oBAC3C;oBACAR,OAAOE,EAAE,CAAC,SAAS,SAACQ;wBAClBvC,aAAauC;oBACf;gBACF;gBACA;YACF;QAEA,KAAK;YAAQ;gBACXzB,WAAWL,IAAI,GAAG;gBAClBK,WAAW0B,IAAI,GAAG1C,MAAM0C,IAAI;gBAC5B,IAAMC,UAAU9C,SAASA,QAAQ,CAACgC,SAAS;gBAC3C,OAAO3B,aAAa,MAAM,IAAI0C,oBAAS,CAAC5B,YAA8BhB,OAAO2C,SAAS9C,SAASW,IAAI;YACrG;IACF;IAEA,OAAOV,SAAS,IAAIC,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n/** @internal */\ninterface InternalIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n const internalIter = iterator._iterator as InternalIterator | null;\n if (!internalIter) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = internalIter.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? 
{ done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = internalIter.getParser();\n const stream = parser.getEntryStream(entry);\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser = 
internalIter.getParser();\n\n const stream = parser.getEntryStream(entry);\n return nextCallback(null, new FileEntry(attributes as FileAttributes, stream, iterator.lock, entry._canStream));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","internalIter","_iterator","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","compact","split","sep","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","stream","getEntryStream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","FileEntry","_canStream"],"mappings":";;;;+BA4BA;;;eAAwBA;;;iEA5BP;mCACqG;oEAClG;2DACH;kEACK;;;;;;AAwBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAMC,eAAeF,SAASG,SAAS;IACvC,IAAI,CAACD,cAAc;QACjBD,SAAS,IAAIG,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,aAAaI,IAAI;IAEzB,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOL,SAASU,IAAI,CAACX;QACzBU,MAAMR,SAASQ,OAAOR,SAAS,MAAMI,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACZ,SAASa,IAAI,IAAIb,SAASc,MAAM,MAAM,CAACT,OAAO,OAAOJ,SAAS,MAAM;QAAEU,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBf,SAASU,IAAI,CAACX;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMe,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMC,IAAAA,sBAAO,EAAClB,MAAMiB,IAAI,CAACE,KAAK,CAACF,aAAI,CAACG,GAAG,GAAGC,IAAI,CAACJ,aAAI,CAACG,GAAG;QACvDE,UAAUtB,MAAMuB,IAAI;QACpBT,OAAOD,UAAUW,OAAO;QACxBC,MAAMzB,MAAMyB,IAAI,KAAKC,YAAY1B,MAAMyB,IAAI,GAAGb;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAIyB,mCAAc,CAACX;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMY,SAAS/B,aAA
agC,SAAS;gBACrC,IAAMC,SAASF,OAAOG,cAAc,CAAC/B;gBAErC,IAAMgC,SAAmB,EAAE;gBAE3BF,OAAOG,EAAE,CAAC,QAAQ,SAACC;oBACjBF,OAAO3B,IAAI,CAAC6B;gBACd;gBACAJ,OAAOG,EAAE,CAAC,OAAO;oBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;oBAEhD,IAAMC,iBAAiC;wBACrCtB,MAAMD,WAAWC,IAAI;wBACrBH,OAAOE,WAAWF,KAAK;wBACvBW,MAAMT,WAAWS,IAAI;wBACrBU,UAAUA;oBACZ;oBAEAjC,aAAa,MAAM,IAAIsC,sCAAiB,CAACD;gBAC3C;gBACAT,OAAOG,EAAE,CAAC,SAAS,SAACQ;oBAClBvC,aAAauC;gBACf;gBACA;YACF;QAEA,KAAK;YAAQ;gBACXzB,WAAWL,IAAI,GAAG;gBAClBK,WAAW0B,IAAI,GAAG1C,MAAM0C,IAAI;gBAC5B,IAAMd,UAAS/B,aAAagC,SAAS;gBAErC,IAAMC,UAASF,QAAOG,cAAc,CAAC/B;gBACrC,OAAOE,aAAa,MAAM,IAAIyC,oBAAS,CAAC3B,YAA8Bc,SAAQnC,SAASa,IAAI,EAAER,MAAM4C,UAAU;YAC/G;IACF;IAEA,OAAOhD,SAAS,IAAIG,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
@@ -4,6 +4,7 @@
4
4
  * Provides a common interface for reading archive data from either
5
5
  * a file descriptor or an in-memory buffer.
6
6
  */
7
+ import type Stream from 'stream';
7
8
  /**
8
9
  * Archive source abstraction - allows reading from file descriptor or buffer
9
10
  */
@@ -11,6 +12,11 @@ export interface ArchiveSource {
11
12
  read(position: number, length: number): Buffer;
12
13
  getSize(): number;
13
14
  close(): void;
15
+ /**
16
+ * Create a readable stream for a portion of the archive.
17
+ * Used for streaming decompression.
18
+ */
19
+ createReadStream(offset: number, length: number): Stream.Readable;
14
20
  }
15
21
  /**
16
22
  * Buffer-based archive source
@@ -23,6 +29,11 @@ export declare class BufferSource implements ArchiveSource {
23
29
  read(position: number, length: number): Buffer;
24
30
  getSize(): number;
25
31
  close(): void;
32
+ /**
33
+ * Create a readable stream for a portion of the buffer.
34
+ * Streams the data in chunks to avoid blocking.
35
+ */
36
+ createReadStream(offset: number, length: number): Stream.Readable;
26
37
  }
27
38
  /**
28
39
  * File descriptor based archive source
@@ -37,4 +48,9 @@ export declare class FileSource implements ArchiveSource {
37
48
  read(position: number, length: number): Buffer;
38
49
  getSize(): number;
39
50
  close(): void;
51
+ /**
52
+ * Create a readable stream for a portion of the file.
53
+ * Uses async fs.read() to avoid blocking the event loop.
54
+ */
55
+ createReadStream(offset: number, length: number): Stream.Readable;
40
56
  }
@@ -4,6 +4,7 @@
4
4
  * Provides a common interface for reading archive data from either
5
5
  * a file descriptor or an in-memory buffer.
6
6
  */
7
+ import type Stream from 'stream';
7
8
  /**
8
9
  * Archive source abstraction - allows reading from file descriptor or buffer
9
10
  */
@@ -11,6 +12,11 @@ export interface ArchiveSource {
11
12
  read(position: number, length: number): Buffer;
12
13
  getSize(): number;
13
14
  close(): void;
15
+ /**
16
+ * Create a readable stream for a portion of the archive.
17
+ * Used for streaming decompression.
18
+ */
19
+ createReadStream(offset: number, length: number): Stream.Readable;
14
20
  }
15
21
  /**
16
22
  * Buffer-based archive source
@@ -23,6 +29,11 @@ export declare class BufferSource implements ArchiveSource {
23
29
  read(position: number, length: number): Buffer;
24
30
  getSize(): number;
25
31
  close(): void;
32
+ /**
33
+ * Create a readable stream for a portion of the buffer.
34
+ * Streams the data in chunks to avoid blocking.
35
+ */
36
+ createReadStream(offset: number, length: number): Stream.Readable;
26
37
  }
27
38
  /**
28
39
  * File descriptor based archive source
@@ -37,4 +48,9 @@ export declare class FileSource implements ArchiveSource {
37
48
  read(position: number, length: number): Buffer;
38
49
  getSize(): number;
39
50
  close(): void;
51
+ /**
52
+ * Create a readable stream for a portion of the file.
53
+ * Uses async fs.read() to avoid blocking the event loop.
54
+ */
55
+ createReadStream(offset: number, length: number): Stream.Readable;
40
56
  }
@@ -33,6 +33,14 @@ function _interop_require_default(obj) {
33
33
  default: obj
34
34
  };
35
35
  }
36
+ // Helper to create a Readable stream compatible with Node 0.8
37
+ function createReadableStream(readFn) {
38
+ var stream = new _extractbaseiterator.Readable();
39
+ stream._read = function() {
40
+ readFn(this);
41
+ };
42
+ return stream;
43
+ }
36
44
  var BufferSource = /*#__PURE__*/ function() {
37
45
  "use strict";
38
46
  function BufferSource(buffer) {
@@ -49,6 +57,25 @@ var BufferSource = /*#__PURE__*/ function() {
49
57
  _proto.close = function close() {
50
58
  // Nothing to close for buffer
51
59
  };
60
+ /**
61
+ * Create a readable stream for a portion of the buffer.
62
+ * Streams the data in chunks to avoid blocking.
63
+ */ _proto.createReadStream = function createReadStream(offset, length) {
64
+ var buffer = this.buffer;
65
+ var end = Math.min(offset + length, buffer.length);
66
+ var currentPos = offset;
67
+ var chunkSize = 65536; // 64KB chunks
68
+ return createReadableStream(function(stream) {
69
+ if (currentPos >= end) {
70
+ stream.push(null);
71
+ return;
72
+ }
73
+ var toRead = Math.min(chunkSize, end - currentPos);
74
+ var chunk = buffer.slice(currentPos, currentPos + toRead);
75
+ currentPos += toRead;
76
+ stream.push(chunk);
77
+ });
78
+ };
52
79
  return BufferSource;
53
80
  }();
54
81
  var FileSource = /*#__PURE__*/ function() {
@@ -77,6 +104,48 @@ var FileSource = /*#__PURE__*/ function() {
77
104
  // Ignore close errors
78
105
  }
79
106
  };
107
+ /**
108
+ * Create a readable stream for a portion of the file.
109
+ * Uses async fs.read() to avoid blocking the event loop.
110
+ */ _proto.createReadStream = function createReadStream(offset, length) {
111
+ var fd = this.fd;
112
+ var bytesRead = 0;
113
+ var reading = false;
114
+ var finished = false;
115
+ var chunkSize = 65536; // 64KB chunks
116
+ var _streamRef = null;
117
+ var stream = createReadableStream(function(s) {
118
+ _streamRef = s;
119
+ if (reading || finished) return; // Prevent re-entrant reads
120
+ var toRead = Math.min(chunkSize, length - bytesRead);
121
+ if (toRead <= 0) {
122
+ finished = true;
123
+ s.push(null);
124
+ return;
125
+ }
126
+ reading = true;
127
+ var buffer = (0, _extractbaseiterator.allocBuffer)(toRead);
128
+ var currentOffset = offset + bytesRead;
129
+ _fs.default.read(fd, buffer, 0, toRead, currentOffset, function(err, n) {
130
+ reading = false;
131
+ if (err) {
132
+ // Emit error for Node 0.8 compatibility (no destroy method)
133
+ s.emit('error', err);
134
+ finished = true;
135
+ s.push(null);
136
+ return;
137
+ }
138
+ if (n === 0) {
139
+ finished = true;
140
+ s.push(null);
141
+ } else {
142
+ bytesRead += n;
143
+ s.push(buffer.slice(0, n));
144
+ }
145
+ });
146
+ });
147
+ return stream;
148
+ };
80
149
  return FileSource;
81
150
  }();
82
151
  /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer } from 'extract-base-iterator';\nimport fs from 'fs';\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n }\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n 
}\n}\n"],"names":["BufferSource","FileSource","buffer","read","position","length","slice","getSize","close","fd","size","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAmBYA;eAAAA;;QA0BAC;eAAAA;;;mCA3Ce;yDACb;;;;;;;;;;;AAgBR,IAAA,AAAMD,6BAAN;;aAAMA,aAGCE,MAAc;gCAHfF;QAIT,IAAI,CAACE,MAAM,GAAGA;;iBAJLF;IAOXG,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;WAjBWR;;AA0BN,IAAA,AAAMC,2BAAN;;aAAMA,WAICQ,EAAU,EAAEC,IAAY;gCAJzBT;QAKT,IAAI,CAACQ,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHT;IASXE,OAAAA,IAOC,GAPDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,IAAMM,MAAMC,IAAAA,gCAAW,EAACP;QACxB,IAAMQ,YAAYC,WAAE,CAACC,QAAQ,CAAC,IAAI,CAACN,EAAE,EAAEE,KAAK,GAAGN,QAAQD;QACvD,IAAIS,YAAYR,QAAQ;YACtB,OAAOM,IAAIL,KAAK,CAAC,GAAGO;QACtB;QACA,OAAOF;IACT;IAEAJ,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACG,IAAI;IAClB;IAEAF,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFM,WAAE,CAACE,SAAS,CAAC,IAAI,CAACP,EAAE;QACtB,EAAE,OAAOQ,IAAI;QACX,sBAAsB;QACxB;IACF;WA5BWhB"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n 
stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n }\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n 
s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n }\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjFyB;yDACvB;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IAOC,GAPDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,IAAMgB,MAAMC,IAAAA,gCAAW,EAACjB;QACxB,IAAMkB,YAAYC,WAAE,CAACC,QAAQ,CAAC,IAAI,CAACN,EAAE,EAAEE,KAAK,GAAGhB,QAAQD;QACvD,IAAImB,YAAYlB,QAAQ;YACtB,OAAOgB,IAAIf,KAAK,CAAC,GAAGiB;QACtB;QACA,OAAOF;IACT;IAEAd,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFgB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACP,EAAE;QACtB,EAAE,OAAOQ,IAAI;QACX
,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDlB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAII,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMd,YAAY,OAAO,cAAc;QACvC,IAAIe,aAAqC;QAEzC,IAAM/B,SAASF,qBAAqB,SAACkC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMZ,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASkB;YAC5C,IAAIN,UAAU,GAAG;gBACfY,WAAW;gBACXE,EAAEf,IAAI,CAAC;gBACP;YACF;YAEAY,UAAU;YACV,IAAM1B,SAASoB,IAAAA,gCAAW,EAACL;YAC3B,IAAMe,gBAAgBtB,SAASa;YAE/BC,WAAE,CAACrB,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQe,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEf,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIkB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEf,IAAI,CAAC;gBACT,OAAO;oBACLO,aAAaW;oBACbH,EAAEf,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAG4B;gBACzB;YACF;QACF;QAEA,OAAOnC;IACT;WA/EWH"}
@@ -0,0 +1,101 @@
1
/**
 * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams
 *
 * For multi-file solid archives, the folder is decompressed as a single stream.
 * This class splits that stream into individual file streams based on known file boundaries.
 *
 * Features:
 * - Lazy stream creation (streams created on first access)
 * - Backpressure propagation (returns false when downstream is full)
 * - Running CRC verification per file
 * - Automatic cleanup of completed streams
 */
import type Stream from 'stream';
/** Options for constructing a FolderStreamSplitter. */
export interface FolderStreamSplitterOptions {
    /** Sizes of each file in the folder (in order) */
    fileSizes: number[];
    /** Whether to verify CRC for each file */
    verifyCrc?: boolean;
    /** Expected CRCs for each file (parallel to fileSizes) */
    expectedCrcs?: (number | undefined)[];
}
/**
 * Splits a decompressed folder stream into individual file streams.
 *
 * Usage:
 * ```
 * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });
 *
 * decompressStream.on('data', (chunk) => {
 *   if (!splitter.write(chunk)) {
 *     decompressStream.pause();
 *     splitter.onDrain(() => decompressStream.resume());
 *   }
 * });
 * decompressStream.on('end', () => splitter.end());
 *
 * // Get stream for file at index 1 (created lazily)
 * const fileStream = splitter.getFileStream(1);
 * ```
 */
export declare class FolderStreamSplitter {
    private fileBoundaries;
    private fileStreams;
    private fileCrcs;
    private currentFileIndex;
    private bytesWritten;
    private currentFileEnd;
    private verifyCrc;
    private expectedCrcs;
    private finished;
    private error;
    private drainCallbacks;
    private _needsDrain;
    constructor(options: FolderStreamSplitterOptions);
    /**
     * Write decompressed data chunk. Data is routed to appropriate file stream(s).
     * Returns false if backpressure should be applied (downstream is full).
     */
    write(chunk: Buffer): boolean;
    /**
     * Ensure stream exists for file index (lazy creation)
     */
    private ensureFileStream;
    /**
     * Complete current file and move to next
     */
    private finishCurrentFile;
    /**
     * Signal end of decompressed data
     */
    end(): void;
    /**
     * Emit error to all pending file streams
     */
    private emitError;
    /**
     * Get the stream for a specific file by index.
     * Stream is created lazily on first access.
     */
    getFileStream(fileIndex: number): Stream.PassThrough;
    /**
     * Register callback for when backpressure clears
     */
    onDrain(callback: () => void): void;
    /**
     * Notify all drain callbacks
     */
    private notifyDrain;
    /**
     * Check if a specific file's stream has been fully written
     */
    isFileComplete(fileIndex: number): boolean;
    /**
     * Get total number of files in this folder
     */
    get fileCount(): number;
    /**
     * Get the error the splitter has encountered, or null if none.
     */
    getError(): Error | null;
}
@@ -0,0 +1,101 @@
1
/**
 * FolderStreamSplitter - Splits a decompressed folder stream into individual file streams
 *
 * For multi-file solid archives, the folder is decompressed as a single stream.
 * This class splits that stream into individual file streams based on known file boundaries.
 *
 * Features:
 * - Lazy stream creation (streams created on first access)
 * - Backpressure propagation (returns false when downstream is full)
 * - Running CRC verification per file
 * - Automatic cleanup of completed streams
 */
import type Stream from 'stream';
/** Options for constructing a FolderStreamSplitter. */
export interface FolderStreamSplitterOptions {
    /** Sizes of each file in the folder (in order) */
    fileSizes: number[];
    /** Whether to verify CRC for each file */
    verifyCrc?: boolean;
    /** Expected CRCs for each file (parallel to fileSizes) */
    expectedCrcs?: (number | undefined)[];
}
/**
 * Splits a decompressed folder stream into individual file streams.
 *
 * Usage:
 * ```
 * const splitter = new FolderStreamSplitter({ fileSizes: [1000, 2000, 500] });
 *
 * decompressStream.on('data', (chunk) => {
 *   if (!splitter.write(chunk)) {
 *     decompressStream.pause();
 *     splitter.onDrain(() => decompressStream.resume());
 *   }
 * });
 * decompressStream.on('end', () => splitter.end());
 *
 * // Get stream for file at index 1 (created lazily)
 * const fileStream = splitter.getFileStream(1);
 * ```
 */
export declare class FolderStreamSplitter {
    private fileBoundaries;
    private fileStreams;
    private fileCrcs;
    private currentFileIndex;
    private bytesWritten;
    private currentFileEnd;
    private verifyCrc;
    private expectedCrcs;
    private finished;
    private error;
    private drainCallbacks;
    private _needsDrain;
    constructor(options: FolderStreamSplitterOptions);
    /**
     * Write decompressed data chunk. Data is routed to appropriate file stream(s).
     * Returns false if backpressure should be applied (downstream is full).
     */
    write(chunk: Buffer): boolean;
    /**
     * Ensure stream exists for file index (lazy creation)
     */
    private ensureFileStream;
    /**
     * Complete current file and move to next
     */
    private finishCurrentFile;
    /**
     * Signal end of decompressed data
     */
    end(): void;
    /**
     * Emit error to all pending file streams
     */
    private emitError;
    /**
     * Get the stream for a specific file by index.
     * Stream is created lazily on first access.
     */
    getFileStream(fileIndex: number): Stream.PassThrough;
    /**
     * Register callback for when backpressure clears
     */
    onDrain(callback: () => void): void;
    /**
     * Notify all drain callbacks
     */
    private notifyDrain;
    /**
     * Check if a specific file's stream has been fully written
     */
    isFileComplete(fileIndex: number): boolean;
    /**
     * Get total number of files in this folder
     */
    get fileCount(): number;
    /**
     * Get the error the splitter has encountered, or null if none.
     */
    getError(): Error | null;
}