7z-iterator 1.1.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188)
  1. package/dist/cjs/FileEntry.d.cts +12 -4
  2. package/dist/cjs/FileEntry.d.ts +12 -4
  3. package/dist/cjs/FileEntry.js +52 -24
  4. package/dist/cjs/FileEntry.js.map +1 -1
  5. package/dist/cjs/SevenZipIterator.d.cts +25 -2
  6. package/dist/cjs/SevenZipIterator.d.ts +25 -2
  7. package/dist/cjs/SevenZipIterator.js +68 -21
  8. package/dist/cjs/SevenZipIterator.js.map +1 -1
  9. package/dist/cjs/compat.js +1 -8
  10. package/dist/cjs/compat.js.map +1 -1
  11. package/dist/cjs/index.d.cts +0 -2
  12. package/dist/cjs/index.d.ts +0 -2
  13. package/dist/cjs/index.js +3 -12
  14. package/dist/cjs/index.js.map +1 -1
  15. package/dist/cjs/lib/streamToSource.d.cts +8 -11
  16. package/dist/cjs/lib/streamToSource.d.ts +8 -11
  17. package/dist/cjs/lib/streamToSource.js +21 -67
  18. package/dist/cjs/lib/streamToSource.js.map +1 -1
  19. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  20. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  21. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  22. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  23. package/dist/cjs/lzma/index.d.cts +13 -0
  24. package/dist/cjs/lzma/index.d.ts +13 -0
  25. package/dist/cjs/lzma/index.js +63 -0
  26. package/dist/cjs/lzma/index.js.map +1 -0
  27. package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
  28. package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
  29. package/dist/cjs/lzma/stream/transforms.js +149 -0
  30. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  31. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
  32. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
  33. package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
  34. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  35. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
  36. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
  37. package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
  38. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  39. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  40. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  41. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  42. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  43. package/dist/cjs/lzma/types.d.cts +110 -0
  44. package/dist/cjs/lzma/types.d.ts +110 -0
  45. package/dist/cjs/lzma/types.js +264 -0
  46. package/dist/cjs/lzma/types.js.map +1 -0
  47. package/dist/cjs/nextEntry.js +24 -26
  48. package/dist/cjs/nextEntry.js.map +1 -1
  49. package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
  50. package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
  51. package/dist/cjs/sevenz/ArchiveSource.js +69 -0
  52. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  53. package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
  54. package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
  55. package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
  56. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
  57. package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
  58. package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
  59. package/dist/cjs/sevenz/SevenZipParser.js +574 -203
  60. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  61. package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
  62. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  63. package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
  64. package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
  65. package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
  66. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  67. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
  68. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
  69. package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
  70. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  71. package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
  72. package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
  73. package/dist/cjs/sevenz/codecs/Copy.js +2 -15
  74. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  75. package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
  76. package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
  77. package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
  78. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  79. package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
  80. package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
  81. package/dist/cjs/sevenz/codecs/Delta.js +29 -10
  82. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  83. package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
  84. package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
  85. package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
  86. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  87. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
  88. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
  89. package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
  90. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  91. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
  92. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  93. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
  94. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  95. package/dist/cjs/types.d.cts +2 -16
  96. package/dist/cjs/types.d.ts +2 -16
  97. package/dist/cjs/types.js.map +1 -1
  98. package/dist/esm/FileEntry.d.ts +12 -4
  99. package/dist/esm/FileEntry.js +52 -26
  100. package/dist/esm/FileEntry.js.map +1 -1
  101. package/dist/esm/SevenZipIterator.d.ts +25 -2
  102. package/dist/esm/SevenZipIterator.js +69 -22
  103. package/dist/esm/SevenZipIterator.js.map +1 -1
  104. package/dist/esm/compat.js +1 -8
  105. package/dist/esm/compat.js.map +1 -1
  106. package/dist/esm/index.d.ts +0 -2
  107. package/dist/esm/index.js +0 -1
  108. package/dist/esm/index.js.map +1 -1
  109. package/dist/esm/lib/streamToSource.d.ts +8 -11
  110. package/dist/esm/lib/streamToSource.js +22 -68
  111. package/dist/esm/lib/streamToSource.js.map +1 -1
  112. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  113. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  114. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  115. package/dist/esm/lzma/index.d.ts +13 -0
  116. package/dist/esm/lzma/index.js +15 -0
  117. package/dist/esm/lzma/index.js.map +1 -0
  118. package/dist/esm/lzma/stream/transforms.d.ts +38 -0
  119. package/dist/esm/lzma/stream/transforms.js +150 -0
  120. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  121. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
  122. package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
  123. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  124. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
  125. package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
  126. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  127. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  128. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  129. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  130. package/dist/esm/lzma/types.d.ts +110 -0
  131. package/dist/esm/lzma/types.js +154 -0
  132. package/dist/esm/lzma/types.js.map +1 -0
  133. package/dist/esm/nextEntry.js +24 -26
  134. package/dist/esm/nextEntry.js.map +1 -1
  135. package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
  136. package/dist/esm/sevenz/ArchiveSource.js +70 -1
  137. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  138. package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
  139. package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
  140. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
  141. package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
  142. package/dist/esm/sevenz/SevenZipParser.js +414 -198
  143. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  144. package/dist/esm/sevenz/codecs/BZip2.js +2 -1
  145. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  146. package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
  147. package/dist/esm/sevenz/codecs/Bcj.js +106 -6
  148. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  149. package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
  150. package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
  151. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  152. package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
  153. package/dist/esm/sevenz/codecs/Copy.js +1 -9
  154. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  155. package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
  156. package/dist/esm/sevenz/codecs/Deflate.js +9 -7
  157. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  158. package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
  159. package/dist/esm/sevenz/codecs/Delta.js +33 -8
  160. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  161. package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
  162. package/dist/esm/sevenz/codecs/Lzma.js +17 -24
  163. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  164. package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
  165. package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
  166. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  167. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  168. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
  169. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  170. package/dist/esm/types.d.ts +2 -16
  171. package/dist/esm/types.js.map +1 -1
  172. package/package.json +3 -3
  173. package/assets/lzma-purejs/LICENSE +0 -11
  174. package/assets/lzma-purejs/index.js +0 -19
  175. package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
  176. package/assets/lzma-purejs/lib/LZ.js +0 -6
  177. package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
  178. package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
  179. package/assets/lzma-purejs/lib/LZMA.js +0 -6
  180. package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
  181. package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
  182. package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
  183. package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
  184. package/assets/lzma-purejs/lib/Stream.js +0 -41
  185. package/assets/lzma-purejs/lib/Util.js +0 -114
  186. package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
  187. package/assets/lzma-purejs/package-lock.json +0 -13
  188. package/assets/lzma-purejs/package.json +0 -8
@@ -37,7 +37,61 @@ let SevenZipIterator = class SevenZipIterator extends BaseIterator {
37
37
  lock.release();
38
38
  }
39
39
  // Don't call base end here - Lock.__destroy() handles it
40
- this.iterator = null;
40
+ this._iterator = null;
41
+ }
42
+ /**
43
+ * Check if streaming extraction is available for any folder in this archive.
44
+ * Streaming is possible when folders use codecs like BZip2, Deflate, or Copy
45
+ * that can decompress incrementally without buffering the entire input.
46
+ *
47
+ * @returns true if at least one folder supports streaming
48
+ */ canStream() {
49
+ if (!this._iterator) return false;
50
+ const parser = this._iterator.getParser();
51
+ if (!parser) return false;
52
+ const entries = parser.getEntries();
53
+ const checkedFolders = {};
54
+ for(let i = 0; i < entries.length; i++){
55
+ const folderIndex = entries[i]._folderIndex;
56
+ if (folderIndex >= 0 && checkedFolders[folderIndex] === undefined) {
57
+ checkedFolders[folderIndex] = parser.canStreamFolder(folderIndex);
58
+ if (checkedFolders[folderIndex]) {
59
+ return true;
60
+ }
61
+ }
62
+ }
63
+ return false;
64
+ }
65
+ /**
66
+ * Get entries sorted for optimal streaming extraction.
67
+ *
68
+ * Entries are sorted by:
69
+ * 1. Folder index (process one folder at a time)
70
+ * 2. Stream index within folder (for solid block streaming)
71
+ *
72
+ * This ordering allows multi-file solid folders to stream with
73
+ * O(largest file) memory instead of O(folder size).
74
+ *
75
+ * @returns Array of entries in streaming order
76
+ */ getStreamingOrder() {
77
+ if (!this._iterator) return [];
78
+ const parser = this._iterator.getParser();
79
+ if (!parser) return [];
80
+ const entries = parser.getEntries();
81
+ // Create a copy and sort for streaming order
82
+ const sorted = [];
83
+ for(let i = 0; i < entries.length; i++){
84
+ sorted.push(entries[i]);
85
+ }
86
+ sorted.sort((a, b)=>{
87
+ // First by folder index
88
+ if (a._folderIndex !== b._folderIndex) {
89
+ return a._folderIndex - b._folderIndex;
90
+ }
91
+ // Then by stream index within folder
92
+ return a._streamIndexInFolder - b._streamIndexInFolder;
93
+ });
94
+ return sorted;
41
95
  }
42
96
  constructor(source, options = {}){
43
97
  super(options);
@@ -71,7 +125,7 @@ let SevenZipIterator = class SevenZipIterator extends BaseIterator {
71
125
  });
72
126
  });
73
127
  } else {
74
- // Stream input - use hybrid memory/temp-file approach
128
+ // Stream input - write to temp file for random access
75
129
  // Register cleanup for source stream
76
130
  const stream = source;
77
131
  this.lock.registerCleanup(()=>{
@@ -81,30 +135,23 @@ let SevenZipIterator = class SevenZipIterator extends BaseIterator {
81
135
  const tempPath = path.join(tmpdir(), '7z-iterator', shortHash(process.cwd()), tempSuffix('tmp.7z'));
82
136
  queue.defer((cb)=>{
83
137
  streamToSource(source, {
84
- memoryThreshold: options.memoryThreshold,
85
- tempPath: tempPath
138
+ tempPath
86
139
  }, (err, result)=>{
87
140
  if (this.done || cancelled) return;
88
141
  if (err) return cb(err);
89
142
  if (!result) return cb(new Error('No result from streamToSource'));
90
143
  archiveSource = result.source;
91
- if (result.fd !== undefined) {
92
- const fd = result.fd;
93
- // Register cleanup for file descriptor
94
- this.lock.registerCleanup(()=>{
95
- fs.closeSync(fd);
96
- });
97
- }
98
- if (result.tempPath) {
99
- const tp = result.tempPath;
100
- // Register cleanup for temp file
101
- this.lock.registerCleanup(()=>{
102
- try {
103
- rmSync(tp);
104
- } catch (_e) {
105
- /* ignore */ }
106
- });
107
- }
144
+ // Register cleanup for file descriptor
145
+ this.lock.registerCleanup(()=>{
146
+ fs.closeSync(result.fd);
147
+ });
148
+ // Register cleanup for temp file
149
+ this.lock.registerCleanup(()=>{
150
+ try {
151
+ rmSync(result.tempPath);
152
+ } catch (_e) {
153
+ /* ignore */ }
154
+ });
108
155
  cb();
109
156
  });
110
157
  });
@@ -116,7 +163,7 @@ let SevenZipIterator = class SevenZipIterator extends BaseIterator {
116
163
  try {
117
164
  const parser = new SevenZipParser(archiveSource);
118
165
  parser.parse();
119
- this.iterator = new EntryIterator(parser);
166
+ this._iterator = new EntryIterator(parser);
120
167
  cb();
121
168
  } catch (parseErr) {
122
169
  cb(parseErr);
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/SevenZipIterator.ts"],"sourcesContent":["import BaseIterator, { Lock } from 'extract-base-iterator';\nimport fs from 'fs';\nimport { rmSync } from 'fs-remove-compat';\nimport path from 'path';\nimport Queue from 'queue-cb';\nimport shortHash from 'short-hash';\nimport tempSuffix from 'temp-suffix';\nimport { tmpdir } from './compat.ts';\nimport streamToSource, { type SourceResult } from './lib/streamToSource.ts';\nimport nextEntry from './nextEntry.ts';\nimport { setPassword } from './sevenz/codecs/index.ts';\nimport { type ArchiveSource, FileSource, type SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\n\nimport type { Entry, ExtractOptions, SevenZipFileIterator } from './types.ts';\n\n/**\n * Iterator wrapper around SevenZipParser entries\n */\nclass EntryIterator implements SevenZipFileIterator {\n private parser: SevenZipParser;\n private entries: SevenZipEntry[];\n private index = 0;\n\n constructor(parser: SevenZipParser) {\n this.parser = parser;\n this.entries = parser.getEntries();\n }\n\n next(): SevenZipEntry | null {\n if (this.index >= this.entries.length) {\n return null;\n }\n return this.entries[this.index++];\n }\n\n getParser(): SevenZipParser {\n return this.parser;\n }\n}\n\nexport default class SevenZipIterator extends BaseIterator<Entry> {\n lock: Lock | null;\n iterator: SevenZipFileIterator;\n\n constructor(source: string | NodeJS.ReadableStream, options: ExtractOptions = {}) {\n super(options);\n this.lock = new Lock();\n this.lock.onDestroy = (err) => BaseIterator.prototype.end.call(this, err);\n const queue = new Queue(1);\n let cancelled = false;\n let archiveSource: ArchiveSource | null = null;\n const setup = (): void => {\n cancelled = true;\n };\n this.processing.push(setup);\n\n // Set password (or clear if not provided)\n setPassword(options.password || null);\n\n if (typeof source === 'string') {\n // File path input - use FileSource 
directly\n queue.defer((cb: (err?: Error) => void) => {\n fs.stat(source, (statErr, stats) => {\n if (this.done || cancelled) return;\n if (statErr) return cb(statErr);\n\n fs.open(source, 'r', (err, fd) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n\n archiveSource = new FileSource(fd, stats.size);\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(fd);\n });\n cb();\n });\n });\n });\n } else {\n // Stream input - use hybrid memory/temp-file approach\n // Register cleanup for source stream\n const stream = source as NodeJS.ReadableStream;\n this.lock.registerCleanup(() => {\n const s = stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') s.destroy();\n });\n\n const tempPath = path.join(tmpdir(), '7z-iterator', shortHash(process.cwd()), tempSuffix('tmp.7z'));\n queue.defer((cb: (err?: Error) => void) => {\n streamToSource(\n source,\n {\n memoryThreshold: options.memoryThreshold,\n tempPath: tempPath,\n },\n (err?: Error, result?: SourceResult) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n if (!result) return cb(new Error('No result from streamToSource'));\n\n archiveSource = result.source;\n if (result.fd !== undefined) {\n const fd = result.fd;\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(fd);\n });\n }\n if (result.tempPath) {\n const tp = result.tempPath;\n // Register cleanup for temp file\n this.lock.registerCleanup(() => {\n try {\n rmSync(tp);\n } catch (_e) {\n /* ignore */\n }\n });\n }\n cb();\n }\n );\n });\n }\n\n // Parse and build iterator\n queue.defer((cb: (err?: Error) => void) => {\n if (this.done || cancelled) return;\n if (!archiveSource) return cb(new Error('No archive source'));\n\n try {\n const parser = new SevenZipParser(archiveSource);\n parser.parse();\n this.iterator = new EntryIterator(parser);\n cb();\n } catch (parseErr) {\n cb(parseErr as 
Error);\n }\n });\n\n // start processing\n queue.await((err?: Error) => {\n this.processing.remove(setup);\n if (this.done || cancelled) return;\n err ? this.end(err) : this.push(nextEntry);\n });\n }\n\n end(err?: Error) {\n if (this.lock) {\n const lock = this.lock;\n this.lock = null; // Clear before release to prevent re-entrancy\n lock.err = err;\n lock.release();\n }\n // Don't call base end here - Lock.__destroy() handles it\n this.iterator = null;\n }\n}\n"],"names":["BaseIterator","Lock","fs","rmSync","path","Queue","shortHash","tempSuffix","tmpdir","streamToSource","nextEntry","setPassword","FileSource","SevenZipParser","EntryIterator","next","index","entries","length","getParser","parser","getEntries","SevenZipIterator","end","err","lock","release","iterator","source","options","onDestroy","prototype","call","queue","cancelled","archiveSource","setup","processing","push","password","defer","cb","stat","statErr","stats","done","open","fd","size","registerCleanup","closeSync","stream","s","destroy","tempPath","join","process","cwd","memoryThreshold","result","Error","undefined","tp","_e","parse","parseErr","await","remove"],"mappings":"AAAA,OAAOA,gBAAgBC,IAAI,QAAQ,wBAAwB;AAC3D,OAAOC,QAAQ,KAAK;AACpB,SAASC,MAAM,QAAQ,mBAAmB;AAC1C,OAAOC,UAAU,OAAO;AACxB,OAAOC,WAAW,WAAW;AAC7B,OAAOC,eAAe,aAAa;AACnC,OAAOC,gBAAgB,cAAc;AACrC,SAASC,MAAM,QAAQ,cAAc;AACrC,OAAOC,oBAA2C,0BAA0B;AAC5E,OAAOC,eAAe,iBAAiB;AACvC,SAASC,WAAW,QAAQ,2BAA2B;AACvD,SAA6BC,UAAU,EAAsBC,cAAc,QAAQ,6BAA6B;AAIhH;;CAEC,GACD,IAAA,AAAMC,gBAAN,MAAMA;IAUJC,OAA6B;QAC3B,IAAI,IAAI,CAACC,KAAK,IAAI,IAAI,CAACC,OAAO,CAACC,MAAM,EAAE;YACrC,OAAO;QACT;QACA,OAAO,IAAI,CAACD,OAAO,CAAC,IAAI,CAACD,KAAK,GAAG;IACnC;IAEAG,YAA4B;QAC1B,OAAO,IAAI,CAACC,MAAM;IACpB;IAdA,YAAYA,MAAsB,CAAE;aAF5BJ,QAAQ;QAGd,IAAI,CAACI,MAAM,GAAGA;QACd,IAAI,CAACH,OAAO,GAAGG,OAAOC,UAAU;IAClC;AAYF;AAEe,IAAA,AAAMC,mBAAN,MAAMA,yBAAyBtB;IA6G5CuB,IAAIC,GAAW,EAAE;QACf,IAAI,IAAI,CAACC,IAAI,EAAE;YACb,MAAMA,OAAO,IAAI,CAACA,IAAI;YACtB,IAAI,CAACA,IAAI,GAAG,MAAM,8CAA8C;YA
ChEA,KAAKD,GAAG,GAAGA;YACXC,KAAKC,OAAO;QACd;QACA,yDAAyD;QACzD,IAAI,CAACC,QAAQ,GAAG;IAClB;IAlHA,YAAYC,MAAsC,EAAEC,UAA0B,CAAC,CAAC,CAAE;QAChF,KAAK,CAACA;QACN,IAAI,CAACJ,IAAI,GAAG,IAAIxB;QAChB,IAAI,CAACwB,IAAI,CAACK,SAAS,GAAG,CAACN,MAAQxB,aAAa+B,SAAS,CAACR,GAAG,CAACS,IAAI,CAAC,IAAI,EAAER;QACrE,MAAMS,QAAQ,IAAI5B,MAAM;QACxB,IAAI6B,YAAY;QAChB,IAAIC,gBAAsC;QAC1C,MAAMC,QAAQ;YACZF,YAAY;QACd;QACA,IAAI,CAACG,UAAU,CAACC,IAAI,CAACF;QAErB,0CAA0C;QAC1CzB,YAAYkB,QAAQU,QAAQ,IAAI;QAEhC,IAAI,OAAOX,WAAW,UAAU;YAC9B,4CAA4C;YAC5CK,MAAMO,KAAK,CAAC,CAACC;gBACXvC,GAAGwC,IAAI,CAACd,QAAQ,CAACe,SAASC;oBACxB,IAAI,IAAI,CAACC,IAAI,IAAIX,WAAW;oBAC5B,IAAIS,SAAS,OAAOF,GAAGE;oBAEvBzC,GAAG4C,IAAI,CAAClB,QAAQ,KAAK,CAACJ,KAAKuB;wBACzB,IAAI,IAAI,CAACF,IAAI,IAAIX,WAAW;wBAC5B,IAAIV,KAAK,OAAOiB,GAAGjB;wBAEnBW,gBAAgB,IAAIvB,WAAWmC,IAAIH,MAAMI,IAAI;wBAC7C,uCAAuC;wBACvC,IAAI,CAACvB,IAAI,CAACwB,eAAe,CAAC;4BACxB/C,GAAGgD,SAAS,CAACH;wBACf;wBACAN;oBACF;gBACF;YACF;QACF,OAAO;YACL,sDAAsD;YACtD,qCAAqC;YACrC,MAAMU,SAASvB;YACf,IAAI,CAACH,IAAI,CAACwB,eAAe,CAAC;gBACxB,MAAMG,IAAID;gBACV,IAAI,OAAOC,EAAEC,OAAO,KAAK,YAAYD,EAAEC,OAAO;YAChD;YAEA,MAAMC,WAAWlD,KAAKmD,IAAI,CAAC/C,UAAU,eAAeF,UAAUkD,QAAQC,GAAG,KAAKlD,WAAW;YACzF0B,MAAMO,KAAK,CAAC,CAACC;gBACXhC,eACEmB,QACA;oBACE8B,iBAAiB7B,QAAQ6B,eAAe;oBACxCJ,UAAUA;gBACZ,GACA,CAAC9B,KAAamC;oBACZ,IAAI,IAAI,CAACd,IAAI,IAAIX,WAAW;oBAC5B,IAAIV,KAAK,OAAOiB,GAAGjB;oBACnB,IAAI,CAACmC,QAAQ,OAAOlB,GAAG,IAAImB,MAAM;oBAEjCzB,gBAAgBwB,OAAO/B,MAAM;oBAC7B,IAAI+B,OAAOZ,EAAE,KAAKc,WAAW;wBAC3B,MAAMd,KAAKY,OAAOZ,EAAE;wBACpB,uCAAuC;wBACvC,IAAI,CAACtB,IAAI,CAACwB,eAAe,CAAC;4BACxB/C,GAAGgD,SAAS,CAACH;wBACf;oBACF;oBACA,IAAIY,OAAOL,QAAQ,EAAE;wBACnB,MAAMQ,KAAKH,OAAOL,QAAQ;wBAC1B,iCAAiC;wBACjC,IAAI,CAAC7B,IAAI,CAACwB,eAAe,CAAC;4BACxB,IAAI;gCACF9C,OAAO2D;4BACT,EAAE,OAAOC,IAAI;4BACX,UAAU,GACZ;wBACF;oBACF;oBACAtB;gBACF;YAEJ;QACF;QAEA,2BAA2B;QAC3BR,MAAMO,KAAK,CAAC,CAACC;YACX,IAAI,IAAI,CAACI,IAAI,IAAIX,WAAW;YAC5B,IAAI,CAACC,eAAe,OAAOM,GAAG,IAAImB,MAAM;YAExC,IAAI;gBACF,MAAMxC,SAAS,IAAIP,eAAesB;gBAClCf,OAAO4C,KAAK;gB
ACZ,IAAI,CAACrC,QAAQ,GAAG,IAAIb,cAAcM;gBAClCqB;YACF,EAAE,OAAOwB,UAAU;gBACjBxB,GAAGwB;YACL;QACF;QAEA,mBAAmB;QACnBhC,MAAMiC,KAAK,CAAC,CAAC1C;YACX,IAAI,CAACa,UAAU,CAAC8B,MAAM,CAAC/B;YACvB,IAAI,IAAI,CAACS,IAAI,IAAIX,WAAW;YAC5BV,MAAM,IAAI,CAACD,GAAG,CAACC,OAAO,IAAI,CAACc,IAAI,CAAC5B;QAClC;IACF;AAYF;AAvHA,SAAqBY,8BAuHpB"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/SevenZipIterator.ts"],"sourcesContent":["import BaseIterator, { Lock } from 'extract-base-iterator';\nimport fs from 'fs';\nimport { rmSync } from 'fs-remove-compat';\nimport path from 'path';\nimport Queue from 'queue-cb';\nimport shortHash from 'short-hash';\nimport tempSuffix from 'temp-suffix';\nimport { tmpdir } from './compat.ts';\nimport streamToSource, { type SourceResult } from './lib/streamToSource.ts';\nimport nextEntry from './nextEntry.ts';\nimport { setPassword } from './sevenz/codecs/index.ts';\nimport { type ArchiveSource, FileSource, type SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\n\nimport type { Entry, ExtractOptions } from './types.ts';\n\n/**\n * Internal iterator interface for SevenZipParser entries\n * @internal\n */\ninterface SevenZipFileIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n/**\n * Iterator wrapper around SevenZipParser entries\n */\nclass EntryIterator implements SevenZipFileIterator {\n private parser: SevenZipParser;\n private entries: SevenZipEntry[];\n private index = 0;\n\n constructor(parser: SevenZipParser) {\n this.parser = parser;\n this.entries = parser.getEntries();\n }\n\n next(): SevenZipEntry | null {\n if (this.index >= this.entries.length) {\n return null;\n }\n return this.entries[this.index++];\n }\n\n getParser(): SevenZipParser {\n return this.parser;\n }\n}\n\nexport default class SevenZipIterator extends BaseIterator<Entry> {\n lock: Lock | null;\n /** @internal - Do not use directly */\n _iterator: unknown;\n\n constructor(source: string | NodeJS.ReadableStream, options: ExtractOptions = {}) {\n super(options);\n this.lock = new Lock();\n this.lock.onDestroy = (err) => BaseIterator.prototype.end.call(this, err);\n const queue = new Queue(1);\n let cancelled = false;\n let archiveSource: ArchiveSource | null = null;\n const setup = (): void => {\n cancelled = true;\n };\n 
this.processing.push(setup);\n\n // Set password (or clear if not provided)\n setPassword(options.password || null);\n\n if (typeof source === 'string') {\n // File path input - use FileSource directly\n queue.defer((cb: (err?: Error) => void) => {\n fs.stat(source, (statErr, stats) => {\n if (this.done || cancelled) return;\n if (statErr) return cb(statErr);\n\n fs.open(source, 'r', (err, fd) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n\n archiveSource = new FileSource(fd, stats.size);\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(fd);\n });\n cb();\n });\n });\n });\n } else {\n // Stream input - write to temp file for random access\n // Register cleanup for source stream\n const stream = source as NodeJS.ReadableStream;\n this.lock.registerCleanup(() => {\n const s = stream as NodeJS.ReadableStream & { destroy?: () => void };\n if (typeof s.destroy === 'function') s.destroy();\n });\n\n const tempPath = path.join(tmpdir(), '7z-iterator', shortHash(process.cwd()), tempSuffix('tmp.7z'));\n queue.defer((cb: (err?: Error) => void) => {\n streamToSource(source, { tempPath }, (err?: Error, result?: SourceResult) => {\n if (this.done || cancelled) return;\n if (err) return cb(err);\n if (!result) return cb(new Error('No result from streamToSource'));\n\n archiveSource = result.source;\n\n // Register cleanup for file descriptor\n this.lock.registerCleanup(() => {\n fs.closeSync(result.fd);\n });\n\n // Register cleanup for temp file\n this.lock.registerCleanup(() => {\n try {\n rmSync(result.tempPath);\n } catch (_e) {\n /* ignore */\n }\n });\n\n cb();\n });\n });\n }\n\n // Parse and build iterator\n queue.defer((cb: (err?: Error) => void) => {\n if (this.done || cancelled) return;\n if (!archiveSource) return cb(new Error('No archive source'));\n\n try {\n const parser = new SevenZipParser(archiveSource);\n parser.parse();\n this._iterator = new EntryIterator(parser);\n cb();\n } catch 
(parseErr) {\n cb(parseErr as Error);\n }\n });\n\n // start processing\n queue.await((err?: Error) => {\n this.processing.remove(setup);\n if (this.done || cancelled) return;\n err ? this.end(err) : this.push(nextEntry);\n });\n }\n\n end(err?: Error) {\n if (this.lock) {\n const lock = this.lock;\n this.lock = null; // Clear before release to prevent re-entrancy\n lock.err = err;\n lock.release();\n }\n // Don't call base end here - Lock.__destroy() handles it\n this._iterator = null;\n }\n\n /**\n * Check if streaming extraction is available for any folder in this archive.\n * Streaming is possible when folders use codecs like BZip2, Deflate, or Copy\n * that can decompress incrementally without buffering the entire input.\n *\n * @returns true if at least one folder supports streaming\n */\n canStream(): boolean {\n if (!this._iterator) return false;\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return false;\n\n const entries = parser.getEntries();\n const checkedFolders: { [key: number]: boolean } = {};\n\n for (let i = 0; i < entries.length; i++) {\n const folderIndex = entries[i]._folderIndex;\n if (folderIndex >= 0 && checkedFolders[folderIndex] === undefined) {\n checkedFolders[folderIndex] = parser.canStreamFolder(folderIndex);\n if (checkedFolders[folderIndex]) {\n return true;\n }\n }\n }\n\n return false;\n }\n\n /**\n * Get entries sorted for optimal streaming extraction.\n *\n * Entries are sorted by:\n * 1. Folder index (process one folder at a time)\n * 2. 
Stream index within folder (for solid block streaming)\n *\n * This ordering allows multi-file solid folders to stream with\n * O(largest file) memory instead of O(folder size).\n *\n * @returns Array of entries in streaming order\n */\n getStreamingOrder(): SevenZipEntry[] {\n if (!this._iterator) return [];\n const parser = (this._iterator as SevenZipFileIterator).getParser();\n if (!parser) return [];\n\n const entries = parser.getEntries();\n\n // Create a copy and sort for streaming order\n const sorted: SevenZipEntry[] = [];\n for (let i = 0; i < entries.length; i++) {\n sorted.push(entries[i]);\n }\n\n sorted.sort((a, b) => {\n // First by folder index\n if (a._folderIndex !== b._folderIndex) {\n return a._folderIndex - b._folderIndex;\n }\n // Then by stream index within folder\n return a._streamIndexInFolder - b._streamIndexInFolder;\n });\n\n return sorted;\n }\n}\n"],"names":["BaseIterator","Lock","fs","rmSync","path","Queue","shortHash","tempSuffix","tmpdir","streamToSource","nextEntry","setPassword","FileSource","SevenZipParser","EntryIterator","next","index","entries","length","getParser","parser","getEntries","SevenZipIterator","end","err","lock","release","_iterator","canStream","checkedFolders","i","folderIndex","_folderIndex","undefined","canStreamFolder","getStreamingOrder","sorted","push","sort","a","b","_streamIndexInFolder","source","options","onDestroy","prototype","call","queue","cancelled","archiveSource","setup","processing","password","defer","cb","stat","statErr","stats","done","open","fd","size","registerCleanup","closeSync","stream","s","destroy","tempPath","join","process","cwd","result","Error","_e","parse","parseErr","await","remove"],"mappings":"AAAA,OAAOA,gBAAgBC,IAAI,QAAQ,wBAAwB;AAC3D,OAAOC,QAAQ,KAAK;AACpB,SAASC,MAAM,QAAQ,mBAAmB;AAC1C,OAAOC,UAAU,OAAO;AACxB,OAAOC,WAAW,WAAW;AAC7B,OAAOC,eAAe,aAAa;AACnC,OAAOC,gBAAgB,cAAc;AACrC,SAASC,MAAM,QAAQ,cAAc;AACrC,OAAOC,oBAA2C,0BAA0B;AAC5E,OAAOC,eAAe,iBAAiB;AACvC,SAASC,WAAW,QAAQ,2BAA2B;AACvD,SAA
6BC,UAAU,EAAsBC,cAAc,QAAQ,6BAA6B;AAahH;;CAEC,GACD,IAAA,AAAMC,gBAAN,MAAMA;IAUJC,OAA6B;QAC3B,IAAI,IAAI,CAACC,KAAK,IAAI,IAAI,CAACC,OAAO,CAACC,MAAM,EAAE;YACrC,OAAO;QACT;QACA,OAAO,IAAI,CAACD,OAAO,CAAC,IAAI,CAACD,KAAK,GAAG;IACnC;IAEAG,YAA4B;QAC1B,OAAO,IAAI,CAACC,MAAM;IACpB;IAdA,YAAYA,MAAsB,CAAE;aAF5BJ,QAAQ;QAGd,IAAI,CAACI,MAAM,GAAGA;QACd,IAAI,CAACH,OAAO,GAAGG,OAAOC,UAAU;IAClC;AAYF;AAEe,IAAA,AAAMC,mBAAN,MAAMA,yBAAyBtB;IAoG5CuB,IAAIC,GAAW,EAAE;QACf,IAAI,IAAI,CAACC,IAAI,EAAE;YACb,MAAMA,OAAO,IAAI,CAACA,IAAI;YACtB,IAAI,CAACA,IAAI,GAAG,MAAM,8CAA8C;YAChEA,KAAKD,GAAG,GAAGA;YACXC,KAAKC,OAAO;QACd;QACA,yDAAyD;QACzD,IAAI,CAACC,SAAS,GAAG;IACnB;IAEA;;;;;;GAMC,GACDC,YAAqB;QACnB,IAAI,CAAC,IAAI,CAACD,SAAS,EAAE,OAAO;QAC5B,MAAMP,SAAS,AAAC,IAAI,CAACO,SAAS,CAA0BR,SAAS;QACjE,IAAI,CAACC,QAAQ,OAAO;QAEpB,MAAMH,UAAUG,OAAOC,UAAU;QACjC,MAAMQ,iBAA6C,CAAC;QAEpD,IAAK,IAAIC,IAAI,GAAGA,IAAIb,QAAQC,MAAM,EAAEY,IAAK;YACvC,MAAMC,cAAcd,OAAO,CAACa,EAAE,CAACE,YAAY;YAC3C,IAAID,eAAe,KAAKF,cAAc,CAACE,YAAY,KAAKE,WAAW;gBACjEJ,cAAc,CAACE,YAAY,GAAGX,OAAOc,eAAe,CAACH;gBACrD,IAAIF,cAAc,CAACE,YAAY,EAAE;oBAC/B,OAAO;gBACT;YACF;QACF;QAEA,OAAO;IACT;IAEA;;;;;;;;;;;GAWC,GACDI,oBAAqC;QACnC,IAAI,CAAC,IAAI,CAACR,SAAS,EAAE,OAAO,EAAE;QAC9B,MAAMP,SAAS,AAAC,IAAI,CAACO,SAAS,CAA0BR,SAAS;QACjE,IAAI,CAACC,QAAQ,OAAO,EAAE;QAEtB,MAAMH,UAAUG,OAAOC,UAAU;QAEjC,6CAA6C;QAC7C,MAAMe,SAA0B,EAAE;QAClC,IAAK,IAAIN,IAAI,GAAGA,IAAIb,QAAQC,MAAM,EAAEY,IAAK;YACvCM,OAAOC,IAAI,CAACpB,OAAO,CAACa,EAAE;QACxB;QAEAM,OAAOE,IAAI,CAAC,CAACC,GAAGC;YACd,wBAAwB;YACxB,IAAID,EAAEP,YAAY,KAAKQ,EAAER,YAAY,EAAE;gBACrC,OAAOO,EAAEP,YAAY,GAAGQ,EAAER,YAAY;YACxC;YACA,qCAAqC;YACrC,OAAOO,EAAEE,oBAAoB,GAAGD,EAAEC,oBAAoB;QACxD;QAEA,OAAOL;IACT;IAzKA,YAAYM,MAAsC,EAAEC,UAA0B,CAAC,CAAC,CAAE;QAChF,KAAK,CAACA;QACN,IAAI,CAAClB,IAAI,GAAG,IAAIxB;QAChB,IAAI,CAACwB,IAAI,CAACmB,SAAS,GAAG,CAACpB,MAAQxB,aAAa6C,SAAS,CAACtB,GAAG,CAACuB,IAAI,CAAC,IAAI,EAAEtB;QACrE,MAAMuB,QAAQ,IAAI1C,MAAM;QACxB,IAAI2C,YAAY;QAChB,IAAIC,gBAAsC;QAC1C,MAAMC,QAAQ;YACZF,YAAY;QACd;QACA,IAAI,CAACG,UAAU,CAACd,IAAI,CAACa;QAErB,
0CAA0C;QAC1CvC,YAAYgC,QAAQS,QAAQ,IAAI;QAEhC,IAAI,OAAOV,WAAW,UAAU;YAC9B,4CAA4C;YAC5CK,MAAMM,KAAK,CAAC,CAACC;gBACXpD,GAAGqD,IAAI,CAACb,QAAQ,CAACc,SAASC;oBACxB,IAAI,IAAI,CAACC,IAAI,IAAIV,WAAW;oBAC5B,IAAIQ,SAAS,OAAOF,GAAGE;oBAEvBtD,GAAGyD,IAAI,CAACjB,QAAQ,KAAK,CAAClB,KAAKoC;wBACzB,IAAI,IAAI,CAACF,IAAI,IAAIV,WAAW;wBAC5B,IAAIxB,KAAK,OAAO8B,GAAG9B;wBAEnByB,gBAAgB,IAAIrC,WAAWgD,IAAIH,MAAMI,IAAI;wBAC7C,uCAAuC;wBACvC,IAAI,CAACpC,IAAI,CAACqC,eAAe,CAAC;4BACxB5D,GAAG6D,SAAS,CAACH;wBACf;wBACAN;oBACF;gBACF;YACF;QACF,OAAO;YACL,sDAAsD;YACtD,qCAAqC;YACrC,MAAMU,SAAStB;YACf,IAAI,CAACjB,IAAI,CAACqC,eAAe,CAAC;gBACxB,MAAMG,IAAID;gBACV,IAAI,OAAOC,EAAEC,OAAO,KAAK,YAAYD,EAAEC,OAAO;YAChD;YAEA,MAAMC,WAAW/D,KAAKgE,IAAI,CAAC5D,UAAU,eAAeF,UAAU+D,QAAQC,GAAG,KAAK/D,WAAW;YACzFwC,MAAMM,KAAK,CAAC,CAACC;gBACX7C,eAAeiC,QAAQ;oBAAEyB;gBAAS,GAAG,CAAC3C,KAAa+C;oBACjD,IAAI,IAAI,CAACb,IAAI,IAAIV,WAAW;oBAC5B,IAAIxB,KAAK,OAAO8B,GAAG9B;oBACnB,IAAI,CAAC+C,QAAQ,OAAOjB,GAAG,IAAIkB,MAAM;oBAEjCvB,gBAAgBsB,OAAO7B,MAAM;oBAE7B,uCAAuC;oBACvC,IAAI,CAACjB,IAAI,CAACqC,eAAe,CAAC;wBACxB5D,GAAG6D,SAAS,CAACQ,OAAOX,EAAE;oBACxB;oBAEA,iCAAiC;oBACjC,IAAI,CAACnC,IAAI,CAACqC,eAAe,CAAC;wBACxB,IAAI;4BACF3D,OAAOoE,OAAOJ,QAAQ;wBACxB,EAAE,OAAOM,IAAI;wBACX,UAAU,GACZ;oBACF;oBAEAnB;gBACF;YACF;QACF;QAEA,2BAA2B;QAC3BP,MAAMM,KAAK,CAAC,CAACC;YACX,IAAI,IAAI,CAACI,IAAI,IAAIV,WAAW;YAC5B,IAAI,CAACC,eAAe,OAAOK,GAAG,IAAIkB,MAAM;YAExC,IAAI;gBACF,MAAMpD,SAAS,IAAIP,eAAeoC;gBAClC7B,OAAOsD,KAAK;gBACZ,IAAI,CAAC/C,SAAS,GAAG,IAAIb,cAAcM;gBACnCkC;YACF,EAAE,OAAOqB,UAAU;gBACjBrB,GAAGqB;YACL;QACF;QAEA,mBAAmB;QACnB5B,MAAM6B,KAAK,CAAC,CAACpD;YACX,IAAI,CAAC2B,UAAU,CAAC0B,MAAM,CAAC3B;YACvB,IAAI,IAAI,CAACQ,IAAI,IAAIV,WAAW;YAC5BxB,MAAM,IAAI,CAACD,GAAG,CAACC,OAAO,IAAI,CAACa,IAAI,CAAC3B;QAClC;IACF;AA6EF;AA/KA,SAAqBY,8BA+KpB"}
@@ -2,13 +2,6 @@
2
2
  * Compatibility Layer for Node.js 0.8+
3
3
  * Local to this package - contains only needed functions.
4
4
  */ import os from 'os';
5
- /**
6
- * os.tmpdir wrapper for Node.js 0.8+
7
- * - Uses native os.tmpdir on Node 0.10+
8
- * - Falls back to os-shim on Node 0.8
9
- */ const hasTmpdir = typeof os.tmpdir === 'function';
10
5
  export function tmpdir() {
11
- if (hasTmpdir) return os.tmpdir();
12
- const osShim = require('os-shim');
13
- return osShim.tmpdir();
6
+ return typeof os.tmpdir === 'function' ? os.tmpdir() : require('os-shim').tmpdir();
14
7
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/compat.ts"],"sourcesContent":["/**\n * Compatibility Layer for Node.js 0.8+\n * Local to this package - contains only needed functions.\n */\nimport os from 'os';\n\n/**\n * os.tmpdir wrapper for Node.js 0.8+\n * - Uses native os.tmpdir on Node 0.10+\n * - Falls back to os-shim on Node 0.8\n */\nconst hasTmpdir = typeof os.tmpdir === 'function';\nexport function tmpdir(): string {\n if (hasTmpdir) return os.tmpdir();\n const osShim = require('os-shim');\n return osShim.tmpdir();\n}\n"],"names":["os","hasTmpdir","tmpdir","osShim","require"],"mappings":"AAAA;;;CAGC,GACD,OAAOA,QAAQ,KAAK;AAEpB;;;;CAIC,GACD,MAAMC,YAAY,OAAOD,GAAGE,MAAM,KAAK;AACvC,OAAO,SAASA;IACd,IAAID,WAAW,OAAOD,GAAGE,MAAM;IAC/B,MAAMC,SAASC,QAAQ;IACvB,OAAOD,OAAOD,MAAM;AACtB"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/compat.ts"],"sourcesContent":["/**\n * Compatibility Layer for Node.js 0.8+\n * Local to this package - contains only needed functions.\n */\nimport os from 'os';\n\nexport function tmpdir(): string {\n return typeof os.tmpdir === 'function' ? os.tmpdir() : require('os-shim').tmpdir();\n}\n"],"names":["os","tmpdir","require"],"mappings":"AAAA;;;CAGC,GACD,OAAOA,QAAQ,KAAK;AAEpB,OAAO,SAASC;IACd,OAAO,OAAOD,GAAGC,MAAM,KAAK,aAAaD,GAAGC,MAAM,KAAKC,QAAQ,WAAWD,MAAM;AAClF"}
@@ -1,4 +1,2 @@
1
1
  export { default } from './SevenZipIterator.js';
2
- export type { ArchiveSource, SevenZipEntry } from './sevenz/index.js';
3
- export { SevenZipParser } from './sevenz/index.js';
4
2
  export * from './types.js';
package/dist/esm/index.js CHANGED
@@ -1,3 +1,2 @@
1
1
  export { default } from './SevenZipIterator.js';
2
- export { SevenZipParser } from './sevenz/index.js';
3
2
  export * from './types.js';
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/index.ts"],"sourcesContent":["export { default } from './SevenZipIterator.ts';\nexport type { ArchiveSource, SevenZipEntry } from './sevenz/index.ts';\nexport { SevenZipParser } from './sevenz/index.ts';\nexport * from './types.ts';\n"],"names":["default","SevenZipParser"],"mappings":"AAAA,SAASA,OAAO,QAAQ,wBAAwB;AAEhD,SAASC,cAAc,QAAQ,oBAAoB;AACnD,cAAc,aAAa"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/index.ts"],"sourcesContent":["export { default } from './SevenZipIterator.ts';\nexport * from './types.ts';\n"],"names":["default"],"mappings":"AAAA,SAASA,OAAO,QAAQ,wBAAwB;AAChD,cAAc,aAAa"}
@@ -1,20 +1,17 @@
1
- import { BufferSource, FileSource } from '../sevenz/SevenZipParser.js';
1
+ import { FileSource } from '../sevenz/SevenZipParser.js';
2
2
  export interface StreamToSourceOptions {
3
- memoryThreshold?: number;
4
- tempPath?: string;
3
+ tempPath: string;
5
4
  }
6
5
  export interface SourceResult {
7
- source: BufferSource | FileSource;
8
- fd?: number;
9
- tempPath?: string;
6
+ source: FileSource;
7
+ fd: number;
8
+ tempPath: string;
10
9
  }
11
10
  export type Callback = (error?: Error, result?: SourceResult) => void;
12
11
  /**
13
- * Convert a stream to an ArchiveSource (BufferSource for small files, FileSource for large)
12
+ * Convert a stream to a FileSource by writing to temp file
14
13
  *
15
- * Algorithm:
16
- * 1. Buffer stream data in memory up to memoryThreshold
17
- * 2. If threshold exceeded, write all buffered data to temp file and continue streaming
18
- * 3. When done, return BufferSource for memory buffer or FileSource for temp file
14
+ * 7z format requires random access for header parsing, so temp file is necessary for streams.
15
+ * Writes directly to temp file for predictable O(1) memory usage during stream consumption.
19
16
  */
20
17
  export default function streamToSource(stream: NodeJS.ReadableStream, options: StreamToSourceOptions, callback: Callback): void;
@@ -1,91 +1,45 @@
1
- // Hybrid stream handling: buffers in memory up to threshold, then switches to temp file
1
+ // Stream to source conversion: writes stream to temp file for random access
2
2
  import once from 'call-once-fn';
3
3
  import { bufferFrom } from 'extract-base-iterator';
4
4
  import fs from 'fs';
5
5
  import mkdirp from 'mkdirp-classic';
6
6
  import oo from 'on-one';
7
7
  import path from 'path';
8
- import { BufferSource, FileSource } from '../sevenz/SevenZipParser.js';
9
- // Default memory threshold: 100 MB
10
- const DEFAULT_MEMORY_THRESHOLD = 100 * 1024 * 1024;
8
+ import { FileSource } from '../sevenz/SevenZipParser.js';
11
9
  /**
12
- * Convert a stream to an ArchiveSource (BufferSource for small files, FileSource for large)
10
+ * Convert a stream to a FileSource by writing to temp file
13
11
  *
14
- * Algorithm:
15
- * 1. Buffer stream data in memory up to memoryThreshold
16
- * 2. If threshold exceeded, write all buffered data to temp file and continue streaming
17
- * 3. When done, return BufferSource for memory buffer or FileSource for temp file
12
+ * 7z format requires random access for header parsing, so temp file is necessary for streams.
13
+ * Writes directly to temp file for predictable O(1) memory usage during stream consumption.
18
14
  */ export default function streamToSource(stream, options, callback) {
19
- const threshold = options.memoryThreshold !== undefined ? options.memoryThreshold : DEFAULT_MEMORY_THRESHOLD;
20
15
  const tempPath = options.tempPath;
21
- let chunks = [];
22
- let totalSize = 0;
23
- let writeStream = null;
24
- let useTempFile = false;
25
16
  const end = once(callback);
17
+ mkdirp.sync(path.dirname(tempPath));
18
+ const writeStream = fs.createWriteStream(tempPath);
26
19
  function onData(chunk) {
27
- // Convert string chunks to Buffer
28
20
  const buf = typeof chunk === 'string' ? bufferFrom(chunk) : chunk;
29
- totalSize += buf.length;
30
- if (!useTempFile && totalSize <= threshold) {
31
- // Still under threshold - buffer in memory
32
- chunks.push(buf);
33
- } else if (!useTempFile) {
34
- // Just exceeded threshold - switch to temp file
35
- useTempFile = true;
36
- if (!tempPath) {
37
- end(new Error('memoryThreshold exceeded but no tempPath provided'));
38
- return;
39
- }
40
- mkdirp.sync(path.dirname(tempPath));
41
- writeStream = fs.createWriteStream(tempPath);
42
- // Write all buffered chunks to temp file
43
- for(let i = 0; i < chunks.length; i++){
44
- writeStream.write(chunks[i]);
45
- }
46
- chunks = []; // Allow GC
47
- // Write current chunk
48
- writeStream.write(buf);
49
- } else {
50
- // Already using temp file - write directly
51
- if (writeStream) {
52
- writeStream.write(buf);
53
- }
54
- }
21
+ writeStream.write(buf);
55
22
  }
56
23
  function onEnd() {
57
- if (useTempFile && writeStream && tempPath) {
58
- // Close write stream, then open for reading
59
- const filePath = tempPath; // Capture for closure
60
- writeStream.end(()=>{
61
- fs.open(filePath, 'r', (err, fd)=>{
62
- if (err) return end(err);
63
- fs.stat(filePath, (statErr, stats)=>{
64
- if (statErr) {
65
- fs.closeSync(fd);
66
- return end(statErr);
67
- }
68
- end(null, {
69
- source: new FileSource(fd, stats.size),
70
- fd: fd,
71
- tempPath: filePath
72
- });
24
+ writeStream.end(()=>{
25
+ fs.open(tempPath, 'r', (err, fd)=>{
26
+ if (err) return end(err);
27
+ fs.stat(tempPath, (statErr, stats)=>{
28
+ if (statErr) {
29
+ fs.closeSync(fd);
30
+ return end(statErr);
31
+ }
32
+ end(null, {
33
+ source: new FileSource(fd, stats.size),
34
+ fd: fd,
35
+ tempPath: tempPath
73
36
  });
74
37
  });
75
38
  });
76
- } else {
77
- // Use memory buffer
78
- const fullBuffer = Buffer.concat(chunks);
79
- end(null, {
80
- source: new BufferSource(fullBuffer)
81
- });
82
- }
39
+ });
83
40
  }
84
41
  function onError(err) {
85
- // Clean up if we created a temp file
86
- if (writeStream) {
87
- writeStream.end();
88
- }
42
+ writeStream.end();
89
43
  end(err);
90
44
  }
91
45
  stream.on('data', onData);
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lib/streamToSource.ts"],"sourcesContent":["// Hybrid stream handling: buffers in memory up to threshold, then switches to temp file\nimport once from 'call-once-fn';\nimport { bufferFrom } from 'extract-base-iterator';\nimport fs from 'fs';\nimport mkdirp from 'mkdirp-classic';\nimport oo from 'on-one';\nimport path from 'path';\nimport { BufferSource, FileSource } from '../sevenz/SevenZipParser.ts';\n\n// Default memory threshold: 100 MB\nconst DEFAULT_MEMORY_THRESHOLD = 100 * 1024 * 1024;\n\nexport interface StreamToSourceOptions {\n memoryThreshold?: number;\n tempPath?: string;\n}\n\nexport interface SourceResult {\n source: BufferSource | FileSource;\n fd?: number; // Set if FileSource was used (caller must close)\n tempPath?: string; // Set if temp file was created (caller must clean up)\n}\n\nexport type Callback = (error?: Error, result?: SourceResult) => void;\n\n/**\n * Convert a stream to an ArchiveSource (BufferSource for small files, FileSource for large)\n *\n * Algorithm:\n * 1. Buffer stream data in memory up to memoryThreshold\n * 2. If threshold exceeded, write all buffered data to temp file and continue streaming\n * 3. When done, return BufferSource for memory buffer or FileSource for temp file\n */\nexport default function streamToSource(stream: NodeJS.ReadableStream, options: StreamToSourceOptions, callback: Callback): void {\n const threshold = options.memoryThreshold !== undefined ? options.memoryThreshold : DEFAULT_MEMORY_THRESHOLD;\n const tempPath = options.tempPath;\n\n let chunks: Buffer[] = [];\n let totalSize = 0;\n let writeStream: fs.WriteStream | null = null;\n let useTempFile = false;\n\n const end = once(callback);\n\n function onData(chunk: Buffer | string): void {\n // Convert string chunks to Buffer\n const buf = typeof chunk === 'string' ? 
bufferFrom(chunk) : chunk;\n totalSize += buf.length;\n\n if (!useTempFile && totalSize <= threshold) {\n // Still under threshold - buffer in memory\n chunks.push(buf);\n } else if (!useTempFile) {\n // Just exceeded threshold - switch to temp file\n useTempFile = true;\n\n if (!tempPath) {\n end(new Error('memoryThreshold exceeded but no tempPath provided'));\n return;\n }\n\n mkdirp.sync(path.dirname(tempPath));\n writeStream = fs.createWriteStream(tempPath);\n\n // Write all buffered chunks to temp file\n for (let i = 0; i < chunks.length; i++) {\n writeStream.write(chunks[i]);\n }\n chunks = []; // Allow GC\n\n // Write current chunk\n writeStream.write(buf);\n } else {\n // Already using temp file - write directly\n if (writeStream) {\n writeStream.write(buf);\n }\n }\n }\n\n function onEnd(): void {\n if (useTempFile && writeStream && tempPath) {\n // Close write stream, then open for reading\n const filePath = tempPath; // Capture for closure\n writeStream.end(() => {\n fs.open(filePath, 'r', (err, fd) => {\n if (err) return end(err);\n fs.stat(filePath, (statErr, stats) => {\n if (statErr) {\n fs.closeSync(fd);\n return end(statErr);\n }\n end(null, {\n source: new FileSource(fd, stats.size),\n fd: fd,\n tempPath: filePath,\n });\n });\n });\n });\n } else {\n // Use memory buffer\n const fullBuffer = Buffer.concat(chunks);\n end(null, {\n source: new BufferSource(fullBuffer),\n });\n }\n }\n\n function onError(err: Error): void {\n // Clean up if we created a temp file\n if (writeStream) {\n writeStream.end();\n }\n end(err);\n }\n\n stream.on('data', onData);\n oo(stream, ['error'], onError);\n oo(stream, ['end', 'close', 'finish'], 
onEnd);\n}\n"],"names":["once","bufferFrom","fs","mkdirp","oo","path","BufferSource","FileSource","DEFAULT_MEMORY_THRESHOLD","streamToSource","stream","options","callback","threshold","memoryThreshold","undefined","tempPath","chunks","totalSize","writeStream","useTempFile","end","onData","chunk","buf","length","push","Error","sync","dirname","createWriteStream","i","write","onEnd","filePath","open","err","fd","stat","statErr","stats","closeSync","source","size","fullBuffer","Buffer","concat","onError","on"],"mappings":"AAAA,wFAAwF;AACxF,OAAOA,UAAU,eAAe;AAChC,SAASC,UAAU,QAAQ,wBAAwB;AACnD,OAAOC,QAAQ,KAAK;AACpB,OAAOC,YAAY,iBAAiB;AACpC,OAAOC,QAAQ,SAAS;AACxB,OAAOC,UAAU,OAAO;AACxB,SAASC,YAAY,EAAEC,UAAU,QAAQ,8BAA8B;AAEvE,mCAAmC;AACnC,MAAMC,2BAA2B,MAAM,OAAO;AAe9C;;;;;;;CAOC,GACD,eAAe,SAASC,eAAeC,MAA6B,EAAEC,OAA8B,EAAEC,QAAkB;IACtH,MAAMC,YAAYF,QAAQG,eAAe,KAAKC,YAAYJ,QAAQG,eAAe,GAAGN;IACpF,MAAMQ,WAAWL,QAAQK,QAAQ;IAEjC,IAAIC,SAAmB,EAAE;IACzB,IAAIC,YAAY;IAChB,IAAIC,cAAqC;IACzC,IAAIC,cAAc;IAElB,MAAMC,MAAMrB,KAAKY;IAEjB,SAASU,OAAOC,KAAsB;QACpC,kCAAkC;QAClC,MAAMC,MAAM,OAAOD,UAAU,WAAWtB,WAAWsB,SAASA;QAC5DL,aAAaM,IAAIC,MAAM;QAEvB,IAAI,CAACL,eAAeF,aAAaL,WAAW;YAC1C,2CAA2C;YAC3CI,OAAOS,IAAI,CAACF;QACd,OAAO,IAAI,CAACJ,aAAa;YACvB,gDAAgD;YAChDA,cAAc;YAEd,IAAI,CAACJ,UAAU;gBACbK,IAAI,IAAIM,MAAM;gBACd;YACF;YAEAxB,OAAOyB,IAAI,CAACvB,KAAKwB,OAAO,CAACb;YACzBG,cAAcjB,GAAG4B,iBAAiB,CAACd;YAEnC,yCAAyC;YACzC,IAAK,IAAIe,IAAI,GAAGA,IAAId,OAAOQ,MAAM,EAAEM,IAAK;gBACtCZ,YAAYa,KAAK,CAACf,MAAM,CAACc,EAAE;YAC7B;YACAd,SAAS,EAAE,EAAE,WAAW;YAExB,sBAAsB;YACtBE,YAAYa,KAAK,CAACR;QACpB,OAAO;YACL,2CAA2C;YAC3C,IAAIL,aAAa;gBACfA,YAAYa,KAAK,CAACR;YACpB;QACF;IACF;IAEA,SAASS;QACP,IAAIb,eAAeD,eAAeH,UAAU;YAC1C,4CAA4C;YAC5C,MAAMkB,WAAWlB,UAAU,sBAAsB;YACjDG,YAAYE,GAAG,CAAC;gBACdnB,GAAGiC,IAAI,CAACD,UAAU,KAAK,CAACE,KAAKC;oBAC3B,IAAID,KAAK,OAAOf,IAAIe;oBACpBlC,GAAGoC,IAAI,CAACJ,UAAU,CAACK,SAASC;wBAC1B,IAAID,SAAS;4BACXrC,GAAGuC,SAAS,CAACJ;4BACb,OAAOhB,IAAIkB;wBACb;wBACAlB,IAAI,MAAM;4BACRqB,QAAQ,IAAInC,WAAW8B,IAAIG,MAAMG,IAAI;
4BACrCN,IAAIA;4BACJrB,UAAUkB;wBACZ;oBACF;gBACF;YACF;QACF,OAAO;YACL,oBAAoB;YACpB,MAAMU,aAAaC,OAAOC,MAAM,CAAC7B;YACjCI,IAAI,MAAM;gBACRqB,QAAQ,IAAIpC,aAAasC;YAC3B;QACF;IACF;IAEA,SAASG,QAAQX,GAAU;QACzB,qCAAqC;QACrC,IAAIjB,aAAa;YACfA,YAAYE,GAAG;QACjB;QACAA,IAAIe;IACN;IAEA1B,OAAOsC,EAAE,CAAC,QAAQ1B;IAClBlB,GAAGM,QAAQ;QAAC;KAAQ,EAAEqC;IACtB3C,GAAGM,QAAQ;QAAC;QAAO;QAAS;KAAS,EAAEuB;AACzC"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lib/streamToSource.ts"],"sourcesContent":["// Stream to source conversion: writes stream to temp file for random access\nimport once from 'call-once-fn';\nimport { bufferFrom } from 'extract-base-iterator';\nimport fs from 'fs';\nimport mkdirp from 'mkdirp-classic';\nimport oo from 'on-one';\nimport path from 'path';\nimport { FileSource } from '../sevenz/SevenZipParser.ts';\n\nexport interface StreamToSourceOptions {\n tempPath: string;\n}\n\nexport interface SourceResult {\n source: FileSource;\n fd: number; // Caller must close\n tempPath: string; // Caller must clean up\n}\n\nexport type Callback = (error?: Error, result?: SourceResult) => void;\n\n/**\n * Convert a stream to a FileSource by writing to temp file\n *\n * 7z format requires random access for header parsing, so temp file is necessary for streams.\n * Writes directly to temp file for predictable O(1) memory usage during stream consumption.\n */\nexport default function streamToSource(stream: NodeJS.ReadableStream, options: StreamToSourceOptions, callback: Callback): void {\n const tempPath = options.tempPath;\n\n const end = once(callback);\n\n mkdirp.sync(path.dirname(tempPath));\n const writeStream = fs.createWriteStream(tempPath);\n\n function onData(chunk: Buffer | string): void {\n const buf = typeof chunk === 'string' ? 
bufferFrom(chunk) : chunk;\n writeStream.write(buf);\n }\n\n function onEnd(): void {\n writeStream.end(() => {\n fs.open(tempPath, 'r', (err, fd) => {\n if (err) return end(err);\n fs.stat(tempPath, (statErr, stats) => {\n if (statErr) {\n fs.closeSync(fd);\n return end(statErr);\n }\n end(null, {\n source: new FileSource(fd, stats.size),\n fd: fd,\n tempPath: tempPath,\n });\n });\n });\n });\n }\n\n function onError(err: Error): void {\n writeStream.end();\n end(err);\n }\n\n stream.on('data', onData);\n oo(stream, ['error'], onError);\n oo(stream, ['end', 'close', 'finish'], onEnd);\n}\n"],"names":["once","bufferFrom","fs","mkdirp","oo","path","FileSource","streamToSource","stream","options","callback","tempPath","end","sync","dirname","writeStream","createWriteStream","onData","chunk","buf","write","onEnd","open","err","fd","stat","statErr","stats","closeSync","source","size","onError","on"],"mappings":"AAAA,4EAA4E;AAC5E,OAAOA,UAAU,eAAe;AAChC,SAASC,UAAU,QAAQ,wBAAwB;AACnD,OAAOC,QAAQ,KAAK;AACpB,OAAOC,YAAY,iBAAiB;AACpC,OAAOC,QAAQ,SAAS;AACxB,OAAOC,UAAU,OAAO;AACxB,SAASC,UAAU,QAAQ,8BAA8B;AAczD;;;;;CAKC,GACD,eAAe,SAASC,eAAeC,MAA6B,EAAEC,OAA8B,EAAEC,QAAkB;IACtH,MAAMC,WAAWF,QAAQE,QAAQ;IAEjC,MAAMC,MAAMZ,KAAKU;IAEjBP,OAAOU,IAAI,CAACR,KAAKS,OAAO,CAACH;IACzB,MAAMI,cAAcb,GAAGc,iBAAiB,CAACL;IAEzC,SAASM,OAAOC,KAAsB;QACpC,MAAMC,MAAM,OAAOD,UAAU,WAAWjB,WAAWiB,SAASA;QAC5DH,YAAYK,KAAK,CAACD;IACpB;IAEA,SAASE;QACPN,YAAYH,GAAG,CAAC;YACdV,GAAGoB,IAAI,CAACX,UAAU,KAAK,CAACY,KAAKC;gBAC3B,IAAID,KAAK,OAAOX,IAAIW;gBACpBrB,GAAGuB,IAAI,CAACd,UAAU,CAACe,SAASC;oBAC1B,IAAID,SAAS;wBACXxB,GAAG0B,SAAS,CAACJ;wBACb,OAAOZ,IAAIc;oBACb;oBACAd,IAAI,MAAM;wBACRiB,QAAQ,IAAIvB,WAAWkB,IAAIG,MAAMG,IAAI;wBACrCN,IAAIA;wBACJb,UAAUA;oBACZ;gBACF;YACF;QACF;IACF;IAEA,SAASoB,QAAQR,GAAU;QACzBR,YAAYH,GAAG;QACfA,IAAIW;IACN;IAEAf,OAAOwB,EAAE,CAAC,QAAQf;IAClBb,GAAGI,QAAQ;QAAC;KAAQ,EAAEuB;IACtB3B,GAAGI,QAAQ;QAAC;QAAO;QAAS;KAAS,EAAEa;AACzC"}
@@ -0,0 +1,73 @@
1
+ /**
2
+ * LZMA2 Chunk Parser
3
+ *
4
+ * Shared parsing logic for LZMA2 chunk headers.
5
+ * Used by both synchronous and streaming decoders.
6
+ *
7
+ * LZMA2 control byte ranges:
8
+ * 0x00 = End of stream
9
+ * 0x01 = Uncompressed chunk, dictionary reset
10
+ * 0x02 = Uncompressed chunk, no dictionary reset
11
+ * 0x80-0x9F = LZMA chunk, no reset (solid mode)
12
+ * 0xA0-0xBF = LZMA chunk, reset state (probabilities)
13
+ * 0xC0-0xDF = LZMA chunk, reset state + new properties
14
+ * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties
15
+ */
16
+ /**
17
+ * LZMA properties extracted from chunk header
18
+ */
19
+ export interface LzmaChunkProps {
20
+ lc: number;
21
+ lp: number;
22
+ pb: number;
23
+ }
24
+ /**
25
+ * Parsed LZMA2 chunk information
26
+ */
27
+ export interface Lzma2Chunk {
28
+ /** Chunk type */
29
+ type: 'end' | 'uncompressed' | 'lzma';
30
+ /** Total bytes consumed by header (including control byte) */
31
+ headerSize: number;
32
+ /** Whether to reset dictionary */
33
+ dictReset: boolean;
34
+ /** Whether to reset state/probabilities */
35
+ stateReset: boolean;
36
+ /** New LZMA properties (only for control >= 0xC0) */
37
+ newProps: LzmaChunkProps | null;
38
+ /** Uncompressed data size */
39
+ uncompSize: number;
40
+ /** Compressed data size (0 for uncompressed chunks) */
41
+ compSize: number;
42
+ }
43
+ /**
44
+ * Result of parsing attempt
45
+ */
46
+ export type ParseResult = {
47
+ success: true;
48
+ chunk: Lzma2Chunk;
49
+ } | {
50
+ success: false;
51
+ needBytes: number;
52
+ };
53
+ /**
54
+ * Parse an LZMA2 chunk header
55
+ *
56
+ * @param input - Input buffer
57
+ * @param offset - Offset to start parsing
58
+ * @returns Parsed chunk info or number of bytes needed
59
+ */
60
+ export declare function parseLzma2ChunkHeader(input: Buffer, offset: number): ParseResult;
61
+ /** Result type for hasCompleteChunk with totalSize included on success */
62
+ export type CompleteChunkResult = {
63
+ success: true;
64
+ chunk: Lzma2Chunk;
65
+ totalSize: number;
66
+ } | {
67
+ success: false;
68
+ needBytes: number;
69
+ };
70
+ /**
71
+ * Check if we have enough data for the complete chunk (header + data)
72
+ */
73
+ export declare function hasCompleteChunk(input: Buffer, offset: number): CompleteChunkResult;
@@ -0,0 +1,137 @@
1
+ /**
2
+ * LZMA2 Chunk Parser
3
+ *
4
+ * Shared parsing logic for LZMA2 chunk headers.
5
+ * Used by both synchronous and streaming decoders.
6
+ *
7
+ * LZMA2 control byte ranges:
8
+ * 0x00 = End of stream
9
+ * 0x01 = Uncompressed chunk, dictionary reset
10
+ * 0x02 = Uncompressed chunk, no dictionary reset
11
+ * 0x80-0x9F = LZMA chunk, no reset (solid mode)
12
+ * 0xA0-0xBF = LZMA chunk, reset state (probabilities)
13
+ * 0xC0-0xDF = LZMA chunk, reset state + new properties
14
+ * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties
15
+ */ /**
16
+ * LZMA properties extracted from chunk header
17
+ */ /**
18
+ * Parse an LZMA2 chunk header
19
+ *
20
+ * @param input - Input buffer
21
+ * @param offset - Offset to start parsing
22
+ * @returns Parsed chunk info or number of bytes needed
23
+ */ export function parseLzma2ChunkHeader(input, offset) {
24
+ if (offset >= input.length) {
25
+ return {
26
+ success: false,
27
+ needBytes: 1
28
+ };
29
+ }
30
+ const control = input[offset];
31
+ // End of stream
32
+ if (control === 0x00) {
33
+ return {
34
+ success: true,
35
+ chunk: {
36
+ type: 'end',
37
+ headerSize: 1,
38
+ dictReset: false,
39
+ stateReset: false,
40
+ newProps: null,
41
+ uncompSize: 0,
42
+ compSize: 0
43
+ }
44
+ };
45
+ }
46
+ // Uncompressed chunk
47
+ if (control === 0x01 || control === 0x02) {
48
+ // Need 3 bytes: control + 2 size bytes
49
+ if (offset + 3 > input.length) {
50
+ return {
51
+ success: false,
52
+ needBytes: 3 - (input.length - offset)
53
+ };
54
+ }
55
+ const uncompSize = (input[offset + 1] << 8 | input[offset + 2]) + 1;
56
+ return {
57
+ success: true,
58
+ chunk: {
59
+ type: 'uncompressed',
60
+ headerSize: 3,
61
+ dictReset: control === 0x01,
62
+ stateReset: false,
63
+ newProps: null,
64
+ uncompSize,
65
+ compSize: 0
66
+ }
67
+ };
68
+ }
69
+ // LZMA compressed chunk
70
+ if (control >= 0x80) {
71
+ const hasNewProps = control >= 0xc0;
72
+ const minHeaderSize = hasNewProps ? 6 : 5; // control + 2 uncomp + 2 comp + (1 props)
73
+ if (offset + minHeaderSize > input.length) {
74
+ return {
75
+ success: false,
76
+ needBytes: minHeaderSize - (input.length - offset)
77
+ };
78
+ }
79
+ // Parse sizes
80
+ const uncompHigh = control & 0x1f;
81
+ const uncompSize = (uncompHigh << 16 | input[offset + 1] << 8 | input[offset + 2]) + 1;
82
+ const compSize = (input[offset + 3] << 8 | input[offset + 4]) + 1;
83
+ // Parse properties if present
84
+ let newProps = null;
85
+ if (hasNewProps) {
86
+ const propsByte = input[offset + 5];
87
+ const lc = propsByte % 9;
88
+ const remainder = ~~(propsByte / 9);
89
+ const lp = remainder % 5;
90
+ const pb = ~~(remainder / 5);
91
+ newProps = {
92
+ lc,
93
+ lp,
94
+ pb
95
+ };
96
+ }
97
+ return {
98
+ success: true,
99
+ chunk: {
100
+ type: 'lzma',
101
+ headerSize: minHeaderSize,
102
+ dictReset: control >= 0xe0,
103
+ stateReset: control >= 0xa0,
104
+ newProps,
105
+ uncompSize,
106
+ compSize
107
+ }
108
+ };
109
+ }
110
+ // Invalid control byte
111
+ throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);
112
+ }
113
+ /**
114
+ * Check if we have enough data for the complete chunk (header + data)
115
+ */ export function hasCompleteChunk(input, offset) {
116
+ const result = parseLzma2ChunkHeader(input, offset);
117
+ if (result.success === false) {
118
+ return {
119
+ success: false,
120
+ needBytes: result.needBytes
121
+ };
122
+ }
123
+ const { chunk } = result;
124
+ const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
125
+ const totalSize = chunk.headerSize + dataSize;
126
+ if (offset + totalSize > input.length) {
127
+ return {
128
+ success: false,
129
+ needBytes: totalSize - (input.length - offset)
130
+ };
131
+ }
132
+ return {
133
+ success: true,
134
+ chunk,
135
+ totalSize
136
+ };
137
+ }
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lzma/Lzma2ChunkParser.ts"],"sourcesContent":["/**\n * LZMA2 Chunk Parser\n *\n * Shared parsing logic for LZMA2 chunk headers.\n * Used by both synchronous and streaming decoders.\n *\n * LZMA2 control byte ranges:\n * 0x00 = End of stream\n * 0x01 = Uncompressed chunk, dictionary reset\n * 0x02 = Uncompressed chunk, no dictionary reset\n * 0x80-0x9F = LZMA chunk, no reset (solid mode)\n * 0xA0-0xBF = LZMA chunk, reset state (probabilities)\n * 0xC0-0xDF = LZMA chunk, reset state + new properties\n * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties\n */\n\n/**\n * LZMA properties extracted from chunk header\n */\nexport interface LzmaChunkProps {\n lc: number;\n lp: number;\n pb: number;\n}\n\n/**\n * Parsed LZMA2 chunk information\n */\nexport interface Lzma2Chunk {\n /** Chunk type */\n type: 'end' | 'uncompressed' | 'lzma';\n /** Total bytes consumed by header (including control byte) */\n headerSize: number;\n /** Whether to reset dictionary */\n dictReset: boolean;\n /** Whether to reset state/probabilities */\n stateReset: boolean;\n /** New LZMA properties (only for control >= 0xC0) */\n newProps: LzmaChunkProps | null;\n /** Uncompressed data size */\n uncompSize: number;\n /** Compressed data size (0 for uncompressed chunks) */\n compSize: number;\n}\n\n/**\n * Result of parsing attempt\n */\nexport type ParseResult = { success: true; chunk: Lzma2Chunk } | { success: false; needBytes: number };\n\n/**\n * Parse an LZMA2 chunk header\n *\n * @param input - Input buffer\n * @param offset - Offset to start parsing\n * @returns Parsed chunk info or number of bytes needed\n */\nexport function parseLzma2ChunkHeader(input: Buffer, offset: number): ParseResult {\n if (offset >= input.length) {\n return { success: false, needBytes: 1 };\n }\n\n const control = input[offset];\n\n // End of stream\n if (control === 0x00) {\n return {\n success: true,\n chunk: {\n type: 
'end',\n headerSize: 1,\n dictReset: false,\n stateReset: false,\n newProps: null,\n uncompSize: 0,\n compSize: 0,\n },\n };\n }\n\n // Uncompressed chunk\n if (control === 0x01 || control === 0x02) {\n // Need 3 bytes: control + 2 size bytes\n if (offset + 3 > input.length) {\n return { success: false, needBytes: 3 - (input.length - offset) };\n }\n\n const uncompSize = ((input[offset + 1] << 8) | input[offset + 2]) + 1;\n\n return {\n success: true,\n chunk: {\n type: 'uncompressed',\n headerSize: 3,\n dictReset: control === 0x01,\n stateReset: false,\n newProps: null,\n uncompSize,\n compSize: 0,\n },\n };\n }\n\n // LZMA compressed chunk\n if (control >= 0x80) {\n const hasNewProps = control >= 0xc0;\n const minHeaderSize = hasNewProps ? 6 : 5; // control + 2 uncomp + 2 comp + (1 props)\n\n if (offset + minHeaderSize > input.length) {\n return { success: false, needBytes: minHeaderSize - (input.length - offset) };\n }\n\n // Parse sizes\n const uncompHigh = control & 0x1f;\n const uncompSize = ((uncompHigh << 16) | (input[offset + 1] << 8) | input[offset + 2]) + 1;\n const compSize = ((input[offset + 3] << 8) | input[offset + 4]) + 1;\n\n // Parse properties if present\n let newProps: LzmaChunkProps | null = null;\n if (hasNewProps) {\n const propsByte = input[offset + 5];\n const lc = propsByte % 9;\n const remainder = ~~(propsByte / 9);\n const lp = remainder % 5;\n const pb = ~~(remainder / 5);\n newProps = { lc, lp, pb };\n }\n\n return {\n success: true,\n chunk: {\n type: 'lzma',\n headerSize: minHeaderSize,\n dictReset: control >= 0xe0,\n stateReset: control >= 0xa0,\n newProps,\n uncompSize,\n compSize,\n },\n };\n }\n\n // Invalid control byte\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n}\n\n/** Result type for hasCompleteChunk with totalSize included on success */\nexport type CompleteChunkResult = { success: true; chunk: Lzma2Chunk; totalSize: number } | { success: false; needBytes: number };\n\n/**\n * Check if we 
have enough data for the complete chunk (header + data)\n */\nexport function hasCompleteChunk(input: Buffer, offset: number): CompleteChunkResult {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (result.success === false) {\n return { success: false, needBytes: result.needBytes };\n }\n\n const { chunk } = result;\n const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;\n const totalSize = chunk.headerSize + dataSize;\n\n if (offset + totalSize > input.length) {\n return { success: false, needBytes: totalSize - (input.length - offset) };\n }\n\n return { success: true, chunk, totalSize };\n}\n"],"names":["parseLzma2ChunkHeader","input","offset","length","success","needBytes","control","chunk","type","headerSize","dictReset","stateReset","newProps","uncompSize","compSize","hasNewProps","minHeaderSize","uncompHigh","propsByte","lc","remainder","lp","pb","Error","toString","hasCompleteChunk","result","dataSize","totalSize"],"mappings":"AAAA;;;;;;;;;;;;;;CAcC,GAED;;CAEC,GAgCD;;;;;;CAMC,GACD,OAAO,SAASA,sBAAsBC,KAAa,EAAEC,MAAc;IACjE,IAAIA,UAAUD,MAAME,MAAM,EAAE;QAC1B,OAAO;YAAEC,SAAS;YAAOC,WAAW;QAAE;IACxC;IAEA,MAAMC,UAAUL,KAAK,CAACC,OAAO;IAE7B,gBAAgB;IAChB,IAAII,YAAY,MAAM;QACpB,OAAO;YACLF,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAW;gBACXC,YAAY;gBACZC,UAAU;gBACVC,YAAY;gBACZC,UAAU;YACZ;QACF;IACF;IAEA,qBAAqB;IACrB,IAAIR,YAAY,QAAQA,YAAY,MAAM;QACxC,uCAAuC;QACvC,IAAIJ,SAAS,IAAID,MAAME,MAAM,EAAE;YAC7B,OAAO;gBAAEC,SAAS;gBAAOC,WAAW,IAAKJ,CAAAA,MAAME,MAAM,GAAGD,MAAK;YAAG;QAClE;QAEA,MAAMW,aAAa,AAAC,CAAA,AAACZ,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAEpE,OAAO;YACLE,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAWJ,YAAY;gBACvBK,YAAY;gBACZC,UAAU;gBACVC;gBACAC,UAAU;YACZ;QACF;IACF;IAEA,wBAAwB;IACxB,IAAIR,WAAW,MAAM;QACnB,MAAMS,cAAcT,WAAW;QAC/B,MAAMU,gBAAgBD,cAAc,IAAI,GAAG,0CAA0C;QAErF,IAAIb,SAASc,gBAAgBf,MAAME,MAAM,EAAE;YACzC,OAAO;gBAAEC,SAAS;gBAAOC,WAAWW,gBAAiBf,CAAAA,MAAME,MAAM,GAAGD,MAAK;YAAG;QAC9E;QAEA,cAAc;
QACd,MAAMe,aAAaX,UAAU;QAC7B,MAAMO,aAAa,AAAC,CAAA,AAACI,cAAc,KAAOhB,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QACzF,MAAMY,WAAW,AAAC,CAAA,AAACb,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAElE,8BAA8B;QAC9B,IAAIU,WAAkC;QACtC,IAAIG,aAAa;YACf,MAAMG,YAAYjB,KAAK,CAACC,SAAS,EAAE;YACnC,MAAMiB,KAAKD,YAAY;YACvB,MAAME,YAAY,CAAC,CAAEF,CAAAA,YAAY,CAAA;YACjC,MAAMG,KAAKD,YAAY;YACvB,MAAME,KAAK,CAAC,CAAEF,CAAAA,YAAY,CAAA;YAC1BR,WAAW;gBAAEO;gBAAIE;gBAAIC;YAAG;QAC1B;QAEA,OAAO;YACLlB,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAYO;gBACZN,WAAWJ,WAAW;gBACtBK,YAAYL,WAAW;gBACvBM;gBACAC;gBACAC;YACF;QACF;IACF;IAEA,uBAAuB;IACvB,MAAM,IAAIS,MAAM,CAAC,8BAA8B,EAAEjB,QAAQkB,QAAQ,CAAC,KAAK;AACzE;AAKA;;CAEC,GACD,OAAO,SAASC,iBAAiBxB,KAAa,EAAEC,MAAc;IAC5D,MAAMwB,SAAS1B,sBAAsBC,OAAOC;IAE5C,IAAIwB,OAAOtB,OAAO,KAAK,OAAO;QAC5B,OAAO;YAAEA,SAAS;YAAOC,WAAWqB,OAAOrB,SAAS;QAAC;IACvD;IAEA,MAAM,EAAEE,KAAK,EAAE,GAAGmB;IAClB,MAAMC,WAAWpB,MAAMC,IAAI,KAAK,iBAAiBD,MAAMM,UAAU,GAAGN,MAAMO,QAAQ;IAClF,MAAMc,YAAYrB,MAAME,UAAU,GAAGkB;IAErC,IAAIzB,SAAS0B,YAAY3B,MAAME,MAAM,EAAE;QACrC,OAAO;YAAEC,SAAS;YAAOC,WAAWuB,YAAa3B,CAAAA,MAAME,MAAM,GAAGD,MAAK;QAAG;IAC1E;IAEA,OAAO;QAAEE,SAAS;QAAMG;QAAOqB;IAAU;AAC3C"}
@@ -0,0 +1,13 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ */
9
+ export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
10
+ export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
11
+ export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
12
+ export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
13
+ export * from './types.js';