7z-iterator 1.1.2 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (184)
  1. package/dist/cjs/FileEntry.d.cts +12 -4
  2. package/dist/cjs/FileEntry.d.ts +12 -4
  3. package/dist/cjs/FileEntry.js +52 -24
  4. package/dist/cjs/FileEntry.js.map +1 -1
  5. package/dist/cjs/SevenZipIterator.d.cts +25 -2
  6. package/dist/cjs/SevenZipIterator.d.ts +25 -2
  7. package/dist/cjs/SevenZipIterator.js +68 -21
  8. package/dist/cjs/SevenZipIterator.js.map +1 -1
  9. package/dist/cjs/index.d.cts +1 -2
  10. package/dist/cjs/index.d.ts +1 -2
  11. package/dist/cjs/index.js +19 -3
  12. package/dist/cjs/index.js.map +1 -1
  13. package/dist/cjs/lib/streamToSource.d.cts +8 -11
  14. package/dist/cjs/lib/streamToSource.d.ts +8 -11
  15. package/dist/cjs/lib/streamToSource.js +21 -67
  16. package/dist/cjs/lib/streamToSource.js.map +1 -1
  17. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  18. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  19. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  20. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  21. package/dist/cjs/lzma/index.d.cts +13 -0
  22. package/dist/cjs/lzma/index.d.ts +13 -0
  23. package/dist/cjs/lzma/index.js +63 -0
  24. package/dist/cjs/lzma/index.js.map +1 -0
  25. package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
  26. package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
  27. package/dist/cjs/lzma/stream/transforms.js +149 -0
  28. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  29. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
  30. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
  31. package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
  32. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  33. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
  34. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
  35. package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
  36. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  37. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  38. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  39. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  40. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  41. package/dist/cjs/lzma/types.d.cts +110 -0
  42. package/dist/cjs/lzma/types.d.ts +110 -0
  43. package/dist/cjs/lzma/types.js +264 -0
  44. package/dist/cjs/lzma/types.js.map +1 -0
  45. package/dist/cjs/nextEntry.js +24 -26
  46. package/dist/cjs/nextEntry.js.map +1 -1
  47. package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
  48. package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
  49. package/dist/cjs/sevenz/ArchiveSource.js +69 -0
  50. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  51. package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
  52. package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
  53. package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
  54. package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
  55. package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
  56. package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
  57. package/dist/cjs/sevenz/SevenZipParser.js +574 -203
  58. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  59. package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
  60. package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
  61. package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
  62. package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
  63. package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
  64. package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
  65. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
  66. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
  67. package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
  68. package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
  69. package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
  70. package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
  71. package/dist/cjs/sevenz/codecs/Copy.js +2 -15
  72. package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
  73. package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
  74. package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
  75. package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
  76. package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
  77. package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
  78. package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
  79. package/dist/cjs/sevenz/codecs/Delta.js +29 -10
  80. package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
  81. package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
  82. package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
  83. package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
  84. package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
  85. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
  86. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
  87. package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
  88. package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
  89. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
  90. package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  91. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
  92. package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  93. package/dist/cjs/types.d.cts +2 -16
  94. package/dist/cjs/types.d.ts +2 -16
  95. package/dist/cjs/types.js.map +1 -1
  96. package/dist/esm/FileEntry.d.ts +12 -4
  97. package/dist/esm/FileEntry.js +52 -26
  98. package/dist/esm/FileEntry.js.map +1 -1
  99. package/dist/esm/SevenZipIterator.d.ts +25 -2
  100. package/dist/esm/SevenZipIterator.js +69 -22
  101. package/dist/esm/SevenZipIterator.js.map +1 -1
  102. package/dist/esm/index.d.ts +1 -2
  103. package/dist/esm/index.js +2 -1
  104. package/dist/esm/index.js.map +1 -1
  105. package/dist/esm/lib/streamToSource.d.ts +8 -11
  106. package/dist/esm/lib/streamToSource.js +22 -68
  107. package/dist/esm/lib/streamToSource.js.map +1 -1
  108. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  109. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  110. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  111. package/dist/esm/lzma/index.d.ts +13 -0
  112. package/dist/esm/lzma/index.js +15 -0
  113. package/dist/esm/lzma/index.js.map +1 -0
  114. package/dist/esm/lzma/stream/transforms.d.ts +38 -0
  115. package/dist/esm/lzma/stream/transforms.js +150 -0
  116. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  117. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
  118. package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
  119. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  120. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
  121. package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
  122. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  123. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  124. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  125. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  126. package/dist/esm/lzma/types.d.ts +110 -0
  127. package/dist/esm/lzma/types.js +154 -0
  128. package/dist/esm/lzma/types.js.map +1 -0
  129. package/dist/esm/nextEntry.js +24 -26
  130. package/dist/esm/nextEntry.js.map +1 -1
  131. package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
  132. package/dist/esm/sevenz/ArchiveSource.js +70 -1
  133. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  134. package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
  135. package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
  136. package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
  137. package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
  138. package/dist/esm/sevenz/SevenZipParser.js +414 -198
  139. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  140. package/dist/esm/sevenz/codecs/BZip2.js +2 -1
  141. package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
  142. package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
  143. package/dist/esm/sevenz/codecs/Bcj.js +106 -6
  144. package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
  145. package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
  146. package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
  147. package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
  148. package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
  149. package/dist/esm/sevenz/codecs/Copy.js +1 -9
  150. package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
  151. package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
  152. package/dist/esm/sevenz/codecs/Deflate.js +9 -7
  153. package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
  154. package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
  155. package/dist/esm/sevenz/codecs/Delta.js +33 -8
  156. package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
  157. package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
  158. package/dist/esm/sevenz/codecs/Lzma.js +17 -24
  159. package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
  160. package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
  161. package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
  162. package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
  163. package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
  164. package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
  165. package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
  166. package/dist/esm/types.d.ts +2 -16
  167. package/dist/esm/types.js.map +1 -1
  168. package/package.json +3 -3
  169. package/assets/lzma-purejs/LICENSE +0 -11
  170. package/assets/lzma-purejs/index.js +0 -19
  171. package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
  172. package/assets/lzma-purejs/lib/LZ.js +0 -6
  173. package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
  174. package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
  175. package/assets/lzma-purejs/lib/LZMA.js +0 -6
  176. package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
  177. package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
  178. package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
  179. package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
  180. package/assets/lzma-purejs/lib/Stream.js +0 -41
  181. package/assets/lzma-purejs/lib/Util.js +0 -114
  182. package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
  183. package/assets/lzma-purejs/package-lock.json +0 -13
  184. package/assets/lzma-purejs/package.json +0 -8
@@ -5,10 +5,11 @@
  // these to absolute addresses during compression, and back during decompression.
  //
  // Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c
- import { bufferFrom } from 'extract-base-iterator';
- import createBufferingDecoder from './createBufferingDecoder.js';
+ //
+ // This implementation uses true streaming - processes data chunk by chunk.
+ import { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';
  /**
- * Decode ARM BCJ filtered data
+ * Decode ARM BCJ filtered data (synchronous, for buffered use)
  * Reverses the BCJ transformation by converting absolute addresses back to relative
  *
  * ARM BL instruction format:
@@ -35,7 +36,6 @@ import createBufferingDecoder from './createBufferingDecoder.js';
  }
  // Convert absolute to relative:
  // Subtract current position (in words, so divide by 4)
- // ARM PC is 2 words (8 bytes) ahead during execution
  const relAddr = addr - (pos >>> 2);
  // Write back lower 24 bits
  output[pos] = relAddr & 0xff;
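
Note: the absolute-to-relative conversion in this hunk is easiest to follow with concrete numbers. The sketch below is editorial (not from the package); it mirrors the single-instruction unfilter step performed by the surrounding decode loop and works through one BL instruction.

    // Sketch: reverse the ARM BCJ filter for one BL instruction at byte offset `pos`.
    // Same layout as the decode loop above: bytes 0-2 hold a little-endian 24-bit
    // word address, byte 3 is the 0xEB BL opcode.
    function unfilterBl(output: Buffer, pos: number): void {
      if (output[pos + 3] !== 0xeb) return; // not a BL instruction
      let addr = output[pos] | (output[pos + 1] << 8) | (output[pos + 2] << 16);
      if (addr & 0x800000) addr |= 0xff000000; // sign-extend 24-bit value
      const relAddr = addr - (pos >>> 2); // positions are counted in 4-byte words
      output[pos] = relAddr & 0xff;
      output[pos + 1] = (relAddr >>> 8) & 0xff;
      output[pos + 2] = (relAddr >>> 16) & 0xff;
    }

    // Example: a BL at byte offset 8 targeting absolute word 5 becomes relative word 3.
    const buf = Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x05, 0x00, 0x00, 0xeb]);
    unfilterBl(buf, 8);
    // buf.slice(8) is now <Buffer 03 00 00 eb>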
@@ -47,7 +47,55 @@ import createBufferingDecoder from './createBufferingDecoder.js';
  return output;
  }
  /**
- * Create an ARM BCJ decoder Transform stream
- */ export function createBcjArmDecoder(properties, unpackSize) {
- return createBufferingDecoder(decodeBcjArm, properties, unpackSize);
+ * Create a streaming ARM BCJ decoder Transform.
+ * Processes data in 4-byte aligned chunks.
+ */ export function createBcjArmDecoder(_properties, _unpackSize) {
+ let globalPos = 0; // Position in the overall stream (in bytes)
+ let pending = null; // Incomplete 4-byte group
+ const transform = new Transform({
+ transform: (chunk, _encoding, callback)=>{
+ // Combine pending bytes with new chunk
+ let data;
+ if (pending && pending.length > 0) {
+ data = Buffer.concat([
+ pending,
+ chunk
+ ]);
+ } else {
+ data = chunk;
+ }
+ // Process only complete 4-byte groups
+ const completeBytes = data.length - data.length % 4;
+ if (completeBytes === 0) {
+ pending = data;
+ callback(null, allocBuffer(0));
+ return;
+ }
+ const output = bufferFrom(data.slice(0, completeBytes));
+ pending = data.length > completeBytes ? data.slice(completeBytes) : null;
+ let pos = 0;
+ while(pos + 4 <= output.length){
+ if (output[pos + 3] === 0xeb) {
+ let addr = output[pos] | output[pos + 1] << 8 | output[pos + 2] << 16;
+ if (addr & 0x800000) {
+ addr |= 0xff000000;
+ }
+ const relAddr = addr - (globalPos >>> 2);
+ output[pos] = relAddr & 0xff;
+ output[pos + 1] = relAddr >>> 8 & 0xff;
+ output[pos + 2] = relAddr >>> 16 & 0xff;
+ }
+ pos += 4;
+ globalPos += 4;
+ }
+ callback(null, output);
+ },
+ flush: function(callback) {
+ if (pending && pending.length > 0) {
+ this.push(pending);
+ }
+ callback(null);
+ }
+ });
+ return transform;
  }
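
A usage sketch for the new streaming decoder (illustrative only: the import path is the compiled internal module shown in this diff and is assumed to be resolvable, and the Transform re-exported by extract-base-iterator is assumed to behave like Node's stream.Transform). The input is split mid-instruction to exercise the pending 4-byte buffer:

    // Feed unaligned chunks through the streaming decoder and check that the
    // result matches the synchronous decodeBcjArm() on the whole buffer.
    import { createBcjArmDecoder, decodeBcjArm } from '7z-iterator/dist/esm/sevenz/codecs/BcjArm.js';

    const filtered = Buffer.from([0x05, 0x00, 0x00, 0xeb, 0x09, 0x00, 0x00, 0xeb]);
    const decoder = createBcjArmDecoder();
    const parts: Buffer[] = [];
    decoder.on('data', (chunk: Buffer) => parts.push(chunk));
    decoder.on('end', () => {
      const streamed = Buffer.concat(parts);
      console.log(streamed.equals(decodeBcjArm(filtered))); // expected: true
    });
    // Split mid-instruction: the incomplete 4-byte group is held in `pending`.
    decoder.write(filtered.slice(0, 3));
    decoder.end(filtered.slice(3));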
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/BcjArm.ts"],"sourcesContent":["// BCJ (ARM 32-bit) filter codec - converts ARM branch instruction addresses\n// This filter makes ARM executables more compressible by LZMA\n//\n// ARM branch instructions (BL) use relative addressing. The filter converts\n// these to absolute addresses during compression, and back during decompression.\n//\n// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c\n\nimport { bufferFrom } from 'extract-base-iterator';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\n\n/**\n * Decode ARM BCJ filtered data\n * Reverses the BCJ transformation by converting absolute addresses back to relative\n *\n * ARM BL instruction format:\n * - 4 bytes aligned\n * - Byte pattern: XX XX XX EB (where EB = 0xEB opcode for BL)\n * - Lower 24 bits are signed offset (in words, not bytes)\n *\n * @param input - ARM BCJ filtered data\n * @param _properties - Unused for ARM BCJ\n * @param _unpackSize - Unused for ARM BCJ\n * @returns Unfiltered data\n */\nexport function decodeBcjArm(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n const output = bufferFrom(input); // Copy since we modify in place\n let pos = 0;\n\n // Process 4-byte aligned positions\n while (pos + 4 <= output.length) {\n // Check for BL instruction: byte 3 is 0xEB\n if (output[pos + 3] === 0xeb) {\n // Read 24-bit address (little-endian in bytes 0-2)\n let addr = output[pos] | (output[pos + 1] << 8) | (output[pos + 2] << 16);\n\n // Sign-extend 24-bit to 32-bit\n if (addr & 0x800000) {\n addr |= 0xff000000;\n }\n\n // Convert absolute to relative:\n // Subtract current position (in words, so divide by 4)\n // ARM PC is 2 words (8 bytes) ahead during execution\n const relAddr = addr - (pos >>> 2);\n\n // Write back lower 24 bits\n output[pos] = relAddr & 0xff;\n output[pos + 1] = (relAddr >>> 8) & 0xff;\n output[pos + 2] = (relAddr >>> 16) & 0xff;\n }\n pos += 4;\n }\n\n return output;\n}\n\n/**\n * Create an ARM BCJ decoder Transform stream\n */\nexport function createBcjArmDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeBcjArm, properties, unpackSize);\n}\n"],"names":["bufferFrom","createBufferingDecoder","decodeBcjArm","input","_properties","_unpackSize","output","pos","length","addr","relAddr","createBcjArmDecoder","properties","unpackSize"],"mappings":"AAAA,4EAA4E;AAC5E,8DAA8D;AAC9D,EAAE;AACF,4EAA4E;AAC5E,iFAAiF;AACjF,EAAE;AACF,+DAA+D;AAE/D,SAASA,UAAU,QAAQ,wBAAwB;AAEnD,OAAOC,4BAA4B,8BAA8B;AAEjE;;;;;;;;;;;;;CAaC,GACD,OAAO,SAASC,aAAaC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACpF,MAAMC,SAASN,WAAWG,QAAQ,gCAAgC;IAClE,IAAII,MAAM;IAEV,mCAAmC;IACnC,MAAOA,MAAM,KAAKD,OAAOE,MAAM,CAAE;QAC/B,2CAA2C;QAC3C,IAAIF,MAAM,CAACC,MAAM,EAAE,KAAK,MAAM;YAC5B,mDAAmD;YACnD,IAAIE,OAAOH,MAAM,CAACC,IAAI,GAAID,MAAM,CAACC,MAAM,EAAE,IAAI,IAAMD,MAAM,CAACC,MAAM,EAAE,IAAI;YAEtE,+BAA+B;YAC/B,IAAIE,OAAO,UAAU;gBACnBA,QAAQ;YACV;YAEA,gCAAgC;YAChC,uDAAuD;YACvD,qDAAqD;YACrD,MAAMC,UAAUD,OAAQF,CAAAA,QAAQ,CAAA;YAEhC,2BAA2B;YAC3BD,MAAM,CAACC,IAAI,GAAGG,UAAU;YACxBJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,IAAK;YACpCJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,KAAM;QACvC;QACAH,OAAO;IACT;IAEA,OAAOD;AACT;AAEA;;CAEC,GACD,OAAO,SAASK,oBAAoBC,UAAmB,EAAEC,UAAmB;IAC1E,OAAOZ,uBAAuBC,cAAcU,YAAYC;AAC1D"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/BcjArm.ts"],"sourcesContent":["// BCJ (ARM 32-bit) filter codec - converts ARM branch instruction addresses\n// This filter makes ARM executables more compressible by LZMA\n//\n// ARM branch instructions (BL) use relative addressing. The filter converts\n// these to absolute addresses during compression, and back during decompression.\n//\n// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c\n//\n// This implementation uses true streaming - processes data chunk by chunk.\n\nimport { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';\n\n/**\n * Decode ARM BCJ filtered data (synchronous, for buffered use)\n * Reverses the BCJ transformation by converting absolute addresses back to relative\n *\n * ARM BL instruction format:\n * - 4 bytes aligned\n * - Byte pattern: XX XX XX EB (where EB = 0xEB opcode for BL)\n * - Lower 24 bits are signed offset (in words, not bytes)\n *\n * @param input - ARM BCJ filtered data\n * @param _properties - Unused for ARM BCJ\n * @param _unpackSize - Unused for ARM BCJ\n * @returns Unfiltered data\n */\nexport function decodeBcjArm(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n const output = bufferFrom(input); // Copy since we modify in place\n let pos = 0;\n\n // Process 4-byte aligned positions\n while (pos + 4 <= output.length) {\n // Check for BL instruction: byte 3 is 0xEB\n if (output[pos + 3] === 0xeb) {\n // Read 24-bit address (little-endian in bytes 0-2)\n let addr = output[pos] | (output[pos + 1] << 8) | (output[pos + 2] << 16);\n\n // Sign-extend 24-bit to 32-bit\n if (addr & 0x800000) {\n addr |= 0xff000000;\n }\n\n // Convert absolute to relative:\n // Subtract current position (in words, so divide by 4)\n const relAddr = addr - (pos >>> 2);\n\n // Write back lower 24 bits\n output[pos] = relAddr & 0xff;\n output[pos + 1] = (relAddr >>> 8) & 0xff;\n output[pos + 2] = (relAddr >>> 16) & 0xff;\n }\n pos += 4;\n }\n\n return output;\n}\n\n/**\n * Create a streaming ARM BCJ decoder Transform.\n * Processes data in 4-byte aligned chunks.\n */\nexport function createBcjArmDecoder(_properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform> {\n let globalPos = 0; // Position in the overall stream (in bytes)\n let pending: Buffer | null = null; // Incomplete 4-byte group\n\n const transform = new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: (err?: Error | null, data?: Buffer) => void) => {\n // Combine pending bytes with new chunk\n let data: Buffer;\n if (pending && pending.length > 0) {\n data = Buffer.concat([pending, chunk]);\n } else {\n data = chunk;\n }\n\n // Process only complete 4-byte groups\n const completeBytes = data.length - (data.length % 4);\n if (completeBytes === 0) {\n pending = data;\n callback(null, allocBuffer(0));\n return;\n }\n\n const output = bufferFrom(data.slice(0, completeBytes));\n pending = data.length > completeBytes ? 
data.slice(completeBytes) : null;\n\n let pos = 0;\n while (pos + 4 <= output.length) {\n if (output[pos + 3] === 0xeb) {\n let addr = output[pos] | (output[pos + 1] << 8) | (output[pos + 2] << 16);\n if (addr & 0x800000) {\n addr |= 0xff000000;\n }\n const relAddr = addr - (globalPos >>> 2);\n output[pos] = relAddr & 0xff;\n output[pos + 1] = (relAddr >>> 8) & 0xff;\n output[pos + 2] = (relAddr >>> 16) & 0xff;\n }\n pos += 4;\n globalPos += 4;\n }\n\n callback(null, output);\n },\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n if (pending && pending.length > 0) {\n this.push(pending);\n }\n callback(null);\n },\n });\n\n return transform;\n}\n"],"names":["allocBuffer","bufferFrom","Transform","decodeBcjArm","input","_properties","_unpackSize","output","pos","length","addr","relAddr","createBcjArmDecoder","globalPos","pending","transform","chunk","_encoding","callback","data","Buffer","concat","completeBytes","slice","flush","push"],"mappings":"AAAA,4EAA4E;AAC5E,8DAA8D;AAC9D,EAAE;AACF,4EAA4E;AAC5E,iFAAiF;AACjF,EAAE;AACF,+DAA+D;AAC/D,EAAE;AACF,2EAA2E;AAE3E,SAASA,WAAW,EAAEC,UAAU,EAAEC,SAAS,QAAQ,wBAAwB;AAE3E;;;;;;;;;;;;;CAaC,GACD,OAAO,SAASC,aAAaC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACpF,MAAMC,SAASN,WAAWG,QAAQ,gCAAgC;IAClE,IAAII,MAAM;IAEV,mCAAmC;IACnC,MAAOA,MAAM,KAAKD,OAAOE,MAAM,CAAE;QAC/B,2CAA2C;QAC3C,IAAIF,MAAM,CAACC,MAAM,EAAE,KAAK,MAAM;YAC5B,mDAAmD;YACnD,IAAIE,OAAOH,MAAM,CAACC,IAAI,GAAID,MAAM,CAACC,MAAM,EAAE,IAAI,IAAMD,MAAM,CAACC,MAAM,EAAE,IAAI;YAEtE,+BAA+B;YAC/B,IAAIE,OAAO,UAAU;gBACnBA,QAAQ;YACV;YAEA,gCAAgC;YAChC,uDAAuD;YACvD,MAAMC,UAAUD,OAAQF,CAAAA,QAAQ,CAAA;YAEhC,2BAA2B;YAC3BD,MAAM,CAACC,IAAI,GAAGG,UAAU;YACxBJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,IAAK;YACpCJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,KAAM;QACvC;QACAH,OAAO;IACT;IAEA,OAAOD;AACT;AAEA;;;CAGC,GACD,OAAO,SAASK,oBAAoBP,WAAoB,EAAEC,WAAoB;IAC5E,IAAIO,YAAY,GAAG,4CAA4C;IAC/D,IAAIC,UAAyB,MAAM,0BAA0B;IAE7D,MAAMC,YAAY,IAAIb,UAAU;QAC9Ba,WAAW,CAACC,OAAeC,WAAmBC;YAC5C,uCAAuC;YACvC,IAAIC;YACJ,IAAIL,WAAWA,QAAQL,MAAM,GAAG,GAAG;gBACjCU,OAAOC,OAAOC,MAAM,CAAC;oBAACP;oBAASE;iBAAM;YACvC,OAAO;gBACLG,OAAOH;YACT;YAEA,sCAAsC;YACtC,MAAMM,gBAAgBH,KAAKV,MAAM,GAAIU,KAAKV,MAAM,GAAG;YACnD,IAAIa,kBAAkB,GAAG;gBACvBR,UAAUK;gBACVD,SAAS,MAAMlB,YAAY;gBAC3B;YACF;YAEA,MAAMO,SAASN,WAAWkB,KAAKI,KAAK,CAAC,GAAGD;YACxCR,UAAUK,KAAKV,MAAM,GAAGa,gBAAgBH,KAAKI,KAAK,CAACD,iBAAiB;YAEpE,IAAId,MAAM;YACV,MAAOA,MAAM,KAAKD,OAAOE,MAAM,CAAE;gBAC/B,IAAIF,MAAM,CAACC,MAAM,EAAE,KAAK,MAAM;oBAC5B,IAAIE,OAAOH,MAAM,CAACC,IAAI,GAAID,MAAM,CAACC,MAAM,EAAE,IAAI,IAAMD,MAAM,CAACC,MAAM,EAAE,IAAI;oBACtE,IAAIE,OAAO,UAAU;wBACnBA,QAAQ;oBACV;oBACA,MAAMC,UAAUD,OAAQG,CAAAA,cAAc,CAAA;oBACtCN,MAAM,CAACC,IAAI,GAAGG,UAAU;oBACxBJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,IAAK;oBACpCJ,MAAM,CAACC,MAAM,EAAE,GAAG,AAACG,YAAY,KAAM;gBACvC;gBACAH,OAAO;gBACPK,aAAa;YACf;YAEAK,SAAS,MAAMX;QACjB;QACAiB,OAAO,SAAgDN,QAAsC;YAC3F,IAAIJ,WAAWA,QAAQL,MAAM,GAAG,GAAG;gBACjC,IAAI,CAACgB,IAAI,CAACX;YACZ;YACAI,SAAS;QACX;IACF;IAEA,OAAOH;AACT"}
@@ -1,10 +1,9 @@
- import Stream from 'stream';
- type Transform = Stream.Transform;
+ import { type Transform } from 'extract-base-iterator';
  /**
  * Create a Copy decoder stream
  * Simply passes through data unchanged
  */
- export declare function createCopyDecoder(): Transform;
+ export declare function createCopyDecoder(): InstanceType<typeof Transform>;
  /**
  * Decode a buffer using Copy codec (no-op)
  * @param input - Input buffer
@@ -13,4 +12,3 @@ export declare function createCopyDecoder(): Transform;
  * @returns Same buffer (no transformation)
  */
  export declare function decodeCopy(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer;
- export {};
@@ -1,14 +1,6 @@
  // Copy codec - passthrough (no compression)
  // This is the simplest codec, just passes data through unchanged
- import Stream from 'stream';
- // Use native streams when available, readable-stream only for Node 0.x
- const major = +process.versions.node.split('.')[0];
- let PassThrough;
- if (major > 0) {
- PassThrough = Stream.PassThrough;
- } else {
- PassThrough = require('readable-stream').PassThrough;
- }
+ import { PassThrough } from 'extract-base-iterator';
  /**
  * Create a Copy decoder stream
  * Simply passes through data unchanged
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Copy.ts"],"sourcesContent":["// Copy codec - passthrough (no compression)\n// This is the simplest codec, just passes data through unchanged\n\nimport Stream from 'stream';\n\n// Use native streams when available, readable-stream only for Node 0.x\nconst major = +process.versions.node.split('.')[0];\nlet PassThrough: typeof Stream.PassThrough;\nif (major > 0) {\n PassThrough = Stream.PassThrough;\n} else {\n PassThrough = require('readable-stream').PassThrough;\n}\ntype Transform = Stream.Transform;\n\n/**\n * Create a Copy decoder stream\n * Simply passes through data unchanged\n */\nexport function createCopyDecoder(): Transform {\n return new PassThrough();\n}\n\n/**\n * Decode a buffer using Copy codec (no-op)\n * @param input - Input buffer\n * @param _properties - Unused\n * @param _unpackSize - Unused\n * @returns Same buffer (no transformation)\n */\nexport function decodeCopy(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n return input;\n}\n"],"names":["Stream","major","process","versions","node","split","PassThrough","require","createCopyDecoder","decodeCopy","input","_properties","_unpackSize"],"mappings":"AAAA,4CAA4C;AAC5C,iEAAiE;AAEjE,OAAOA,YAAY,SAAS;AAE5B,uEAAuE;AACvE,MAAMC,QAAQ,CAACC,QAAQC,QAAQ,CAACC,IAAI,CAACC,KAAK,CAAC,IAAI,CAAC,EAAE;AAClD,IAAIC;AACJ,IAAIL,QAAQ,GAAG;IACbK,cAAcN,OAAOM,WAAW;AAClC,OAAO;IACLA,cAAcC,QAAQ,mBAAmBD,WAAW;AACtD;AAGA;;;CAGC,GACD,OAAO,SAASE;IACd,OAAO,IAAIF;AACb;AAEA;;;;;;CAMC,GACD,OAAO,SAASG,WAAWC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IAClF,OAAOF;AACT"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Copy.ts"],"sourcesContent":["// Copy codec - passthrough (no compression)\n// This is the simplest codec, just passes data through unchanged\n\nimport { PassThrough, type Transform } from 'extract-base-iterator';\n\n/**\n * Create a Copy decoder stream\n * Simply passes through data unchanged\n */\nexport function createCopyDecoder(): InstanceType<typeof Transform> {\n return new PassThrough();\n}\n\n/**\n * Decode a buffer using Copy codec (no-op)\n * @param input - Input buffer\n * @param _properties - Unused\n * @param _unpackSize - Unused\n * @returns Same buffer (no transformation)\n */\nexport function decodeCopy(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n return input;\n}\n"],"names":["PassThrough","createCopyDecoder","decodeCopy","input","_properties","_unpackSize"],"mappings":"AAAA,4CAA4C;AAC5C,iEAAiE;AAEjE,SAASA,WAAW,QAAwB,wBAAwB;AAEpE;;;CAGC,GACD,OAAO,SAASC;IACd,OAAO,IAAID;AACb;AAEA;;;;;;CAMC,GACD,OAAO,SAASE,WAAWC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IAClF,OAAOF;AACT"}
@@ -1,6 +1,6 @@
- import type { Transform } from 'readable-stream';
+ import type { Transform } from 'stream';
  /**
- * Decode Deflate compressed data
+ * Decode Deflate compressed data synchronously
  *
  * @param input - Deflate compressed data
  * @param _properties - Unused for Deflate
@@ -9,6 +9,8 @@ import type { Transform } from 'readable-stream';
  */
  export declare function decodeDeflate(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer;
  /**
- * Create a Deflate decoder Transform stream
+ * Create a Deflate decoder Transform stream.
+ * Uses zlib's streaming createInflateRaw() for true streaming decompression.
+ * Data is decompressed incrementally as it flows through, not buffered.
  */
- export declare function createDeflateDecoder(properties?: Buffer, unpackSize?: number): Transform;
+ export declare function createDeflateDecoder(_properties?: Buffer, _unpackSize?: number): Transform;
@@ -1,11 +1,11 @@
  // Deflate codec - standard zlib/zip compression
  // 7z uses raw deflate without zlib or gzip headers
  //
- // Uses native zlib on Node 0.11.12+, falls back to pako for older versions
- import { inflateRaw } from 'extract-base-iterator';
- import createBufferingDecoder from './createBufferingDecoder.js';
+ // Uses native zlib.createInflateRaw() for true streaming decompression
+ // Falls back to pako for older Node versions via extract-base-iterator
+ import { createInflateRawStream, inflateRaw } from 'extract-base-iterator';
  /**
- * Decode Deflate compressed data
+ * Decode Deflate compressed data synchronously
  *
  * @param input - Deflate compressed data
  * @param _properties - Unused for Deflate
@@ -15,7 +15,9 @@ import createBufferingDecoder from './createBufferingDecoder.js';
  return inflateRaw(input);
  }
  /**
- * Create a Deflate decoder Transform stream
- */ export function createDeflateDecoder(properties, unpackSize) {
- return createBufferingDecoder(decodeDeflate, properties, unpackSize);
+ * Create a Deflate decoder Transform stream.
+ * Uses zlib's streaming createInflateRaw() for true streaming decompression.
+ * Data is decompressed incrementally as it flows through, not buffered.
+ */ export function createDeflateDecoder(_properties, _unpackSize) {
+ return createInflateRawStream();
  }
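
For context, the sketch below shows what the new streaming path amounts to on modern Node, using the built-in zlib.createInflateRaw(); createInflateRawStream from extract-base-iterator is assumed here to be a thin wrapper that adds the pako fallback mentioned in the comments above.

    // Raw deflate (no zlib/gzip header) streamed through Node's built-in inflater.
    import { createInflateRaw, deflateRawSync } from 'zlib';

    const compressed = deflateRawSync(Buffer.from('hello 7z'));
    const inflate = createInflateRaw();
    const out: Buffer[] = [];
    inflate.on('data', (c: Buffer) => out.push(c));
    inflate.on('end', () => console.log(Buffer.concat(out).toString())); // "hello 7z"
    inflate.end(compressed);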
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Deflate.ts"],"sourcesContent":["// Deflate codec - standard zlib/zip compression\n// 7z uses raw deflate without zlib or gzip headers\n//\n// Uses native zlib on Node 0.11.12+, falls back to pako for older versions\n\nimport { inflateRaw } from 'extract-base-iterator';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\n\n/**\n * Decode Deflate compressed data\n *\n * @param input - Deflate compressed data\n * @param _properties - Unused for Deflate\n * @param _unpackSize - Unused for Deflate\n * @returns Decompressed data\n */\nexport function decodeDeflate(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n return inflateRaw(input);\n}\n\n/**\n * Create a Deflate decoder Transform stream\n */\nexport function createDeflateDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeDeflate, properties, unpackSize);\n}\n"],"names":["inflateRaw","createBufferingDecoder","decodeDeflate","input","_properties","_unpackSize","createDeflateDecoder","properties","unpackSize"],"mappings":"AAAA,gDAAgD;AAChD,mDAAmD;AACnD,EAAE;AACF,2EAA2E;AAE3E,SAASA,UAAU,QAAQ,wBAAwB;AAEnD,OAAOC,4BAA4B,8BAA8B;AAEjE;;;;;;;CAOC,GACD,OAAO,SAASC,cAAcC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACrF,OAAOL,WAAWG;AACpB;AAEA;;CAEC,GACD,OAAO,SAASG,qBAAqBC,UAAmB,EAAEC,UAAmB;IAC3E,OAAOP,uBAAuBC,eAAeK,YAAYC;AAC3D"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Deflate.ts"],"sourcesContent":["// Deflate codec - standard zlib/zip compression\n// 7z uses raw deflate without zlib or gzip headers\n//\n// Uses native zlib.createInflateRaw() for true streaming decompression\n// Falls back to pako for older Node versions via extract-base-iterator\n\nimport { createInflateRawStream, inflateRaw } from 'extract-base-iterator';\nimport type { Transform } from 'stream';\n\n/**\n * Decode Deflate compressed data synchronously\n *\n * @param input - Deflate compressed data\n * @param _properties - Unused for Deflate\n * @param _unpackSize - Unused for Deflate\n * @returns Decompressed data\n */\nexport function decodeDeflate(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n return inflateRaw(input);\n}\n\n/**\n * Create a Deflate decoder Transform stream.\n * Uses zlib's streaming createInflateRaw() for true streaming decompression.\n * Data is decompressed incrementally as it flows through, not buffered.\n */\nexport function createDeflateDecoder(_properties?: Buffer, _unpackSize?: number): Transform {\n return createInflateRawStream() as Transform;\n}\n"],"names":["createInflateRawStream","inflateRaw","decodeDeflate","input","_properties","_unpackSize","createDeflateDecoder"],"mappings":"AAAA,gDAAgD;AAChD,mDAAmD;AACnD,EAAE;AACF,uEAAuE;AACvE,uEAAuE;AAEvE,SAASA,sBAAsB,EAAEC,UAAU,QAAQ,wBAAwB;AAG3E;;;;;;;CAOC,GACD,OAAO,SAASC,cAAcC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACrF,OAAOJ,WAAWE;AACpB;AAEA;;;;CAIC,GACD,OAAO,SAASG,qBAAqBF,WAAoB,EAAEC,WAAoB;IAC7E,OAAOL;AACT"}
@@ -1,6 +1,6 @@
- import type { Transform } from 'readable-stream';
+ import { Transform } from 'extract-base-iterator';
  /**
- * Decode Delta filtered data
+ * Decode Delta filtered data (synchronous, for buffered use)
  * Reverses the delta transformation by adding previous values
  *
  * @param input - Delta filtered data
@@ -10,6 +10,7 @@ import type { Transform } from 'readable-stream';
  */
  export declare function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer;
  /**
- * Create a Delta decoder Transform stream
+ * Create a streaming Delta decoder Transform.
+ * Processes data chunk by chunk, maintaining state between chunks.
  */
- export declare function createDeltaDecoder(properties?: Buffer, unpackSize?: number): Transform;
+ export declare function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform>;
@@ -4,10 +4,12 @@
  // The Delta filter stores the difference between each byte and the byte
  // N positions before it, where N is the "distance" parameter (default 1).
  // This makes data with regular patterns more compressible.
- import { bufferFrom } from 'extract-base-iterator';
- import createBufferingDecoder from './createBufferingDecoder.js';
+ //
+ // This implementation uses true streaming - processes data chunk by chunk
+ // while maintaining state between chunks.
+ import { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';
  /**
- * Decode Delta filtered data
+ * Decode Delta filtered data (synchronous, for buffered use)
  * Reverses the delta transformation by adding previous values
  *
  * @param input - Delta filtered data
@@ -23,9 +25,9 @@ import createBufferingDecoder from './createBufferingDecoder.js';
  }
  const output = bufferFrom(input); // Copy since we modify in place
  // State buffer for multi-byte distance
- const state = new Array(distance);
+ const state = [];
  for(let i = 0; i < distance; i++){
- state[i] = 0;
+ state.push(0);
  }
  for(let j = 0; j < output.length; j++){
  const idx = j % distance;
@@ -35,7 +37,30 @@ import createBufferingDecoder from './createBufferingDecoder.js';
  return output;
  }
  /**
- * Create a Delta decoder Transform stream
- */ export function createDeltaDecoder(properties, unpackSize) {
- return createBufferingDecoder(decodeDelta, properties, unpackSize);
+ * Create a streaming Delta decoder Transform.
+ * Processes data chunk by chunk, maintaining state between chunks.
+ */ export function createDeltaDecoder(properties, _unpackSize) {
+ // Distance parameter: default is 1
+ let distance = 1;
+ if (properties && properties.length >= 1) {
+ distance = properties[0] + 1;
+ }
+ // State buffer for multi-byte distance
+ const state = [];
+ for(let i = 0; i < distance; i++){
+ state.push(0);
+ }
+ let byteIndex = 0;
+ return new Transform({
+ transform: (chunk, _encoding, callback)=>{
+ const output = allocBuffer(chunk.length);
+ for(let j = 0; j < chunk.length; j++){
+ const idx = byteIndex % distance;
+ state[idx] = state[idx] + chunk[j] & 0xff;
+ output[j] = state[idx];
+ byteIndex++;
+ }
+ callback(null, output);
+ }
+ });
  }
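
The delta unfilter implemented above is a running sum per position class (j mod distance). A standalone sketch (editorial, not from the package) with a small worked example:

    // Each output byte is the running sum (mod 256) of the filtered bytes that
    // share the same position modulo `distance`.
    function unfilterDelta(filtered: Buffer, distance = 1): Buffer {
      const out = Buffer.alloc(filtered.length);
      const state = new Array<number>(distance).fill(0);
      for (let j = 0; j < filtered.length; j++) {
        const idx = j % distance;
        state[idx] = (state[idx] + filtered[j]) & 0xff;
        out[j] = state[idx];
      }
      return out;
    }

    // distance 1: deltas [10, 1, 1, 253] decode to [10, 11, 12, 9] (sum wraps at 256).
    console.log(Array.from(unfilterDelta(Buffer.from([10, 1, 1, 253]))));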
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Delta.ts"],"sourcesContent":["// Delta filter codec - stores differences between consecutive bytes\n// Useful for data with gradual changes (images, audio, sensor data)\n//\n// The Delta filter stores the difference between each byte and the byte\n// N positions before it, where N is the \"distance\" parameter (default 1).\n// This makes data with regular patterns more compressible.\n\nimport { bufferFrom } from 'extract-base-iterator';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\n\n/**\n * Decode Delta filtered data\n * Reverses the delta transformation by adding previous values\n *\n * @param input - Delta filtered data\n * @param properties - Optional 1-byte properties (distance - 1)\n * @param _unpackSize - Unused for Delta\n * @returns Unfiltered data\n */\nexport function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n // Properties byte contains (distance - 1)\n distance = properties[0] + 1;\n }\n\n const output = bufferFrom(input); // Copy since we modify in place\n\n // State buffer for multi-byte distance\n const state = new Array(distance);\n for (let i = 0; i < distance; i++) {\n state[i] = 0;\n }\n\n for (let j = 0; j < output.length; j++) {\n const idx = j % distance;\n state[idx] = (state[idx] + output[j]) & 0xff;\n output[j] = state[idx];\n }\n\n return output;\n}\n\n/**\n * Create a Delta decoder Transform stream\n */\nexport function createDeltaDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeDelta, properties, unpackSize);\n}\n"],"names":["bufferFrom","createBufferingDecoder","decodeDelta","input","properties","_unpackSize","distance","length","output","state","Array","i","j","idx","createDeltaDecoder","unpackSize"],"mappings":"AAAA,oEAAoE;AACpE,oEAAoE;AACpE,EAAE;AACF,wEAAwE;AACxE,0EAA0E;AAC1E,2DAA2D;AAE3D,SAASA,UAAU,QAAQ,wBAAwB;AAEnD,OAAOC,4BAA4B,8BAA8B;AAEjE;;;;;;;;CAQC,GACD,OAAO,SAASC,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,WAAoB;IAClF,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxC,0CAA0C;QAC1CD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,MAAMI,SAASR,WAAWG,QAAQ,gCAAgC;IAElE,uCAAuC;IACvC,MAAMM,QAAQ,IAAIC,MAAMJ;IACxB,IAAK,IAAIK,IAAI,GAAGA,IAAIL,UAAUK,IAAK;QACjCF,KAAK,CAACE,EAAE,GAAG;IACb;IAEA,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,OAAOD,MAAM,EAAEK,IAAK;QACtC,MAAMC,MAAMD,IAAIN;QAChBG,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGL,MAAM,CAACI,EAAE,GAAI;QACxCJ,MAAM,CAACI,EAAE,GAAGH,KAAK,CAACI,IAAI;IACxB;IAEA,OAAOL;AACT;AAEA;;CAEC,GACD,OAAO,SAASM,mBAAmBV,UAAmB,EAAEW,UAAmB;IACzE,OAAOd,uBAAuBC,aAAaE,YAAYW;AACzD"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Delta.ts"],"sourcesContent":["// Delta filter codec - stores differences between consecutive bytes\n// Useful for data with gradual changes (images, audio, sensor data)\n//\n// The Delta filter stores the difference between each byte and the byte\n// N positions before it, where N is the \"distance\" parameter (default 1).\n// This makes data with regular patterns more compressible.\n//\n// This implementation uses true streaming - processes data chunk by chunk\n// while maintaining state between chunks.\n\nimport { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';\n\n/**\n * Decode Delta filtered data (synchronous, for buffered use)\n * Reverses the delta transformation by adding previous values\n *\n * @param input - Delta filtered data\n * @param properties - Optional 1-byte properties (distance - 1)\n * @param _unpackSize - Unused for Delta\n * @returns Unfiltered data\n */\nexport function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n // Properties byte contains (distance - 1)\n distance = properties[0] + 1;\n }\n\n const output = bufferFrom(input); // Copy since we modify in place\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n for (let j = 0; j < output.length; j++) {\n const idx = j % distance;\n state[idx] = (state[idx] + output[j]) & 0xff;\n output[j] = state[idx];\n }\n\n return output;\n}\n\n/**\n * Create a streaming Delta decoder Transform.\n * Processes data chunk by chunk, maintaining state between chunks.\n */\nexport function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform> {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n distance = properties[0] + 1;\n }\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n let byteIndex = 0;\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: (err?: Error | null, data?: Buffer) => void) => {\n const output = allocBuffer(chunk.length);\n\n for (let j = 0; j < chunk.length; j++) {\n const idx = byteIndex % distance;\n state[idx] = (state[idx] + chunk[j]) & 0xff;\n output[j] = state[idx];\n byteIndex++;\n }\n\n callback(null, output);\n },\n 
});\n}\n"],"names":["allocBuffer","bufferFrom","Transform","decodeDelta","input","properties","_unpackSize","distance","length","output","state","i","push","j","idx","createDeltaDecoder","byteIndex","transform","chunk","_encoding","callback"],"mappings":"AAAA,oEAAoE;AACpE,oEAAoE;AACpE,EAAE;AACF,wEAAwE;AACxE,0EAA0E;AAC1E,2DAA2D;AAC3D,EAAE;AACF,0EAA0E;AAC1E,0CAA0C;AAE1C,SAASA,WAAW,EAAEC,UAAU,EAAEC,SAAS,QAAQ,wBAAwB;AAE3E;;;;;;;;CAQC,GACD,OAAO,SAASC,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,WAAoB;IAClF,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxC,0CAA0C;QAC1CD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,MAAMI,SAASR,WAAWG,QAAQ,gCAAgC;IAElE,uCAAuC;IACvC,MAAMM,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,UAAUI,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,OAAOD,MAAM,EAAEK,IAAK;QACtC,MAAMC,MAAMD,IAAIN;QAChBG,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGL,MAAM,CAACI,EAAE,GAAI;QACxCJ,MAAM,CAACI,EAAE,GAAGH,KAAK,CAACI,IAAI;IACxB;IAEA,OAAOL;AACT;AAEA;;;CAGC,GACD,OAAO,SAASM,mBAAmBV,UAAmB,EAAEC,WAAoB;IAC1E,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxCD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,uCAAuC;IACvC,MAAMK,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,UAAUI,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAII,YAAY;IAEhB,OAAO,IAAId,UAAU;QACnBe,WAAW,CAACC,OAAeC,WAAmBC;YAC5C,MAAMX,SAAST,YAAYkB,MAAMV,MAAM;YAEvC,IAAK,IAAIK,IAAI,GAAGA,IAAIK,MAAMV,MAAM,EAAEK,IAAK;gBACrC,MAAMC,MAAME,YAAYT;gBACxBG,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGI,KAAK,CAACL,EAAE,GAAI;gBACvCJ,MAAM,CAACI,EAAE,GAAGH,KAAK,CAACI,IAAI;gBACtBE;YACF;YAEAI,SAAS,MAAMX;QACjB;IACF;AACF"}
@@ -1,14 +1,17 @@
- import type { Transform } from 'readable-stream';
+ import type { Transform } from 'stream';
  /**
  * Decode LZMA compressed data to buffer
  *
  * @param input - LZMA compressed data
  * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)
- * @param unpackSize - Expected output size (optional, -1 for unknown)
+ * @param unpackSize - Expected output size
  * @returns Decompressed data
  */
  export declare function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
  /**
  * Create an LZMA decoder Transform stream
+ *
+ * Note: LZMA1 has no chunk boundaries, so this buffers all input
+ * and decompresses when the stream ends.
  */
  export declare function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform;
@@ -1,40 +1,33 @@
- import Module from 'module';
- const _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;
- import createBufferingDecoder from './createBufferingDecoder.js';
- import { createInputStream, createOutputStream } from './streams.js';
- // Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)
- // Path accounts for build output in dist/esm/sevenz/codecs/
- const { LZMA } = _require('../../../../assets/lzma-purejs');
- const LzmaDecoder = LZMA.Decoder;
+ // LZMA codec using TypeScript LZMA decoder
+ // LZMA properties in 7z are 5 bytes: 1 byte lc/lp/pb + 4 bytes dictionary size (little-endian)
+ import { createLzmaDecoder as createLzmaTransform, decodeLzma as lzmaDecode } from '../../lzma/index.js';
  /**
  * Decode LZMA compressed data to buffer
  *
  * @param input - LZMA compressed data
  * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)
- * @param unpackSize - Expected output size (optional, -1 for unknown)
+ * @param unpackSize - Expected output size
  * @returns Decompressed data
  */ export function decodeLzma(input, properties, unpackSize) {
  if (!properties || properties.length < 5) {
  throw new Error('LZMA requires 5-byte properties');
  }
- const decoder = new LzmaDecoder();
- // setDecoderProperties expects array-like with 5 bytes
- if (!decoder.setDecoderProperties(properties)) {
- throw new Error('Invalid LZMA properties');
+ if (typeof unpackSize !== 'number' || unpackSize < 0) {
+ throw new Error('LZMA requires known unpack size');
  }
- const inStream = createInputStream(input, 0, input.length);
- // Use -1 for unknown size (decoder will use end marker)
- const size = typeof unpackSize === 'number' ? unpackSize : -1;
- // Pre-allocate output stream if size is known (memory optimization)
- const outStream = createOutputStream(size > 0 ? size : undefined);
- const success = decoder.code(inStream, outStream, size);
- if (!success) {
- throw new Error('LZMA decompression failed');
- }
- return outStream.toBuffer();
+ return lzmaDecode(input, properties, unpackSize);
  }
  /**
  * Create an LZMA decoder Transform stream
+ *
+ * Note: LZMA1 has no chunk boundaries, so this buffers all input
+ * and decompresses when the stream ends.
  */ export function createLzmaDecoder(properties, unpackSize) {
- return createBufferingDecoder(decodeLzma, properties, unpackSize);
+ if (!properties || properties.length < 5) {
+ throw new Error('LZMA requires 5-byte properties');
+ }
+ if (typeof unpackSize !== 'number' || unpackSize < 0) {
+ throw new Error('LZMA requires known unpack size');
+ }
+ return createLzmaTransform(properties, unpackSize);
  }
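
The 5-byte properties layout referenced above (1 byte lc/lp/pb + 4-byte little-endian dictionary size) can be unpacked as in the sketch below. The ((pb * 5 + lp) * 9 + lc) packing follows the LZMA SDK convention and is background, not code from this package.

    function parseLzmaProps(props: Buffer) {
      if (props.length < 5) throw new Error('LZMA requires 5-byte properties');
      let d = props[0];
      const lc = d % 9;          // literal context bits
      d = (d / 9) | 0;
      const lp = d % 5;          // literal position bits
      const pb = (d / 5) | 0;    // position bits
      const dictSize = props.readUInt32LE(1);
      return { lc, lp, pb, dictSize };
    }

    // Typical default properties byte 0x5d = (2 * 5 + 0) * 9 + 3 -> lc=3, lp=0, pb=2.
    console.log(parseLzmaProps(Buffer.from([0x5d, 0x00, 0x00, 0x10, 0x00])));
    // { lc: 3, lp: 0, pb: 2, dictSize: 1048576 }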
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma.ts"],"sourcesContent":["import Module from 'module';\n\nconst _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;\n\n// LZMA codec using lzma-purejs\n// LZMA properties in 7z are 5 bytes: 1 byte lc/lp/pb + 4 bytes dictionary size (little-endian)\n\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\nimport { createInputStream, createOutputStream } from './streams.ts';\n\n// Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)\n// Path accounts for build output in dist/esm/sevenz/codecs/\nconst { LZMA } = _require('../../../../assets/lzma-purejs');\nconst LzmaDecoder = LZMA.Decoder;\n\n/**\n * Decode LZMA compressed data to buffer\n *\n * @param input - LZMA compressed data\n * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)\n * @param unpackSize - Expected output size (optional, -1 for unknown)\n * @returns Decompressed data\n */\nexport function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 5) {\n throw new Error('LZMA requires 5-byte properties');\n }\n\n const decoder = new LzmaDecoder();\n\n // setDecoderProperties expects array-like with 5 bytes\n if (!decoder.setDecoderProperties(properties)) {\n throw new Error('Invalid LZMA properties');\n }\n\n const inStream = createInputStream(input, 0, input.length);\n\n // Use -1 for unknown size (decoder will use end marker)\n const size = typeof unpackSize === 'number' ? unpackSize : -1;\n\n // Pre-allocate output stream if size is known (memory optimization)\n const outStream = createOutputStream(size > 0 ? size : undefined);\n\n const success = decoder.code(inStream, outStream, size);\n if (!success) {\n throw new Error('LZMA decompression failed');\n }\n\n return outStream.toBuffer();\n}\n\n/**\n * Create an LZMA decoder Transform stream\n */\nexport function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeLzma, properties, unpackSize);\n}\n"],"names":["Module","_require","require","createRequire","url","createBufferingDecoder","createInputStream","createOutputStream","LZMA","LzmaDecoder","Decoder","decodeLzma","input","properties","unpackSize","length","Error","decoder","setDecoderProperties","inStream","size","outStream","undefined","success","code","toBuffer","createLzmaDecoder"],"mappings":"AAAA,OAAOA,YAAY,SAAS;AAE5B,MAAMC,WAAW,OAAOC,YAAY,cAAcF,OAAOG,aAAa,CAAC,YAAYC,GAAG,IAAIF;AAM1F,OAAOG,4BAA4B,8BAA8B;AACjE,SAASC,iBAAiB,EAAEC,kBAAkB,QAAQ,eAAe;AAErE,sFAAsF;AACtF,4DAA4D;AAC5D,MAAM,EAAEC,IAAI,EAAE,GAAGP,SAAS;AAC1B,MAAMQ,cAAcD,KAAKE,OAAO;AAEhC;;;;;;;CAOC,GACD,OAAO,SAASC,WAAWC,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IAChF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,MAAMC,UAAU,IAAIR;IAEpB,uDAAuD;IACvD,IAAI,CAACQ,QAAQC,oBAAoB,CAACL,aAAa;QAC7C,MAAM,IAAIG,MAAM;IAClB;IAEA,MAAMG,WAAWb,kBAAkBM,OAAO,GAAGA,MAAMG,MAAM;IAEzD,wDAAwD;IACxD,MAAMK,OAAO,OAAON,eAAe,WAAWA,aAAa,CAAC;IAE5D,oEAAoE;IACpE,MAAMO,YAAYd,mBAAmBa,OAAO,IAAIA,OAAOE;IAEvD,MAAMC,UAAUN,QAAQO,IAAI,CAACL,UAAUE,WAAWD;IAClD,IAAI,CAACG,SAAS;QACZ,MAAM,IAAIP,MAAM;IAClB;IAEA,OAAOK,UAAUI,QAAQ;AAC3B;AAEA;;CAEC,GACD,OAAO,SAASC,kBAAkBb,UAAmB,EAAEC,UAAmB;IACxE,OAAOT,uBAAuBM,YAAYE,YAAYC;AACxD"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma.ts"],"sourcesContent":["// LZMA codec using TypeScript LZMA decoder\n// LZMA properties in 7z are 5 bytes: 1 byte lc/lp/pb + 4 bytes dictionary size (little-endian)\n\nimport type { Transform } from 'stream';\nimport { createLzmaDecoder as createLzmaTransform, decodeLzma as lzmaDecode } from '../../lzma/index.ts';\n\n/**\n * Decode LZMA compressed data to buffer\n *\n * @param input - LZMA compressed data\n * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)\n * @param unpackSize - Expected output size\n * @returns Decompressed data\n */\nexport function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 5) {\n throw new Error('LZMA requires 5-byte properties');\n }\n\n if (typeof unpackSize !== 'number' || unpackSize < 0) {\n throw new Error('LZMA requires known unpack size');\n }\n\n return lzmaDecode(input, properties, unpackSize);\n}\n\n/**\n * Create an LZMA decoder Transform stream\n *\n * Note: LZMA1 has no chunk boundaries, so this buffers all input\n * and decompresses when the stream ends.\n */\nexport function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform {\n if (!properties || properties.length < 5) {\n throw new Error('LZMA requires 5-byte properties');\n }\n\n if (typeof unpackSize !== 'number' || unpackSize < 0) {\n throw new Error('LZMA requires known unpack size');\n }\n\n return createLzmaTransform(properties, unpackSize) as Transform;\n}\n"],"names":["createLzmaDecoder","createLzmaTransform","decodeLzma","lzmaDecode","input","properties","unpackSize","length","Error"],"mappings":"AAAA,2CAA2C;AAC3C,+FAA+F;AAG/F,SAASA,qBAAqBC,mBAAmB,EAAEC,cAAcC,UAAU,QAAQ,sBAAsB;AAEzG;;;;;;;CAOC,GACD,OAAO,SAASD,WAAWE,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IAChF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAI,OAAOF,eAAe,YAAYA,aAAa,GAAG;QACpD,MAAM,IAAIE,MAAM;IAClB;IAEA,OAAOL,WAAWC,OAAOC,YAAYC;AACvC;AAEA;;;;;CAKC,GACD,OAAO,SAASN,kBAAkBK,UAAmB,EAAEC,UAAmB;IACxE,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAI,OAAOF,eAAe,YAAYA,aAAa,GAAG;QACpD,MAAM,IAAIE,MAAM;IAClB;IAEA,OAAOP,oBAAoBI,YAAYC;AACzC"}
@@ -1,14 +1,20 @@
- import type { Transform } from 'readable-stream';
+ import type { Transform } from 'stream';
  /**
  * Decode LZMA2 compressed data to buffer
  *
  * @param input - LZMA2 compressed data
  * @param properties - Properties buffer (1 byte: dictionary size)
- * @param unpackSize - Expected output size (used for pre-allocation to reduce memory)
+ * @param unpackSize - Expected output size (optional, for pre-allocation)
  * @returns Decompressed data
  */
  export declare function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
  /**
  * Create an LZMA2 decoder Transform stream
+ *
+ * This is a true streaming decoder that processes LZMA2 chunks incrementally.
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
+ *
+ * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of
+ * total archive size.
  */
- export declare function createLzma2Decoder(properties?: Buffer, unpackSize?: number): Transform;
+ export declare function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform;
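
For reference, the 1-byte LZMA2 dictionary-size property mentioned above is conventionally decoded as in the sketch below (per the xz/LZMA2 format description; background only, not code from this package). Values 0-39 encode sizes from 4 KiB up to 3 GiB; 40 means 4 GiB - 1.

    function lzma2DictSize(prop: number): number {
      if (prop > 40) throw new Error('Invalid LZMA2 dictionary size property');
      if (prop === 40) return 0xffffffff;
      // mantissa 2 or 3, scaled by a power of two; avoids 32-bit shift overflow in JS
      return (2 | (prop & 1)) * 2 ** (Math.floor(prop / 2) + 11);
    }

    console.log(lzma2DictSize(0));  // 4096 (4 KiB)
    console.log(lzma2DictSize(19)); // 3145728 (3 MiB)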