xz-compat 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145) hide show
  1. package/LICENSE +42 -0
  2. package/README.md +248 -0
  3. package/dist/cjs/compat.d.cts +1 -0
  4. package/dist/cjs/compat.d.ts +1 -0
  5. package/dist/cjs/compat.js +23 -0
  6. package/dist/cjs/compat.js.map +1 -0
  7. package/dist/cjs/filters/bcj/Bcj.d.cts +16 -0
  8. package/dist/cjs/filters/bcj/Bcj.d.ts +16 -0
  9. package/dist/cjs/filters/bcj/Bcj.js +192 -0
  10. package/dist/cjs/filters/bcj/Bcj.js.map +1 -0
  11. package/dist/cjs/filters/bcj/BcjArm.d.cts +16 -0
  12. package/dist/cjs/filters/bcj/BcjArm.d.ts +16 -0
  13. package/dist/cjs/filters/bcj/BcjArm.js +122 -0
  14. package/dist/cjs/filters/bcj/BcjArm.js.map +1 -0
  15. package/dist/cjs/filters/bcj/BcjArm64.d.cts +21 -0
  16. package/dist/cjs/filters/bcj/BcjArm64.d.ts +21 -0
  17. package/dist/cjs/filters/bcj/BcjArm64.js +65 -0
  18. package/dist/cjs/filters/bcj/BcjArm64.js.map +1 -0
  19. package/dist/cjs/filters/bcj/BcjArmt.d.cts +19 -0
  20. package/dist/cjs/filters/bcj/BcjArmt.d.ts +19 -0
  21. package/dist/cjs/filters/bcj/BcjArmt.js +76 -0
  22. package/dist/cjs/filters/bcj/BcjArmt.js.map +1 -0
  23. package/dist/cjs/filters/bcj/BcjIa64.d.cts +15 -0
  24. package/dist/cjs/filters/bcj/BcjIa64.d.ts +15 -0
  25. package/dist/cjs/filters/bcj/BcjIa64.js +141 -0
  26. package/dist/cjs/filters/bcj/BcjIa64.js.map +1 -0
  27. package/dist/cjs/filters/bcj/BcjPpc.d.cts +20 -0
  28. package/dist/cjs/filters/bcj/BcjPpc.d.ts +20 -0
  29. package/dist/cjs/filters/bcj/BcjPpc.js +64 -0
  30. package/dist/cjs/filters/bcj/BcjPpc.js.map +1 -0
  31. package/dist/cjs/filters/bcj/BcjSparc.d.cts +19 -0
  32. package/dist/cjs/filters/bcj/BcjSparc.d.ts +19 -0
  33. package/dist/cjs/filters/bcj/BcjSparc.js +69 -0
  34. package/dist/cjs/filters/bcj/BcjSparc.js.map +1 -0
  35. package/dist/cjs/filters/delta/Delta.d.cts +16 -0
  36. package/dist/cjs/filters/delta/Delta.d.ts +16 -0
  37. package/dist/cjs/filters/delta/Delta.js +74 -0
  38. package/dist/cjs/filters/delta/Delta.js.map +1 -0
  39. package/dist/cjs/filters/index.d.cts +8 -0
  40. package/dist/cjs/filters/index.d.ts +8 -0
  41. package/dist/cjs/filters/index.js +27 -0
  42. package/dist/cjs/filters/index.js.map +1 -0
  43. package/dist/cjs/index.d.cts +4 -0
  44. package/dist/cjs/index.d.ts +4 -0
  45. package/dist/cjs/index.js +58 -0
  46. package/dist/cjs/index.js.map +1 -0
  47. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  48. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  49. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  50. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  51. package/dist/cjs/lzma/index.d.cts +31 -0
  52. package/dist/cjs/lzma/index.d.ts +31 -0
  53. package/dist/cjs/lzma/index.js +83 -0
  54. package/dist/cjs/lzma/index.js.map +1 -0
  55. package/dist/cjs/lzma/stream/transforms.d.cts +46 -0
  56. package/dist/cjs/lzma/stream/transforms.d.ts +46 -0
  57. package/dist/cjs/lzma/stream/transforms.js +193 -0
  58. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  59. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +63 -0
  60. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +63 -0
  61. package/dist/cjs/lzma/sync/Lzma2Decoder.js +231 -0
  62. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  63. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +97 -0
  64. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +97 -0
  65. package/dist/cjs/lzma/sync/LzmaDecoder.js +582 -0
  66. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  67. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  68. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  69. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  70. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  71. package/dist/cjs/lzma/types.d.cts +117 -0
  72. package/dist/cjs/lzma/types.d.ts +117 -0
  73. package/dist/cjs/lzma/types.js +264 -0
  74. package/dist/cjs/lzma/types.js.map +1 -0
  75. package/dist/cjs/package.json +1 -0
  76. package/dist/cjs/utils/createBufferingDecoder.d.cts +10 -0
  77. package/dist/cjs/utils/createBufferingDecoder.d.ts +10 -0
  78. package/dist/cjs/utils/createBufferingDecoder.js +41 -0
  79. package/dist/cjs/utils/createBufferingDecoder.js.map +1 -0
  80. package/dist/cjs/xz/Decoder.d.cts +21 -0
  81. package/dist/cjs/xz/Decoder.d.ts +21 -0
  82. package/dist/cjs/xz/Decoder.js +325 -0
  83. package/dist/cjs/xz/Decoder.js.map +1 -0
  84. package/dist/esm/compat.d.ts +1 -0
  85. package/dist/esm/compat.js +7 -0
  86. package/dist/esm/compat.js.map +1 -0
  87. package/dist/esm/filters/bcj/Bcj.d.ts +16 -0
  88. package/dist/esm/filters/bcj/Bcj.js +184 -0
  89. package/dist/esm/filters/bcj/Bcj.js.map +1 -0
  90. package/dist/esm/filters/bcj/BcjArm.d.ts +16 -0
  91. package/dist/esm/filters/bcj/BcjArm.js +114 -0
  92. package/dist/esm/filters/bcj/BcjArm.js.map +1 -0
  93. package/dist/esm/filters/bcj/BcjArm64.d.ts +21 -0
  94. package/dist/esm/filters/bcj/BcjArm64.js +57 -0
  95. package/dist/esm/filters/bcj/BcjArm64.js.map +1 -0
  96. package/dist/esm/filters/bcj/BcjArmt.d.ts +19 -0
  97. package/dist/esm/filters/bcj/BcjArmt.js +66 -0
  98. package/dist/esm/filters/bcj/BcjArmt.js.map +1 -0
  99. package/dist/esm/filters/bcj/BcjIa64.d.ts +15 -0
  100. package/dist/esm/filters/bcj/BcjIa64.js +127 -0
  101. package/dist/esm/filters/bcj/BcjIa64.js.map +1 -0
  102. package/dist/esm/filters/bcj/BcjPpc.d.ts +20 -0
  103. package/dist/esm/filters/bcj/BcjPpc.js +55 -0
  104. package/dist/esm/filters/bcj/BcjPpc.js.map +1 -0
  105. package/dist/esm/filters/bcj/BcjSparc.d.ts +19 -0
  106. package/dist/esm/filters/bcj/BcjSparc.js +59 -0
  107. package/dist/esm/filters/bcj/BcjSparc.js.map +1 -0
  108. package/dist/esm/filters/delta/Delta.d.ts +16 -0
  109. package/dist/esm/filters/delta/Delta.js +66 -0
  110. package/dist/esm/filters/delta/Delta.js.map +1 -0
  111. package/dist/esm/filters/index.d.ts +8 -0
  112. package/dist/esm/filters/index.js +9 -0
  113. package/dist/esm/filters/index.js.map +1 -0
  114. package/dist/esm/index.d.ts +4 -0
  115. package/dist/esm/index.js +5 -0
  116. package/dist/esm/index.js.map +1 -0
  117. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  118. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  119. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  120. package/dist/esm/lzma/index.d.ts +31 -0
  121. package/dist/esm/lzma/index.js +44 -0
  122. package/dist/esm/lzma/index.js.map +1 -0
  123. package/dist/esm/lzma/stream/transforms.d.ts +46 -0
  124. package/dist/esm/lzma/stream/transforms.js +190 -0
  125. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  126. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +63 -0
  127. package/dist/esm/lzma/sync/Lzma2Decoder.js +211 -0
  128. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  129. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +97 -0
  130. package/dist/esm/lzma/sync/LzmaDecoder.js +545 -0
  131. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  132. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  133. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  134. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  135. package/dist/esm/lzma/types.d.ts +117 -0
  136. package/dist/esm/lzma/types.js +154 -0
  137. package/dist/esm/lzma/types.js.map +1 -0
  138. package/dist/esm/package.json +1 -0
  139. package/dist/esm/utils/createBufferingDecoder.d.ts +10 -0
  140. package/dist/esm/utils/createBufferingDecoder.js +30 -0
  141. package/dist/esm/utils/createBufferingDecoder.js.map +1 -0
  142. package/dist/esm/xz/Decoder.d.ts +21 -0
  143. package/dist/esm/xz/Decoder.js +313 -0
  144. package/dist/esm/xz/Decoder.js.map +1 -0
  145. package/package.json +75 -0
@@ -0,0 +1,59 @@
1
+ // BCJ (SPARC) filter codec - converts SPARC branch instruction addresses
2
+ // This filter makes SPARC executables more compressible by LZMA
3
+ //
4
+ // SPARC is big-endian. CALL instructions use 30-bit signed offsets.
5
+ // The filter only transforms CALL instructions with specific byte patterns.
6
+ //
7
+ // Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c
8
+ import { bufferFrom } from 'extract-base-iterator';
9
+ import createBufferingDecoder from '../../utils/createBufferingDecoder.js';
10
/**
 * Decode SPARC BCJ filtered data.
 *
 * Undoes the encoder's branch conversion: each matched CALL instruction's
 * absolute word address is converted back into a position-relative offset.
 *
 * SPARC CALL instruction matching (big-endian):
 *  - First byte 0x40 and (second byte & 0xC0) == 0x00, OR
 *  - First byte 0x7F and (second byte & 0xC0) == 0xC0
 *
 * @param input - SPARC BCJ filtered data
 * @param _properties - Unused for SPARC BCJ
 * @param _unpackSize - Unused for SPARC BCJ
 * @returns Unfiltered data
 */ export function decodeBcjSparc(input, _properties, _unpackSize) {
    const out = bufferFrom(input); // work on a copy; bytes are patched in place
    // Scan 4-byte aligned instruction slots
    for (let offset = 0; offset + 4 <= out.length; offset += 4) {
        const first = out[offset];
        const second = out[offset + 1];
        // CALL instruction byte-pattern check:
        // (b0 == 0x40 && (b1 & 0xC0) == 0x00) || (b0 == 0x7F && (b1 & 0xC0) == 0xC0)
        const isCall =
            (first === 0x40 && (second & 0xc0) === 0x00) ||
            (first === 0x7f && (second & 0xc0) === 0xc0);
        if (!isCall) continue;
        // Assemble the 32-bit instruction word (big-endian)
        let word = (first << 24) | (second << 16) | (out[offset + 2] << 8) | out[offset + 3];
        // Word offset -> byte offset
        word <<= 2;
        // Decoding direction: absolute -> relative, so subtract the position
        let rel = word - offset;
        // Byte offset -> word offset (unsigned shift, matching the C reference)
        rel >>>= 2;
        // Sign-extend bit 22 across the top of the 30-bit field and restore the
        // CALL opcode: (((0 - ((d >> 22) & 1)) << 22) & 0x3FFFFFFF) | (d & 0x3FFFFF) | 0x40000000
        const negative = (rel >>> 22) & 1;
        rel = (negative ? 0x3fc00000 : 0) | (rel & 0x3fffff) | 0x40000000;
        // Store back big-endian
        out[offset] = (rel >>> 24) & 0xff;
        out[offset + 1] = (rel >>> 16) & 0xff;
        out[offset + 2] = (rel >>> 8) & 0xff;
        out[offset + 3] = rel & 0xff;
    }
    return out;
}
55
/**
 * Create a SPARC BCJ decoder Transform stream.
 *
 * Buffers the full input and then runs decodeBcjSparc over it via the
 * shared createBufferingDecoder helper.
 *
 * @param properties - Optional filter properties; forwarded to createBufferingDecoder
 * @param unpackSize - Optional expected output size; forwarded to createBufferingDecoder
 * @returns Transform stream producing unfiltered data
 */ export function createBcjSparcDecoder(properties, unpackSize) {
    return createBufferingDecoder(decodeBcjSparc, properties, unpackSize);
}
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/filters/bcj/BcjSparc.ts"],"sourcesContent":["// BCJ (SPARC) filter codec - converts SPARC branch instruction addresses\n// This filter makes SPARC executables more compressible by LZMA\n//\n// SPARC is big-endian. CALL instructions use 30-bit signed offsets.\n// The filter only transforms CALL instructions with specific byte patterns.\n//\n// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c\n\nimport { bufferFrom } from 'extract-base-iterator';\nimport type { Transform } from 'stream';\nimport createBufferingDecoder from '../../utils/createBufferingDecoder.ts';\n\n/**\n * Decode SPARC BCJ filtered data\n * Reverses the BCJ transformation by converting absolute addresses back to relative\n *\n * SPARC CALL instruction matching (big-endian):\n * - First byte 0x40 and (second byte & 0xC0) == 0x00, OR\n * - First byte 0x7F and (second byte & 0xC0) == 0xC0\n *\n * @param input - SPARC BCJ filtered data\n * @param _properties - Unused for SPARC BCJ\n * @param _unpackSize - Unused for SPARC BCJ\n * @returns Unfiltered data\n */\nexport function decodeBcjSparc(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n const output = bufferFrom(input); // Copy since we modify in place\n let pos = 0;\n\n // Process 4-byte aligned positions\n while (pos + 4 <= output.length) {\n const b0 = output[pos];\n const b1 = output[pos + 1];\n\n // Check for CALL instruction with specific byte patterns:\n // (b0 == 0x40 && (b1 & 0xC0) == 0x00) || (b0 == 0x7F && (b1 & 0xC0) == 0xC0)\n if ((b0 === 0x40 && (b1 & 0xc0) === 0x00) || (b0 === 0x7f && (b1 & 0xc0) === 0xc0)) {\n // Read 32-bit value (big-endian)\n let src = (b0 << 24) | (b1 << 16) | (output[pos + 2] << 8) | output[pos + 3];\n\n // Shift left by 2 (multiply by 4 for word addressing)\n src <<= 2;\n\n // Decoding: subtract position\n let dest = src - pos;\n\n // Shift right by 2\n dest >>>= 2;\n\n // Reconstruct with sign 
extension and opcode\n // (((0 - ((dest >> 22) & 1)) << 22) & 0x3FFFFFFF) | (dest & 0x3FFFFF) | 0x40000000\n const signBit = (dest >>> 22) & 1;\n const signExtend = signBit ? 0x3fc00000 : 0;\n dest = signExtend | (dest & 0x3fffff) | 0x40000000;\n\n // Write back (big-endian)\n output[pos] = (dest >>> 24) & 0xff;\n output[pos + 1] = (dest >>> 16) & 0xff;\n output[pos + 2] = (dest >>> 8) & 0xff;\n output[pos + 3] = dest & 0xff;\n }\n\n pos += 4;\n }\n\n return output;\n}\n\n/**\n * Create a SPARC BCJ decoder Transform stream\n */\nexport function createBcjSparcDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeBcjSparc, properties, unpackSize);\n}\n"],"names":["bufferFrom","createBufferingDecoder","decodeBcjSparc","input","_properties","_unpackSize","output","pos","length","b0","b1","src","dest","signBit","signExtend","createBcjSparcDecoder","properties","unpackSize"],"mappings":"AAAA,yEAAyE;AACzE,gEAAgE;AAChE,EAAE;AACF,oEAAoE;AACpE,4EAA4E;AAC5E,EAAE;AACF,+DAA+D;AAE/D,SAASA,UAAU,QAAQ,wBAAwB;AAEnD,OAAOC,4BAA4B,wCAAwC;AAE3E;;;;;;;;;;;;CAYC,GACD,OAAO,SAASC,eAAeC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACtF,MAAMC,SAASN,WAAWG,QAAQ,gCAAgC;IAClE,IAAII,MAAM;IAEV,mCAAmC;IACnC,MAAOA,MAAM,KAAKD,OAAOE,MAAM,CAAE;QAC/B,MAAMC,KAAKH,MAAM,CAACC,IAAI;QACtB,MAAMG,KAAKJ,MAAM,CAACC,MAAM,EAAE;QAE1B,0DAA0D;QAC1D,6EAA6E;QAC7E,IAAI,AAACE,OAAO,QAAQ,AAACC,CAAAA,KAAK,IAAG,MAAO,QAAUD,OAAO,QAAQ,AAACC,CAAAA,KAAK,IAAG,MAAO,MAAO;YAClF,iCAAiC;YACjC,IAAIC,MAAM,AAACF,MAAM,KAAOC,MAAM,KAAOJ,MAAM,CAACC,MAAM,EAAE,IAAI,IAAKD,MAAM,CAACC,MAAM,EAAE;YAE5E,sDAAsD;YACtDI,QAAQ;YAER,8BAA8B;YAC9B,IAAIC,OAAOD,MAAMJ;YAEjB,mBAAmB;YACnBK,UAAU;YAEV,6CAA6C;YAC7C,mFAAmF;YACnF,MAAMC,UAAU,AAACD,SAAS,KAAM;YAChC,MAAME,aAAaD,UAAU,aAAa;YAC1CD,OAAOE,aAAcF,OAAO,WAAY;YAExC,0BAA0B;YAC1BN,MAAM,CAACC,IAAI,GAAG,AAACK,SAAS,KAAM;YAC9BN,MAAM,CAACC,MAAM,EAAE,GAAG,AAACK,SAAS,KAAM;YAClCN,MAAM,CAACC,MAAM,EAAE,GAAG,AAACK,SAAS,IAAK;YACjCN,MAAM,CAACC,MAAM,EAAE,GAAGK,OAAO;QAC3B;QAEAL,OAAO;IACT;IAEA,OAAOD;AAC
T;AAEA;;CAEC,GACD,OAAO,SAASS,sBAAsBC,UAAmB,EAAEC,UAAmB;IAC5E,OAAOhB,uBAAuBC,gBAAgBc,YAAYC;AAC5D"}
@@ -0,0 +1,16 @@
1
import { Transform } from 'extract-base-iterator';
/**
 * Decode Delta filtered data (synchronous, for buffered use)
 * Reverses the delta transformation by adding previous values
 *
 * @param input - Delta filtered data
 * @param properties - Optional 1-byte properties (distance - 1); distance defaults to 1 when omitted
 * @param _unpackSize - Unused for Delta
 * @returns Unfiltered data
 */
export declare function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer;
/**
 * Create a streaming Delta decoder Transform.
 * Processes data chunk by chunk, maintaining state between chunks.
 *
 * @param properties - Optional 1-byte properties (distance - 1)
 * @param _unpackSize - Unused for Delta
 * @returns Transform stream emitting unfiltered data
 */
export declare function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform>;
@@ -0,0 +1,66 @@
1
+ // Delta filter codec - stores differences between consecutive bytes
2
+ // Useful for data with gradual changes (images, audio, sensor data)
3
+ //
4
+ // The Delta filter stores the difference between each byte and the byte
5
+ // N positions before it, where N is the "distance" parameter (default 1).
6
+ // This makes data with regular patterns more compressible.
7
+ //
8
+ // This implementation uses true streaming - processes data chunk by chunk
9
+ // while maintaining state between chunks.
10
+ import { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';
11
/**
 * Decode Delta filtered data (synchronous, for buffered use).
 *
 * Each output byte is the running modular sum of the input byte and the
 * byte produced `distance` positions earlier, reversing the encoder's
 * byte-difference transform.
 *
 * @param input - Delta filtered data
 * @param properties - Optional 1-byte properties (distance - 1)
 * @param _unpackSize - Unused for Delta
 * @returns Unfiltered data
 */ export function decodeDelta(input, properties, _unpackSize) {
    // Distance parameter: properties byte stores (distance - 1); default distance is 1
    const distance = properties && properties.length >= 1 ? properties[0] + 1 : 1;
    const out = bufferFrom(input); // work on a copy; bytes are rewritten in place
    // One running-sum slot per position within the distance window
    const history = new Array(distance).fill(0);
    for (let i = 0; i < out.length; i++) {
        const slot = i % distance;
        history[slot] = (history[slot] + out[i]) & 0xff; // modular byte addition
        out[i] = history[slot];
    }
    return out;
}
39
/**
 * Create a streaming Delta decoder Transform.
 *
 * Decodes chunk by chunk while carrying the per-slot running sums and the
 * global byte index across chunk boundaries, so output is identical to a
 * single-shot decodeDelta over the concatenated input.
 *
 * @param properties - Optional 1-byte properties (distance - 1)
 * @param _unpackSize - Unused for Delta
 */ export function createDeltaDecoder(properties, _unpackSize) {
    // Distance parameter: properties byte stores (distance - 1); default distance is 1
    const distance = properties && properties.length >= 1 ? properties[0] + 1 : 1;
    // One running-sum slot per position within the distance window
    const history = new Array(distance).fill(0);
    let processed = 0; // total bytes decoded so far, across all chunks
    return new Transform({
        transform(chunk, _encoding, callback) {
            const out = allocBuffer(chunk.length);
            for (let i = 0; i < chunk.length; i++) {
                const slot = processed % distance;
                history[slot] = (history[slot] + chunk[i]) & 0xff; // modular byte addition
                out[i] = history[slot];
                processed++;
            }
            callback(null, out);
        },
    });
}
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/filters/delta/Delta.ts"],"sourcesContent":["// Delta filter codec - stores differences between consecutive bytes\n// Useful for data with gradual changes (images, audio, sensor data)\n//\n// The Delta filter stores the difference between each byte and the byte\n// N positions before it, where N is the \"distance\" parameter (default 1).\n// This makes data with regular patterns more compressible.\n//\n// This implementation uses true streaming - processes data chunk by chunk\n// while maintaining state between chunks.\n\nimport { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';\n\n/**\n * Decode Delta filtered data (synchronous, for buffered use)\n * Reverses the delta transformation by adding previous values\n *\n * @param input - Delta filtered data\n * @param properties - Optional 1-byte properties (distance - 1)\n * @param _unpackSize - Unused for Delta\n * @returns Unfiltered data\n */\nexport function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n // Properties byte contains (distance - 1)\n distance = properties[0] + 1;\n }\n\n const output = bufferFrom(input); // Copy since we modify in place\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n for (let j = 0; j < output.length; j++) {\n const idx = j % distance;\n state[idx] = (state[idx] + output[j]) & 0xff;\n output[j] = state[idx];\n }\n\n return output;\n}\n\n/**\n * Create a streaming Delta decoder Transform.\n * Processes data chunk by chunk, maintaining state between chunks.\n */\nexport function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform> {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length 
>= 1) {\n distance = properties[0] + 1;\n }\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n let byteIndex = 0;\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: (err?: Error | null, data?: Buffer) => void) => {\n const output = allocBuffer(chunk.length);\n\n for (let j = 0; j < chunk.length; j++) {\n const idx = byteIndex % distance;\n state[idx] = (state[idx] + chunk[j]) & 0xff;\n output[j] = state[idx];\n byteIndex++;\n }\n\n callback(null, output);\n },\n });\n}\n"],"names":["allocBuffer","bufferFrom","Transform","decodeDelta","input","properties","_unpackSize","distance","length","output","state","i","push","j","idx","createDeltaDecoder","byteIndex","transform","chunk","_encoding","callback"],"mappings":"AAAA,oEAAoE;AACpE,oEAAoE;AACpE,EAAE;AACF,wEAAwE;AACxE,0EAA0E;AAC1E,2DAA2D;AAC3D,EAAE;AACF,0EAA0E;AAC1E,0CAA0C;AAE1C,SAASA,WAAW,EAAEC,UAAU,EAAEC,SAAS,QAAQ,wBAAwB;AAE3E;;;;;;;;CAQC,GACD,OAAO,SAASC,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,WAAoB;IAClF,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxC,0CAA0C;QAC1CD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,MAAMI,SAASR,WAAWG,QAAQ,gCAAgC;IAElE,uCAAuC;IACvC,MAAMM,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,UAAUI,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,OAAOD,MAAM,EAAEK,IAAK;QACtC,MAAMC,MAAMD,IAAIN;QAChBG,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGL,MAAM,CAACI,EAAE,GAAI;QACxCJ,MAAM,CAACI,EAAE,GAAGH,KAAK,CAACI,IAAI;IACxB;IAEA,OAAOL;AACT;AAEA;;;CAGC,GACD,OAAO,SAASM,mBAAmBV,UAAmB,EAAEC,WAAoB;IAC1E,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxCD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,uCAAuC;IACvC,MAAMK,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,UAAUI,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAII,YAAY;IAEhB,OAAO,IAAId,UAAU;QACnBe,WAAW,CAACC,OAAeC,WAAmBC;YAC5C,MAAMX,SAAST,YAAYkB,MAAMV,MAAM;YAEvC,IAAK,IAAIK,IAAI,GAAGA,IAAIK,MAAMV,MAAM,EAAEK,IAAK;gBACrC,MAA
MC,MAAME,YAAYT;gBACxBG,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGI,KAAK,CAACL,EAAE,GAAI;gBACvCJ,MAAM,CAACI,EAAE,GAAGH,KAAK,CAACI,IAAI;gBACtBE;YACF;YAEAI,SAAS,MAAMX;QACjB;IACF;AACF"}
@@ -0,0 +1,8 @@
1
// Filter implementations for XZ/LZMA (type declarations)
// BCJ (branch-conversion) filters, one per architecture
export * from './bcj/Bcj.js';
export * from './bcj/BcjArm.js';
export * from './bcj/BcjArm64.js';
export * from './bcj/BcjArmt.js';
export * from './bcj/BcjIa64.js';
export * from './bcj/BcjPpc.js';
export * from './bcj/BcjSparc.js';
// Delta filter
export * from './delta/Delta.js';
@@ -0,0 +1,9 @@
1
// Filter implementations for XZ/LZMA
// BCJ filters: convert branch instruction addresses, one per architecture
export * from './bcj/Bcj.js';
export * from './bcj/BcjArm.js';
export * from './bcj/BcjArm64.js';
export * from './bcj/BcjArmt.js';
export * from './bcj/BcjIa64.js';
export * from './bcj/BcjPpc.js';
export * from './bcj/BcjSparc.js';
// Delta filter: stores differences between consecutive bytes
export * from './delta/Delta.js';
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/filters/index.ts"],"sourcesContent":["// Filter implementations for XZ/LZMA\n\nexport * from './bcj/Bcj.ts';\nexport * from './bcj/BcjArm.ts';\nexport * from './bcj/BcjArm64.ts';\nexport * from './bcj/BcjArmt.ts';\nexport * from './bcj/BcjIa64.ts';\nexport * from './bcj/BcjPpc.ts';\nexport * from './bcj/BcjSparc.ts';\nexport * from './delta/Delta.ts';\n"],"names":[],"mappings":"AAAA,qCAAqC;AAErC,cAAc,eAAe;AAC7B,cAAc,kBAAkB;AAChC,cAAc,oBAAoB;AAClC,cAAc,mBAAmB;AACjC,cAAc,mBAAmB;AACjC,cAAc,kBAAkB;AAChC,cAAc,oBAAoB;AAClC,cAAc,mBAAmB"}
@@ -0,0 +1,4 @@
1
// Public type declarations for the package entry point
export * from './filters/index.js';
export type { OutputSink } from './lzma/index.js';
export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectLzmaFormat, Lzma2Decoder, LzmaDecoder, } from './lzma/index.js';
export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
@@ -0,0 +1,5 @@
1
// XZ and LZMA decoders for external use
// Re-export filters for convenience
export * from './filters/index.js';
// LZMA / LZMA2 decoding API (buffered decode functions plus decoder factories)
export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectLzmaFormat, Lzma2Decoder, LzmaDecoder } from './lzma/index.js';
// XZ container decoding API
export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/index.ts"],"sourcesContent":["// XZ and LZMA decoders for external use\n\n// Re-export filters for convenience\nexport * from './filters/index.ts';\nexport type { OutputSink } from './lzma/index.ts';\nexport {\n createLzma2Decoder,\n createLzmaDecoder,\n decodeLzma,\n decodeLzma2,\n detectLzmaFormat,\n Lzma2Decoder,\n LzmaDecoder,\n} from './lzma/index.ts';\nexport { createXZDecoder, decodeXZ } from './xz/Decoder.ts';\n"],"names":["createLzma2Decoder","createLzmaDecoder","decodeLzma","decodeLzma2","detectLzmaFormat","Lzma2Decoder","LzmaDecoder","createXZDecoder","decodeXZ"],"mappings":"AAAA,wCAAwC;AAExC,oCAAoC;AACpC,cAAc,qBAAqB;AAEnC,SACEA,kBAAkB,EAClBC,iBAAiB,EACjBC,UAAU,EACVC,WAAW,EACXC,gBAAgB,EAChBC,YAAY,EACZC,WAAW,QACN,kBAAkB;AACzB,SAASC,eAAe,EAAEC,QAAQ,QAAQ,kBAAkB"}
@@ -0,0 +1,73 @@
1
/**
 * LZMA2 Chunk Parser
 *
 * Shared parsing logic for LZMA2 chunk headers.
 * Used by both synchronous and streaming decoders.
 *
 * LZMA2 control byte ranges:
 * 0x00 = End of stream
 * 0x01 = Uncompressed chunk, dictionary reset
 * 0x02 = Uncompressed chunk, no dictionary reset
 * 0x80-0x9F = LZMA chunk, no reset (solid mode)
 * 0xA0-0xBF = LZMA chunk, reset state (probabilities)
 * 0xC0-0xDF = LZMA chunk, reset state + new properties
 * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties
 */
/**
 * LZMA properties extracted from chunk header
 */
export interface LzmaChunkProps {
    lc: number;
    lp: number;
    pb: number;
}
/**
 * Parsed LZMA2 chunk information
 */
export interface Lzma2Chunk {
    /** Chunk type */
    type: 'end' | 'uncompressed' | 'lzma';
    /** Total bytes consumed by header (including control byte) */
    headerSize: number;
    /** Whether to reset dictionary */
    dictReset: boolean;
    /** Whether to reset state/probabilities */
    stateReset: boolean;
    /** New LZMA properties (only for control >= 0xC0) */
    newProps: LzmaChunkProps | null;
    /** Uncompressed data size */
    uncompSize: number;
    /** Compressed data size (0 for uncompressed chunks) */
    compSize: number;
}
/**
 * Result of parsing attempt.
 * On failure, needBytes is the minimum additional input required before
 * re-parsing can make progress.
 */
export type ParseResult = {
    success: true;
    chunk: Lzma2Chunk;
} | {
    success: false;
    needBytes: number;
};
/**
 * Parse an LZMA2 chunk header
 *
 * @param input - Input buffer
 * @param offset - Offset to start parsing
 * @returns Parsed chunk info or number of bytes needed
 */
export declare function parseLzma2ChunkHeader(input: Buffer, offset: number): ParseResult;
/** Result type for hasCompleteChunk with totalSize included on success */
export type CompleteChunkResult = {
    success: true;
    chunk: Lzma2Chunk;
    totalSize: number;
} | {
    success: false;
    needBytes: number;
};
/**
 * Check if we have enough data for the complete chunk (header + data)
 */
export declare function hasCompleteChunk(input: Buffer, offset: number): CompleteChunkResult;
@@ -0,0 +1,137 @@
1
+ /**
2
+ * LZMA2 Chunk Parser
3
+ *
4
+ * Shared parsing logic for LZMA2 chunk headers.
5
+ * Used by both synchronous and streaming decoders.
6
+ *
7
+ * LZMA2 control byte ranges:
8
+ * 0x00 = End of stream
9
+ * 0x01 = Uncompressed chunk, dictionary reset
10
+ * 0x02 = Uncompressed chunk, no dictionary reset
11
+ * 0x80-0x9F = LZMA chunk, no reset (solid mode)
12
+ * 0xA0-0xBF = LZMA chunk, reset state (probabilities)
13
+ * 0xC0-0xDF = LZMA chunk, reset state + new properties
14
+ * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties
15
+ */ /**
16
+ * LZMA properties extracted from chunk header
17
+ */ /**
18
+ * Parse an LZMA2 chunk header
19
+ *
20
+ * @param input - Input buffer
21
+ * @param offset - Offset to start parsing
22
+ * @returns Parsed chunk info or number of bytes needed
23
+ */ export function parseLzma2ChunkHeader(input, offset) {
24
+ if (offset >= input.length) {
25
+ return {
26
+ success: false,
27
+ needBytes: 1
28
+ };
29
+ }
30
+ const control = input[offset];
31
+ // End of stream
32
+ if (control === 0x00) {
33
+ return {
34
+ success: true,
35
+ chunk: {
36
+ type: 'end',
37
+ headerSize: 1,
38
+ dictReset: false,
39
+ stateReset: false,
40
+ newProps: null,
41
+ uncompSize: 0,
42
+ compSize: 0
43
+ }
44
+ };
45
+ }
46
+ // Uncompressed chunk
47
+ if (control === 0x01 || control === 0x02) {
48
+ // Need 3 bytes: control + 2 size bytes
49
+ if (offset + 3 > input.length) {
50
+ return {
51
+ success: false,
52
+ needBytes: 3 - (input.length - offset)
53
+ };
54
+ }
55
+ const uncompSize = (input[offset + 1] << 8 | input[offset + 2]) + 1;
56
+ return {
57
+ success: true,
58
+ chunk: {
59
+ type: 'uncompressed',
60
+ headerSize: 3,
61
+ dictReset: control === 0x01,
62
+ stateReset: false,
63
+ newProps: null,
64
+ uncompSize,
65
+ compSize: 0
66
+ }
67
+ };
68
+ }
69
+ // LZMA compressed chunk
70
+ if (control >= 0x80) {
71
+ const hasNewProps = control >= 0xc0;
72
+ const minHeaderSize = hasNewProps ? 6 : 5; // control + 2 uncomp + 2 comp + (1 props)
73
+ if (offset + minHeaderSize > input.length) {
74
+ return {
75
+ success: false,
76
+ needBytes: minHeaderSize - (input.length - offset)
77
+ };
78
+ }
79
+ // Parse sizes
80
+ const uncompHigh = control & 0x1f;
81
+ const uncompSize = (uncompHigh << 16 | input[offset + 1] << 8 | input[offset + 2]) + 1;
82
+ const compSize = (input[offset + 3] << 8 | input[offset + 4]) + 1;
83
+ // Parse properties if present
84
+ let newProps = null;
85
+ if (hasNewProps) {
86
+ const propsByte = input[offset + 5];
87
+ const lc = propsByte % 9;
88
+ const remainder = ~~(propsByte / 9);
89
+ const lp = remainder % 5;
90
+ const pb = ~~(remainder / 5);
91
+ newProps = {
92
+ lc,
93
+ lp,
94
+ pb
95
+ };
96
+ }
97
+ return {
98
+ success: true,
99
+ chunk: {
100
+ type: 'lzma',
101
+ headerSize: minHeaderSize,
102
+ dictReset: control >= 0xe0,
103
+ stateReset: control >= 0xa0,
104
+ newProps,
105
+ uncompSize,
106
+ compSize
107
+ }
108
+ };
109
+ }
110
+ // Invalid control byte
111
+ throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);
112
+ }
113
/**
 * Check if we have enough data for the complete chunk (header + data).
 *
 * Delegates header parsing to parseLzma2ChunkHeader, then verifies that the
 * chunk's payload is fully present in the buffer.
 *
 * @param input - Input buffer
 * @param offset - Offset where the chunk starts
 * @returns On success, the parsed chunk plus its total size (header + payload);
 *   otherwise the minimum number of additional bytes required.
 */ export function hasCompleteChunk(input, offset) {
    const parsed = parseLzma2ChunkHeader(input, offset);
    if (!parsed.success) {
        return {
            success: false,
            needBytes: parsed.needBytes
        };
    }
    const chunk = parsed.chunk;
    // Payload length: uncompressed chunks carry uncompSize bytes, LZMA chunks
    // carry compSize bytes ('end' chunks have compSize 0)
    const payload = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
    const totalSize = chunk.headerSize + payload;
    const available = input.length - offset;
    if (totalSize > available) {
        return {
            success: false,
            needBytes: totalSize - available
        };
    }
    return {
        success: true,
        chunk,
        totalSize
    };
}
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/Lzma2ChunkParser.ts"],"sourcesContent":["/**\n * LZMA2 Chunk Parser\n *\n * Shared parsing logic for LZMA2 chunk headers.\n * Used by both synchronous and streaming decoders.\n *\n * LZMA2 control byte ranges:\n * 0x00 = End of stream\n * 0x01 = Uncompressed chunk, dictionary reset\n * 0x02 = Uncompressed chunk, no dictionary reset\n * 0x80-0x9F = LZMA chunk, no reset (solid mode)\n * 0xA0-0xBF = LZMA chunk, reset state (probabilities)\n * 0xC0-0xDF = LZMA chunk, reset state + new properties\n * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties\n */\n\n/**\n * LZMA properties extracted from chunk header\n */\nexport interface LzmaChunkProps {\n lc: number;\n lp: number;\n pb: number;\n}\n\n/**\n * Parsed LZMA2 chunk information\n */\nexport interface Lzma2Chunk {\n /** Chunk type */\n type: 'end' | 'uncompressed' | 'lzma';\n /** Total bytes consumed by header (including control byte) */\n headerSize: number;\n /** Whether to reset dictionary */\n dictReset: boolean;\n /** Whether to reset state/probabilities */\n stateReset: boolean;\n /** New LZMA properties (only for control >= 0xC0) */\n newProps: LzmaChunkProps | null;\n /** Uncompressed data size */\n uncompSize: number;\n /** Compressed data size (0 for uncompressed chunks) */\n compSize: number;\n}\n\n/**\n * Result of parsing attempt\n */\nexport type ParseResult = { success: true; chunk: Lzma2Chunk } | { success: false; needBytes: number };\n\n/**\n * Parse an LZMA2 chunk header\n *\n * @param input - Input buffer\n * @param offset - Offset to start parsing\n * @returns Parsed chunk info or number of bytes needed\n */\nexport function parseLzma2ChunkHeader(input: Buffer, offset: number): ParseResult {\n if (offset >= input.length) {\n return { success: false, needBytes: 1 };\n }\n\n const control = input[offset];\n\n // End of stream\n if (control === 0x00) {\n return {\n success: true,\n chunk: {\n type: 
'end',\n headerSize: 1,\n dictReset: false,\n stateReset: false,\n newProps: null,\n uncompSize: 0,\n compSize: 0,\n },\n };\n }\n\n // Uncompressed chunk\n if (control === 0x01 || control === 0x02) {\n // Need 3 bytes: control + 2 size bytes\n if (offset + 3 > input.length) {\n return { success: false, needBytes: 3 - (input.length - offset) };\n }\n\n const uncompSize = ((input[offset + 1] << 8) | input[offset + 2]) + 1;\n\n return {\n success: true,\n chunk: {\n type: 'uncompressed',\n headerSize: 3,\n dictReset: control === 0x01,\n stateReset: false,\n newProps: null,\n uncompSize,\n compSize: 0,\n },\n };\n }\n\n // LZMA compressed chunk\n if (control >= 0x80) {\n const hasNewProps = control >= 0xc0;\n const minHeaderSize = hasNewProps ? 6 : 5; // control + 2 uncomp + 2 comp + (1 props)\n\n if (offset + minHeaderSize > input.length) {\n return { success: false, needBytes: minHeaderSize - (input.length - offset) };\n }\n\n // Parse sizes\n const uncompHigh = control & 0x1f;\n const uncompSize = ((uncompHigh << 16) | (input[offset + 1] << 8) | input[offset + 2]) + 1;\n const compSize = ((input[offset + 3] << 8) | input[offset + 4]) + 1;\n\n // Parse properties if present\n let newProps: LzmaChunkProps | null = null;\n if (hasNewProps) {\n const propsByte = input[offset + 5];\n const lc = propsByte % 9;\n const remainder = ~~(propsByte / 9);\n const lp = remainder % 5;\n const pb = ~~(remainder / 5);\n newProps = { lc, lp, pb };\n }\n\n return {\n success: true,\n chunk: {\n type: 'lzma',\n headerSize: minHeaderSize,\n dictReset: control >= 0xe0,\n stateReset: control >= 0xa0,\n newProps,\n uncompSize,\n compSize,\n },\n };\n }\n\n // Invalid control byte\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n}\n\n/** Result type for hasCompleteChunk with totalSize included on success */\nexport type CompleteChunkResult = { success: true; chunk: Lzma2Chunk; totalSize: number } | { success: false; needBytes: number };\n\n/**\n * Check if we 
have enough data for the complete chunk (header + data)\n */\nexport function hasCompleteChunk(input: Buffer, offset: number): CompleteChunkResult {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (result.success === false) {\n return { success: false, needBytes: result.needBytes };\n }\n\n const { chunk } = result;\n const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;\n const totalSize = chunk.headerSize + dataSize;\n\n if (offset + totalSize > input.length) {\n return { success: false, needBytes: totalSize - (input.length - offset) };\n }\n\n return { success: true, chunk, totalSize };\n}\n"],"names":["parseLzma2ChunkHeader","input","offset","length","success","needBytes","control","chunk","type","headerSize","dictReset","stateReset","newProps","uncompSize","compSize","hasNewProps","minHeaderSize","uncompHigh","propsByte","lc","remainder","lp","pb","Error","toString","hasCompleteChunk","result","dataSize","totalSize"],"mappings":"AAAA;;;;;;;;;;;;;;CAcC,GAED;;CAEC,GAgCD;;;;;;CAMC,GACD,OAAO,SAASA,sBAAsBC,KAAa,EAAEC,MAAc;IACjE,IAAIA,UAAUD,MAAME,MAAM,EAAE;QAC1B,OAAO;YAAEC,SAAS;YAAOC,WAAW;QAAE;IACxC;IAEA,MAAMC,UAAUL,KAAK,CAACC,OAAO;IAE7B,gBAAgB;IAChB,IAAII,YAAY,MAAM;QACpB,OAAO;YACLF,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAW;gBACXC,YAAY;gBACZC,UAAU;gBACVC,YAAY;gBACZC,UAAU;YACZ;QACF;IACF;IAEA,qBAAqB;IACrB,IAAIR,YAAY,QAAQA,YAAY,MAAM;QACxC,uCAAuC;QACvC,IAAIJ,SAAS,IAAID,MAAME,MAAM,EAAE;YAC7B,OAAO;gBAAEC,SAAS;gBAAOC,WAAW,IAAKJ,CAAAA,MAAME,MAAM,GAAGD,MAAK;YAAG;QAClE;QAEA,MAAMW,aAAa,AAAC,CAAA,AAACZ,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAEpE,OAAO;YACLE,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAWJ,YAAY;gBACvBK,YAAY;gBACZC,UAAU;gBACVC;gBACAC,UAAU;YACZ;QACF;IACF;IAEA,wBAAwB;IACxB,IAAIR,WAAW,MAAM;QACnB,MAAMS,cAAcT,WAAW;QAC/B,MAAMU,gBAAgBD,cAAc,IAAI,GAAG,0CAA0C;QAErF,IAAIb,SAASc,gBAAgBf,MAAME,MAAM,EAAE;YACzC,OAAO;gBAAEC,SAAS;gBAAOC,WAAWW,gBAAiBf,CAAAA,MAAME,MAAM,GAAGD,MAAK;YAAG;QAC9E;QAEA,cAAc;
QACd,MAAMe,aAAaX,UAAU;QAC7B,MAAMO,aAAa,AAAC,CAAA,AAACI,cAAc,KAAOhB,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QACzF,MAAMY,WAAW,AAAC,CAAA,AAACb,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAElE,8BAA8B;QAC9B,IAAIU,WAAkC;QACtC,IAAIG,aAAa;YACf,MAAMG,YAAYjB,KAAK,CAACC,SAAS,EAAE;YACnC,MAAMiB,KAAKD,YAAY;YACvB,MAAME,YAAY,CAAC,CAAEF,CAAAA,YAAY,CAAA;YACjC,MAAMG,KAAKD,YAAY;YACvB,MAAME,KAAK,CAAC,CAAEF,CAAAA,YAAY,CAAA;YAC1BR,WAAW;gBAAEO;gBAAIE;gBAAIC;YAAG;QAC1B;QAEA,OAAO;YACLlB,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAYO;gBACZN,WAAWJ,WAAW;gBACtBK,YAAYL,WAAW;gBACvBM;gBACAC;gBACAC;YACF;QACF;IACF;IAEA,uBAAuB;IACvB,MAAM,IAAIS,MAAM,CAAC,8BAA8B,EAAEjB,QAAQkB,QAAQ,CAAC,KAAK;AACzE;AAKA;;CAEC,GACD,OAAO,SAASC,iBAAiBxB,KAAa,EAAEC,MAAc;IAC5D,MAAMwB,SAAS1B,sBAAsBC,OAAOC;IAE5C,IAAIwB,OAAOtB,OAAO,KAAK,OAAO;QAC5B,OAAO;YAAEA,SAAS;YAAOC,WAAWqB,OAAOrB,SAAS;QAAC;IACvD;IAEA,MAAM,EAAEE,KAAK,EAAE,GAAGmB;IAClB,MAAMC,WAAWpB,MAAMC,IAAI,KAAK,iBAAiBD,MAAMM,UAAU,GAAGN,MAAMO,QAAQ;IAClF,MAAMc,YAAYrB,MAAME,UAAU,GAAGkB;IAErC,IAAIzB,SAAS0B,YAAY3B,MAAME,MAAM,EAAE;QACrC,OAAO;YAAEC,SAAS;YAAOC,WAAWuB,YAAa3B,CAAAA,MAAME,MAAM,GAAGD,MAAK;QAAG;IAC1E;IAEA,OAAO;QAAEE,SAAS;QAAMG;QAAOqB;IAAU;AAC3C"}
@@ -0,0 +1,31 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ *
9
+ * LZMA1 vs LZMA2:
10
+ * - LZMA2 is chunked and supports true streaming with bounded memory
11
+ * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
12
+ */
13
+ export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
14
+ export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
15
+ export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
16
+ export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
17
+ export * from './types.js';
18
/**
 * Detect LZMA format from compressed data.
 *
 * LZMA2 uses chunk-based framing with control bytes:
 * - 0x00: End of stream
 * - 0x01-0x02: Uncompressed chunks
 * - 0x80-0xFF: LZMA compressed chunks
 *
 * LZMA1 is raw LZMA-compressed data (no framing), so any other first byte
 * is treated as the start of raw LZMA1 data.
 *
 * @param data - Compressed data to analyze
 * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
 */
export declare function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2';
@@ -0,0 +1,44 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ *
9
+ * LZMA1 vs LZMA2:
10
+ * - LZMA2 is chunked and supports true streaming with bounded memory
11
+ * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
12
+ */ // Streaming decoders (Transform streams)
13
+ export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
14
+ export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
15
+ // Synchronous decoders (for Buffer input)
16
+ export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
17
+ export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
18
+ // Type exports
19
+ export * from './types.js';
20
/**
 * Detect LZMA format from compressed data
 *
 * LZMA2 uses chunk-based framing with control bytes:
 * - 0x00: End of stream
 * - 0x01-0x02: Uncompressed chunks
 * - 0x80-0xFF: LZMA compressed chunks
 *
 * LZMA1 is raw LZMA-compressed data (no framing)
 *
 * @param data - Compressed data to analyze
 * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
 */ export function detectLzmaFormat(data) {
    // Empty input defaults to LZMA2 (matches LZMA2 decoder behavior)
    if (data.length === 0) {
        return 'lzma2';
    }
    const control = data[0];
    // Valid LZMA2 control bytes: 0x00 (end), 0x01/0x02 (uncompressed), 0x80-0xFF (LZMA chunk)
    const isLzma2Control =
        control === 0x00 ||
        control === 0x01 ||
        control === 0x02 ||
        (control >= 0x80 && control <= 0xff);
    // Anything else indicates raw LZMA1 data
    return isLzma2Control ? 'lzma2' : 'lzma1';
}
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/index.ts"],"sourcesContent":["/**\n * LZMA Decoder Module\n *\n * Provides both synchronous and streaming LZMA1/LZMA2 decoders.\n *\n * Synchronous API: Use when input is a complete Buffer\n * Streaming API: Use with Transform streams for memory-efficient decompression\n *\n * LZMA1 vs LZMA2:\n * - LZMA2 is chunked and supports true streaming with bounded memory\n * - LZMA1 has no chunk boundaries and requires buffering all input for streaming\n */\n\n// Streaming decoders (Transform streams)\nexport { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.ts';\nexport { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.ts';\n// Synchronous decoders (for Buffer input)\nexport { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.ts';\nexport { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.ts';\n// Type exports\nexport * from './types.ts';\n\n/**\n * Detect LZMA format from compressed data\n *\n * LZMA2 uses chunk-based framing with control bytes:\n * - 0x00: End of stream\n * - 0x01-0x02: Uncompressed chunks\n * - 0x80-0xFF: LZMA compressed chunks\n *\n * LZMA1 is raw LZMA-compressed data (no framing)\n *\n * @param data - Compressed data to analyze\n * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2\n */\nexport function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2' {\n if (data.length === 0) {\n // Default to LZMA2 for empty data (matches LZMA2 decoder behavior)\n return 'lzma2';\n }\n\n const firstByte = data[0];\n\n // LZMA2 control bytes: 0x00, 0x01, 0x02, or 0x80-0xFF\n if (firstByte === 0x00 || firstByte === 0x01 || firstByte === 0x02 || (firstByte >= 0x80 && firstByte <= 0xff)) {\n return 'lzma2';\n }\n\n // All other values indicate LZMA1 (raw LZMA data)\n return 
'lzma1';\n}\n"],"names":["createLzma2Decoder","createLzmaDecoder","decodeLzma2","Lzma2Decoder","decodeLzma","LzmaDecoder","BitTreeDecoder","RangeDecoder","detectLzmaFormat","data","length","firstByte"],"mappings":"AAAA;;;;;;;;;;;CAWC,GAED,yCAAyC;AACzC,SAASA,kBAAkB,EAAEC,iBAAiB,QAAQ,yBAAyB;AAC/E,SAASC,WAAW,EAAEC,YAAY,QAAQ,yBAAyB;AACnE,0CAA0C;AAC1C,SAASC,UAAU,EAAEC,WAAW,QAAQ,wBAAwB;AAChE,SAASC,cAAc,EAAEC,YAAY,QAAQ,yBAAyB;AACtE,eAAe;AACf,cAAc,aAAa;AAE3B;;;;;;;;;;;;CAYC,GACD,OAAO,SAASC,iBAAiBC,IAAY;IAC3C,IAAIA,KAAKC,MAAM,KAAK,GAAG;QACrB,mEAAmE;QACnE,OAAO;IACT;IAEA,MAAMC,YAAYF,IAAI,CAAC,EAAE;IAEzB,sDAAsD;IACtD,IAAIE,cAAc,QAAQA,cAAc,QAAQA,cAAc,QAASA,aAAa,QAAQA,aAAa,MAAO;QAC9G,OAAO;IACT;IAEA,kDAAkD;IAClD,OAAO;AACT"}
@@ -0,0 +1,46 @@
1
+ /**
2
+ * LZMA Transform Stream Wrappers
3
+ *
4
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
5
+ *
6
+ * LZMA2 streaming works by buffering until a complete chunk is available,
7
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
8
+ * uncompressed), so memory usage is predictable and bounded.
9
+ *
10
+ * Performance Optimization:
11
+ * - Uses OutputSink pattern for zero-copy output during decode
12
+ * - Each decoded byte written directly to stream (not buffered then copied)
13
+ * - ~4x faster than previous buffering approach
14
+ *
15
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
16
+ * decoder with continuation-passing style, which is complex and not worth
17
+ * the effort given LZMA2's chunked format.
18
+ */
19
+ import { Transform } from 'extract-base-iterator';
20
/**
 * Create an LZMA2 decoder Transform stream.
 *
 * This is a streaming decoder that processes LZMA2 chunks incrementally.
 * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
 *
 * @param properties - 1-byte LZMA2 properties (dictionary size)
 * @returns Transform stream that decompresses LZMA2 data
 */
export declare function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform>;
30
/**
 * Create an LZMA1 decoder Transform stream.
 *
 * Note: LZMA1 has no chunk boundaries, so this requires knowing the
 * uncompressed size upfront. The stream buffers all input, then
 * decompresses when complete.
 *
 * For true streaming, use LZMA2 which has built-in chunking.
 *
 * Optimization: Pre-allocates input buffer and copies chunks once,
 * avoiding the double-buffering of Buffer.concat().
 *
 * @param properties - 5-byte LZMA properties
 * @param unpackSize - Expected uncompressed size
 * @returns Transform stream that decompresses LZMA1 data
 */
export declare function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform>;