xz-compat 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145) hide show
  1. package/LICENSE +42 -0
  2. package/README.md +248 -0
  3. package/dist/cjs/compat.d.cts +1 -0
  4. package/dist/cjs/compat.d.ts +1 -0
  5. package/dist/cjs/compat.js +23 -0
  6. package/dist/cjs/compat.js.map +1 -0
  7. package/dist/cjs/filters/bcj/Bcj.d.cts +16 -0
  8. package/dist/cjs/filters/bcj/Bcj.d.ts +16 -0
  9. package/dist/cjs/filters/bcj/Bcj.js +192 -0
  10. package/dist/cjs/filters/bcj/Bcj.js.map +1 -0
  11. package/dist/cjs/filters/bcj/BcjArm.d.cts +16 -0
  12. package/dist/cjs/filters/bcj/BcjArm.d.ts +16 -0
  13. package/dist/cjs/filters/bcj/BcjArm.js +122 -0
  14. package/dist/cjs/filters/bcj/BcjArm.js.map +1 -0
  15. package/dist/cjs/filters/bcj/BcjArm64.d.cts +21 -0
  16. package/dist/cjs/filters/bcj/BcjArm64.d.ts +21 -0
  17. package/dist/cjs/filters/bcj/BcjArm64.js +65 -0
  18. package/dist/cjs/filters/bcj/BcjArm64.js.map +1 -0
  19. package/dist/cjs/filters/bcj/BcjArmt.d.cts +19 -0
  20. package/dist/cjs/filters/bcj/BcjArmt.d.ts +19 -0
  21. package/dist/cjs/filters/bcj/BcjArmt.js +76 -0
  22. package/dist/cjs/filters/bcj/BcjArmt.js.map +1 -0
  23. package/dist/cjs/filters/bcj/BcjIa64.d.cts +15 -0
  24. package/dist/cjs/filters/bcj/BcjIa64.d.ts +15 -0
  25. package/dist/cjs/filters/bcj/BcjIa64.js +141 -0
  26. package/dist/cjs/filters/bcj/BcjIa64.js.map +1 -0
  27. package/dist/cjs/filters/bcj/BcjPpc.d.cts +20 -0
  28. package/dist/cjs/filters/bcj/BcjPpc.d.ts +20 -0
  29. package/dist/cjs/filters/bcj/BcjPpc.js +64 -0
  30. package/dist/cjs/filters/bcj/BcjPpc.js.map +1 -0
  31. package/dist/cjs/filters/bcj/BcjSparc.d.cts +19 -0
  32. package/dist/cjs/filters/bcj/BcjSparc.d.ts +19 -0
  33. package/dist/cjs/filters/bcj/BcjSparc.js +69 -0
  34. package/dist/cjs/filters/bcj/BcjSparc.js.map +1 -0
  35. package/dist/cjs/filters/delta/Delta.d.cts +16 -0
  36. package/dist/cjs/filters/delta/Delta.d.ts +16 -0
  37. package/dist/cjs/filters/delta/Delta.js +74 -0
  38. package/dist/cjs/filters/delta/Delta.js.map +1 -0
  39. package/dist/cjs/filters/index.d.cts +8 -0
  40. package/dist/cjs/filters/index.d.ts +8 -0
  41. package/dist/cjs/filters/index.js +27 -0
  42. package/dist/cjs/filters/index.js.map +1 -0
  43. package/dist/cjs/index.d.cts +4 -0
  44. package/dist/cjs/index.d.ts +4 -0
  45. package/dist/cjs/index.js +58 -0
  46. package/dist/cjs/index.js.map +1 -0
  47. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
  48. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
  49. package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
  50. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
  51. package/dist/cjs/lzma/index.d.cts +31 -0
  52. package/dist/cjs/lzma/index.d.ts +31 -0
  53. package/dist/cjs/lzma/index.js +83 -0
  54. package/dist/cjs/lzma/index.js.map +1 -0
  55. package/dist/cjs/lzma/stream/transforms.d.cts +46 -0
  56. package/dist/cjs/lzma/stream/transforms.d.ts +46 -0
  57. package/dist/cjs/lzma/stream/transforms.js +193 -0
  58. package/dist/cjs/lzma/stream/transforms.js.map +1 -0
  59. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +63 -0
  60. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +63 -0
  61. package/dist/cjs/lzma/sync/Lzma2Decoder.js +231 -0
  62. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
  63. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +97 -0
  64. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +97 -0
  65. package/dist/cjs/lzma/sync/LzmaDecoder.js +582 -0
  66. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
  67. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
  68. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
  69. package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
  70. package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
  71. package/dist/cjs/lzma/types.d.cts +117 -0
  72. package/dist/cjs/lzma/types.d.ts +117 -0
  73. package/dist/cjs/lzma/types.js +264 -0
  74. package/dist/cjs/lzma/types.js.map +1 -0
  75. package/dist/cjs/package.json +1 -0
  76. package/dist/cjs/utils/createBufferingDecoder.d.cts +10 -0
  77. package/dist/cjs/utils/createBufferingDecoder.d.ts +10 -0
  78. package/dist/cjs/utils/createBufferingDecoder.js +41 -0
  79. package/dist/cjs/utils/createBufferingDecoder.js.map +1 -0
  80. package/dist/cjs/xz/Decoder.d.cts +21 -0
  81. package/dist/cjs/xz/Decoder.d.ts +21 -0
  82. package/dist/cjs/xz/Decoder.js +325 -0
  83. package/dist/cjs/xz/Decoder.js.map +1 -0
  84. package/dist/esm/compat.d.ts +1 -0
  85. package/dist/esm/compat.js +7 -0
  86. package/dist/esm/compat.js.map +1 -0
  87. package/dist/esm/filters/bcj/Bcj.d.ts +16 -0
  88. package/dist/esm/filters/bcj/Bcj.js +184 -0
  89. package/dist/esm/filters/bcj/Bcj.js.map +1 -0
  90. package/dist/esm/filters/bcj/BcjArm.d.ts +16 -0
  91. package/dist/esm/filters/bcj/BcjArm.js +114 -0
  92. package/dist/esm/filters/bcj/BcjArm.js.map +1 -0
  93. package/dist/esm/filters/bcj/BcjArm64.d.ts +21 -0
  94. package/dist/esm/filters/bcj/BcjArm64.js +57 -0
  95. package/dist/esm/filters/bcj/BcjArm64.js.map +1 -0
  96. package/dist/esm/filters/bcj/BcjArmt.d.ts +19 -0
  97. package/dist/esm/filters/bcj/BcjArmt.js +66 -0
  98. package/dist/esm/filters/bcj/BcjArmt.js.map +1 -0
  99. package/dist/esm/filters/bcj/BcjIa64.d.ts +15 -0
  100. package/dist/esm/filters/bcj/BcjIa64.js +127 -0
  101. package/dist/esm/filters/bcj/BcjIa64.js.map +1 -0
  102. package/dist/esm/filters/bcj/BcjPpc.d.ts +20 -0
  103. package/dist/esm/filters/bcj/BcjPpc.js +55 -0
  104. package/dist/esm/filters/bcj/BcjPpc.js.map +1 -0
  105. package/dist/esm/filters/bcj/BcjSparc.d.ts +19 -0
  106. package/dist/esm/filters/bcj/BcjSparc.js +59 -0
  107. package/dist/esm/filters/bcj/BcjSparc.js.map +1 -0
  108. package/dist/esm/filters/delta/Delta.d.ts +16 -0
  109. package/dist/esm/filters/delta/Delta.js +66 -0
  110. package/dist/esm/filters/delta/Delta.js.map +1 -0
  111. package/dist/esm/filters/index.d.ts +8 -0
  112. package/dist/esm/filters/index.js +9 -0
  113. package/dist/esm/filters/index.js.map +1 -0
  114. package/dist/esm/index.d.ts +4 -0
  115. package/dist/esm/index.js +5 -0
  116. package/dist/esm/index.js.map +1 -0
  117. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
  118. package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
  119. package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
  120. package/dist/esm/lzma/index.d.ts +31 -0
  121. package/dist/esm/lzma/index.js +44 -0
  122. package/dist/esm/lzma/index.js.map +1 -0
  123. package/dist/esm/lzma/stream/transforms.d.ts +46 -0
  124. package/dist/esm/lzma/stream/transforms.js +190 -0
  125. package/dist/esm/lzma/stream/transforms.js.map +1 -0
  126. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +63 -0
  127. package/dist/esm/lzma/sync/Lzma2Decoder.js +211 -0
  128. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
  129. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +97 -0
  130. package/dist/esm/lzma/sync/LzmaDecoder.js +545 -0
  131. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
  132. package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
  133. package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
  134. package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
  135. package/dist/esm/lzma/types.d.ts +117 -0
  136. package/dist/esm/lzma/types.js +154 -0
  137. package/dist/esm/lzma/types.js.map +1 -0
  138. package/dist/esm/package.json +1 -0
  139. package/dist/esm/utils/createBufferingDecoder.d.ts +10 -0
  140. package/dist/esm/utils/createBufferingDecoder.js +30 -0
  141. package/dist/esm/utils/createBufferingDecoder.js.map +1 -0
  142. package/dist/esm/xz/Decoder.d.ts +21 -0
  143. package/dist/esm/xz/Decoder.js +313 -0
  144. package/dist/esm/xz/Decoder.js.map +1 -0
  145. package/package.json +75 -0
@@ -0,0 +1,148 @@
1
+ /**
2
+ * LZMA2 Chunk Parser
3
+ *
4
+ * Shared parsing logic for LZMA2 chunk headers.
5
+ * Used by both synchronous and streaming decoders.
6
+ *
7
+ * LZMA2 control byte ranges:
8
+ * 0x00 = End of stream
9
+ * 0x01 = Uncompressed chunk, dictionary reset
10
+ * 0x02 = Uncompressed chunk, no dictionary reset
11
+ * 0x80-0x9F = LZMA chunk, no reset (solid mode)
12
+ * 0xA0-0xBF = LZMA chunk, reset state (probabilities)
13
+ * 0xC0-0xDF = LZMA chunk, reset state + new properties
14
+ * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties
15
+ */ /**
16
+ * LZMA properties extracted from chunk header
17
+ */ "use strict";
18
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Register each named export on `exports` as a live getter binding
// (SWC-style CJS interop: re-reads the local binding on every access).
function _export(target, all) {
    Object.keys(all).forEach(function (name) {
        Object.defineProperty(target, name, {
            enumerable: true,
            get: Object.getOwnPropertyDescriptor(all, name).get
        });
    });
}
_export(exports, {
    get hasCompleteChunk() {
        return hasCompleteChunk;
    },
    get parseLzma2ChunkHeader() {
        return parseLzma2ChunkHeader;
    }
});
35
/**
 * Parse an LZMA2 chunk header starting at `offset` in `input`.
 *
 * Control byte ranges (see file header comment):
 *   0x00       end of stream
 *   0x01/0x02  uncompressed chunk (0x01 also resets the dictionary)
 *   0x80-0xFF  LZMA compressed chunk; low 5 bits carry the high bits of
 *              (uncompressedSize - 1), high bits select the reset level
 *   0x03-0x7F  invalid -> throws
 *
 * @param {Buffer|Uint8Array} input - Input buffer
 * @param {number} offset - Offset of the chunk's control byte
 * @returns {{success: true, chunk: Object}|{success: false, needBytes: number}}
 *   Parsed chunk info, or how many more bytes are needed for the header.
 * @throws {Error} On an invalid control byte (0x03-0x7F) or an invalid
 *   properties byte (> 224, i.e. pb > 4).
 */
function parseLzma2ChunkHeader(input, offset) {
    if (offset >= input.length) {
        return { success: false, needBytes: 1 };
    }
    var control = input[offset];
    // End of stream marker: a single zero byte.
    if (control === 0x00) {
        return {
            success: true,
            chunk: {
                type: 'end',
                headerSize: 1,
                dictReset: false,
                stateReset: false,
                newProps: null,
                uncompSize: 0,
                compSize: 0
            }
        };
    }
    // Uncompressed chunk: control byte + big-endian 16-bit (size - 1).
    if (control === 0x01 || control === 0x02) {
        if (offset + 3 > input.length) {
            return { success: false, needBytes: 3 - (input.length - offset) };
        }
        var uncompSize = ((input[offset + 1] << 8) | input[offset + 2]) + 1;
        return {
            success: true,
            chunk: {
                type: 'uncompressed',
                headerSize: 3,
                dictReset: control === 0x01,
                stateReset: false,
                newProps: null,
                uncompSize: uncompSize,
                compSize: 0
            }
        };
    }
    // LZMA compressed chunk.
    if (control >= 0x80) {
        var hasNewProps = control >= 0xc0;
        // control + 2 uncompressed-size bytes + 2 compressed-size bytes (+ 1 props byte)
        var minHeaderSize = hasNewProps ? 6 : 5;
        if (offset + minHeaderSize > input.length) {
            return { success: false, needBytes: minHeaderSize - (input.length - offset) };
        }
        // Low 5 bits of the control byte are bits 16-20 of (uncompressedSize - 1).
        var uncompHigh = control & 0x1f;
        var uncompSize1 = ((uncompHigh << 16) | (input[offset + 1] << 8) | input[offset + 2]) + 1;
        var compSize = ((input[offset + 3] << 8) | input[offset + 4]) + 1;
        // Properties byte encodes (pb * 5 + lp) * 9 + lc.
        var newProps = null;
        if (hasNewProps) {
            var propsByte = input[offset + 5];
            // Max valid value is (4 * 5 + 4) * 9 + 8 = 224; anything larger
            // would decode to pb > 4, which the LZMA format forbids. Reject it
            // here instead of silently corrupting decoder state later.
            if (propsByte > 224) {
                throw new Error("Invalid LZMA2 properties byte: 0x".concat(propsByte.toString(16)));
            }
            var lc = propsByte % 9;
            var remainder = ~~(propsByte / 9);
            var lp = remainder % 5;
            var pb = ~~(remainder / 5);
            newProps = {
                lc: lc,
                lp: lp,
                pb: pb
            };
        }
        return {
            success: true,
            chunk: {
                type: 'lzma',
                headerSize: minHeaderSize,
                dictReset: control >= 0xe0,
                stateReset: control >= 0xa0,
                newProps: newProps,
                uncompSize: uncompSize1,
                compSize: compSize
            }
        };
    }
    // Control bytes 0x03-0x7F are not defined by the LZMA2 format.
    throw new Error("Invalid LZMA2 control byte: 0x".concat(control.toString(16)));
}
125
/**
 * Determine whether `input` holds the complete chunk (header plus payload)
 * beginning at `offset`.
 *
 * @param {Buffer|Uint8Array} input - Input buffer
 * @param {number} offset - Offset of the chunk's control byte
 * @returns {{success: true, chunk: Object, totalSize: number}|{success: false, needBytes: number}}
 */
function hasCompleteChunk(input, offset) {
    var parsed = parseLzma2ChunkHeader(input, offset);
    if (!parsed.success) {
        return { success: false, needBytes: parsed.needBytes };
    }
    var chunk = parsed.chunk;
    // Uncompressed chunks carry uncompSize payload bytes; LZMA chunks carry compSize.
    var payload = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
    var totalSize = chunk.headerSize + payload;
    var available = input.length - offset;
    if (available < totalSize) {
        return { success: false, needBytes: totalSize - available };
    }
    return { success: true, chunk: chunk, totalSize: totalSize };
}
148
+ /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/Lzma2ChunkParser.ts"],"sourcesContent":["/**\n * LZMA2 Chunk Parser\n *\n * Shared parsing logic for LZMA2 chunk headers.\n * Used by both synchronous and streaming decoders.\n *\n * LZMA2 control byte ranges:\n * 0x00 = End of stream\n * 0x01 = Uncompressed chunk, dictionary reset\n * 0x02 = Uncompressed chunk, no dictionary reset\n * 0x80-0x9F = LZMA chunk, no reset (solid mode)\n * 0xA0-0xBF = LZMA chunk, reset state (probabilities)\n * 0xC0-0xDF = LZMA chunk, reset state + new properties\n * 0xE0-0xFF = LZMA chunk, reset dictionary + state + new properties\n */\n\n/**\n * LZMA properties extracted from chunk header\n */\nexport interface LzmaChunkProps {\n lc: number;\n lp: number;\n pb: number;\n}\n\n/**\n * Parsed LZMA2 chunk information\n */\nexport interface Lzma2Chunk {\n /** Chunk type */\n type: 'end' | 'uncompressed' | 'lzma';\n /** Total bytes consumed by header (including control byte) */\n headerSize: number;\n /** Whether to reset dictionary */\n dictReset: boolean;\n /** Whether to reset state/probabilities */\n stateReset: boolean;\n /** New LZMA properties (only for control >= 0xC0) */\n newProps: LzmaChunkProps | null;\n /** Uncompressed data size */\n uncompSize: number;\n /** Compressed data size (0 for uncompressed chunks) */\n compSize: number;\n}\n\n/**\n * Result of parsing attempt\n */\nexport type ParseResult = { success: true; chunk: Lzma2Chunk } | { success: false; needBytes: number };\n\n/**\n * Parse an LZMA2 chunk header\n *\n * @param input - Input buffer\n * @param offset - Offset to start parsing\n * @returns Parsed chunk info or number of bytes needed\n */\nexport function parseLzma2ChunkHeader(input: Buffer, offset: number): ParseResult {\n if (offset >= input.length) {\n return { success: false, needBytes: 1 };\n }\n\n const control = input[offset];\n\n // End of stream\n if (control === 0x00) {\n return {\n success: true,\n chunk: {\n type: 
'end',\n headerSize: 1,\n dictReset: false,\n stateReset: false,\n newProps: null,\n uncompSize: 0,\n compSize: 0,\n },\n };\n }\n\n // Uncompressed chunk\n if (control === 0x01 || control === 0x02) {\n // Need 3 bytes: control + 2 size bytes\n if (offset + 3 > input.length) {\n return { success: false, needBytes: 3 - (input.length - offset) };\n }\n\n const uncompSize = ((input[offset + 1] << 8) | input[offset + 2]) + 1;\n\n return {\n success: true,\n chunk: {\n type: 'uncompressed',\n headerSize: 3,\n dictReset: control === 0x01,\n stateReset: false,\n newProps: null,\n uncompSize,\n compSize: 0,\n },\n };\n }\n\n // LZMA compressed chunk\n if (control >= 0x80) {\n const hasNewProps = control >= 0xc0;\n const minHeaderSize = hasNewProps ? 6 : 5; // control + 2 uncomp + 2 comp + (1 props)\n\n if (offset + minHeaderSize > input.length) {\n return { success: false, needBytes: minHeaderSize - (input.length - offset) };\n }\n\n // Parse sizes\n const uncompHigh = control & 0x1f;\n const uncompSize = ((uncompHigh << 16) | (input[offset + 1] << 8) | input[offset + 2]) + 1;\n const compSize = ((input[offset + 3] << 8) | input[offset + 4]) + 1;\n\n // Parse properties if present\n let newProps: LzmaChunkProps | null = null;\n if (hasNewProps) {\n const propsByte = input[offset + 5];\n const lc = propsByte % 9;\n const remainder = ~~(propsByte / 9);\n const lp = remainder % 5;\n const pb = ~~(remainder / 5);\n newProps = { lc, lp, pb };\n }\n\n return {\n success: true,\n chunk: {\n type: 'lzma',\n headerSize: minHeaderSize,\n dictReset: control >= 0xe0,\n stateReset: control >= 0xa0,\n newProps,\n uncompSize,\n compSize,\n },\n };\n }\n\n // Invalid control byte\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n}\n\n/** Result type for hasCompleteChunk with totalSize included on success */\nexport type CompleteChunkResult = { success: true; chunk: Lzma2Chunk; totalSize: number } | { success: false; needBytes: number };\n\n/**\n * Check if we 
have enough data for the complete chunk (header + data)\n */\nexport function hasCompleteChunk(input: Buffer, offset: number): CompleteChunkResult {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (result.success === false) {\n return { success: false, needBytes: result.needBytes };\n }\n\n const { chunk } = result;\n const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;\n const totalSize = chunk.headerSize + dataSize;\n\n if (offset + totalSize > input.length) {\n return { success: false, needBytes: totalSize - (input.length - offset) };\n }\n\n return { success: true, chunk, totalSize };\n}\n"],"names":["hasCompleteChunk","parseLzma2ChunkHeader","input","offset","length","success","needBytes","control","chunk","type","headerSize","dictReset","stateReset","newProps","uncompSize","compSize","hasNewProps","minHeaderSize","uncompHigh","propsByte","lc","remainder","lp","pb","Error","toString","result","dataSize","totalSize"],"mappings":"AAAA;;;;;;;;;;;;;;CAcC,GAED;;CAEC;;;;;;;;;;;QAsIeA;eAAAA;;QA/FAC;eAAAA;;;AAAT,SAASA,sBAAsBC,KAAa,EAAEC,MAAc;IACjE,IAAIA,UAAUD,MAAME,MAAM,EAAE;QAC1B,OAAO;YAAEC,SAAS;YAAOC,WAAW;QAAE;IACxC;IAEA,IAAMC,UAAUL,KAAK,CAACC,OAAO;IAE7B,gBAAgB;IAChB,IAAII,YAAY,MAAM;QACpB,OAAO;YACLF,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAW;gBACXC,YAAY;gBACZC,UAAU;gBACVC,YAAY;gBACZC,UAAU;YACZ;QACF;IACF;IAEA,qBAAqB;IACrB,IAAIR,YAAY,QAAQA,YAAY,MAAM;QACxC,uCAAuC;QACvC,IAAIJ,SAAS,IAAID,MAAME,MAAM,EAAE;YAC7B,OAAO;gBAAEC,SAAS;gBAAOC,WAAW,IAAKJ,CAAAA,MAAME,MAAM,GAAGD,MAAK;YAAG;QAClE;QAEA,IAAMW,aAAa,AAAC,CAAA,AAACZ,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAEpE,OAAO;YACLE,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAY;gBACZC,WAAWJ,YAAY;gBACvBK,YAAY;gBACZC,UAAU;gBACVC,YAAAA;gBACAC,UAAU;YACZ;QACF;IACF;IAEA,wBAAwB;IACxB,IAAIR,WAAW,MAAM;QACnB,IAAMS,cAAcT,WAAW;QAC/B,IAAMU,gBAAgBD,cAAc,IAAI,GAAG,0CAA0C;QAErF,IAAIb,SAASc,gBAAgBf,MAAME,MAAM,EAAE;YACzC,OAAO;gBAAEC,SAAS;gBAAOC,WAAWW,gBAAiBf,CAAAA,MAAME,MAAM,GAAGD,MA
AK;YAAG;QAC9E;QAEA,cAAc;QACd,IAAMe,aAAaX,UAAU;QAC7B,IAAMO,cAAa,AAAC,CAAA,AAACI,cAAc,KAAOhB,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QACzF,IAAMY,WAAW,AAAC,CAAA,AAACb,KAAK,CAACC,SAAS,EAAE,IAAI,IAAKD,KAAK,CAACC,SAAS,EAAE,AAAD,IAAK;QAElE,8BAA8B;QAC9B,IAAIU,WAAkC;QACtC,IAAIG,aAAa;YACf,IAAMG,YAAYjB,KAAK,CAACC,SAAS,EAAE;YACnC,IAAMiB,KAAKD,YAAY;YACvB,IAAME,YAAY,CAAC,CAAEF,CAAAA,YAAY,CAAA;YACjC,IAAMG,KAAKD,YAAY;YACvB,IAAME,KAAK,CAAC,CAAEF,CAAAA,YAAY,CAAA;YAC1BR,WAAW;gBAAEO,IAAAA;gBAAIE,IAAAA;gBAAIC,IAAAA;YAAG;QAC1B;QAEA,OAAO;YACLlB,SAAS;YACTG,OAAO;gBACLC,MAAM;gBACNC,YAAYO;gBACZN,WAAWJ,WAAW;gBACtBK,YAAYL,WAAW;gBACvBM,UAAAA;gBACAC,YAAAA;gBACAC,UAAAA;YACF;QACF;IACF;IAEA,uBAAuB;IACvB,MAAM,IAAIS,MAAM,AAAC,iCAAqD,OAArBjB,QAAQkB,QAAQ,CAAC;AACpE;AAQO,SAASzB,iBAAiBE,KAAa,EAAEC,MAAc;IAC5D,IAAMuB,SAASzB,sBAAsBC,OAAOC;IAE5C,IAAIuB,OAAOrB,OAAO,KAAK,OAAO;QAC5B,OAAO;YAAEA,SAAS;YAAOC,WAAWoB,OAAOpB,SAAS;QAAC;IACvD;IAEA,IAAM,AAAEE,QAAUkB,OAAVlB;IACR,IAAMmB,WAAWnB,MAAMC,IAAI,KAAK,iBAAiBD,MAAMM,UAAU,GAAGN,MAAMO,QAAQ;IAClF,IAAMa,YAAYpB,MAAME,UAAU,GAAGiB;IAErC,IAAIxB,SAASyB,YAAY1B,MAAME,MAAM,EAAE;QACrC,OAAO;YAAEC,SAAS;YAAOC,WAAWsB,YAAa1B,CAAAA,MAAME,MAAM,GAAGD,MAAK;QAAG;IAC1E;IAEA,OAAO;QAAEE,SAAS;QAAMG,OAAAA;QAAOoB,WAAAA;IAAU;AAC3C"}
@@ -0,0 +1,31 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ *
9
+ * LZMA1 vs LZMA2:
10
+ * - LZMA2 is chunked and supports true streaming with bounded memory
11
+ * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
12
+ */
13
+ export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
14
+ export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
15
+ export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
16
+ export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
17
+ export * from './types.js';
18
+ /**
19
+ * Detect LZMA format from compressed data
20
+ *
21
+ * LZMA2 uses chunk-based framing with control bytes:
22
+ * - 0x00: End of stream
23
+ * - 0x01-0x02: Uncompressed chunks
24
+ * - 0x80-0xFF: LZMA compressed chunks
25
+ *
26
+ * LZMA1 is raw LZMA-compressed data (no framing)
27
+ *
28
+ * @param data - Compressed data to analyze
29
+ * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
30
+ */
31
+ export declare function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2';
@@ -0,0 +1,31 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ *
9
+ * LZMA1 vs LZMA2:
10
+ * - LZMA2 is chunked and supports true streaming with bounded memory
11
+ * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
12
+ */
13
+ export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
14
+ export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
15
+ export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
16
+ export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
17
+ export * from './types.js';
18
+ /**
19
+ * Detect LZMA format from compressed data
20
+ *
21
+ * LZMA2 uses chunk-based framing with control bytes:
22
+ * - 0x00: End of stream
23
+ * - 0x01-0x02: Uncompressed chunks
24
+ * - 0x80-0xFF: LZMA compressed chunks
25
+ *
26
+ * LZMA1 is raw LZMA-compressed data (no framing)
27
+ *
28
+ * @param data - Compressed data to analyze
29
+ * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
30
+ */
31
+ export declare function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2';
@@ -0,0 +1,83 @@
1
+ /**
2
+ * LZMA Decoder Module
3
+ *
4
+ * Provides both synchronous and streaming LZMA1/LZMA2 decoders.
5
+ *
6
+ * Synchronous API: Use when input is a complete Buffer
7
+ * Streaming API: Use with Transform streams for memory-efficient decompression
8
+ *
9
+ * LZMA1 vs LZMA2:
10
+ * - LZMA2 is chunked and supports true streaming with bounded memory
11
+ * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
12
+ */ // Streaming decoders (Transform streams)
13
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Register each named export on `exports` as a live getter binding
// (SWC-style CJS interop: re-reads the source module on every access).
function _export(target, all) {
    Object.keys(all).forEach(function (name) {
        Object.defineProperty(target, name, {
            enumerable: true,
            get: Object.getOwnPropertyDescriptor(all, name).get
        });
    });
}
_export(exports, {
    get BitTreeDecoder() {
        return _RangeDecoderts.BitTreeDecoder;
    },
    get Lzma2Decoder() {
        return _Lzma2Decoderts.Lzma2Decoder;
    },
    get LzmaDecoder() {
        return _LzmaDecoderts.LzmaDecoder;
    },
    get RangeDecoder() {
        return _RangeDecoderts.RangeDecoder;
    },
    get createLzma2Decoder() {
        return _transformsts.createLzma2Decoder;
    },
    get createLzmaDecoder() {
        return _transformsts.createLzmaDecoder;
    },
    get decodeLzma() {
        return _LzmaDecoderts.decodeLzma;
    },
    get decodeLzma2() {
        return _Lzma2Decoderts.decodeLzma2;
    },
    get detectLzmaFormat() {
        return detectLzmaFormat;
    }
});
var _transformsts = require("./stream/transforms.js");
var _Lzma2Decoderts = require("./sync/Lzma2Decoder.js");
var _LzmaDecoderts = require("./sync/LzmaDecoder.js");
var _RangeDecoderts = require("./sync/RangeDecoder.js");
// Re-export everything from types.js (named exports only; `default` is skipped).
_export_star(require("./types.js"), exports);
57
/**
 * Copy every named (non-default) export of `from` onto `to` as a live getter
 * binding, skipping keys `to` already owns. Returns `from` unchanged.
 *
 * @param {Object} from - Source module namespace
 * @param {Object} to - Target exports object
 * @returns {Object} The `from` object, for chaining
 */
function _export_star(from, to) {
    var names = Object.keys(from);
    for (var i = 0; i < names.length; i++) {
        // Never forward the default export, and never shadow an existing key.
        if (names[i] === "default" || Object.prototype.hasOwnProperty.call(to, names[i])) {
            continue;
        }
        (function (k) {
            Object.defineProperty(to, k, {
                enumerable: true,
                get: function () {
                    return from[k];
                }
            });
        })(names[i]);
    }
    return from;
}
70
/**
 * Detect whether `data` looks like LZMA2 (chunk-framed) or raw LZMA1.
 *
 * LZMA2 streams begin with a control byte: 0x00 (end of stream),
 * 0x01/0x02 (uncompressed chunk), or 0x80-0xFF (compressed chunk).
 * Any other leading byte indicates raw LZMA1 data.
 *
 * @param {Buffer|Uint8Array} data - Compressed data to analyze
 * @returns {'lzma1'|'lzma2'}
 */
function detectLzmaFormat(data) {
    // Empty input defaults to LZMA2, mirroring the LZMA2 decoder's behavior.
    if (data.length === 0) {
        return 'lzma2';
    }
    var control = data[0];
    switch (control) {
        case 0x00:
        case 0x01:
        case 0x02:
            return 'lzma2';
        default:
            // 0x80-0xFF are LZMA2 compressed-chunk control bytes;
            // 0x03-0x7F only occur as the props byte of raw LZMA1.
            return control >= 0x80 ? 'lzma2' : 'lzma1';
    }
}
83
+ /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/index.ts"],"sourcesContent":["/**\n * LZMA Decoder Module\n *\n * Provides both synchronous and streaming LZMA1/LZMA2 decoders.\n *\n * Synchronous API: Use when input is a complete Buffer\n * Streaming API: Use with Transform streams for memory-efficient decompression\n *\n * LZMA1 vs LZMA2:\n * - LZMA2 is chunked and supports true streaming with bounded memory\n * - LZMA1 has no chunk boundaries and requires buffering all input for streaming\n */\n\n// Streaming decoders (Transform streams)\nexport { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.ts';\nexport { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.ts';\n// Synchronous decoders (for Buffer input)\nexport { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.ts';\nexport { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.ts';\n// Type exports\nexport * from './types.ts';\n\n/**\n * Detect LZMA format from compressed data\n *\n * LZMA2 uses chunk-based framing with control bytes:\n * - 0x00: End of stream\n * - 0x01-0x02: Uncompressed chunks\n * - 0x80-0xFF: LZMA compressed chunks\n *\n * LZMA1 is raw LZMA-compressed data (no framing)\n *\n * @param data - Compressed data to analyze\n * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2\n */\nexport function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2' {\n if (data.length === 0) {\n // Default to LZMA2 for empty data (matches LZMA2 decoder behavior)\n return 'lzma2';\n }\n\n const firstByte = data[0];\n\n // LZMA2 control bytes: 0x00, 0x01, 0x02, or 0x80-0xFF\n if (firstByte === 0x00 || firstByte === 0x01 || firstByte === 0x02 || (firstByte >= 0x80 && firstByte <= 0xff)) {\n return 'lzma2';\n }\n\n // All other values indicate LZMA1 (raw LZMA data)\n return 
'lzma1';\n}\n"],"names":["BitTreeDecoder","Lzma2Decoder","LzmaDecoder","RangeDecoder","createLzma2Decoder","createLzmaDecoder","decodeLzma","decodeLzma2","detectLzmaFormat","data","length","firstByte"],"mappings":"AAAA;;;;;;;;;;;CAWC,GAED,yCAAyC;;;;;;;;;;;;QAKhCA;eAAAA,8BAAc;;QAHDC;eAAAA,4BAAY;;QAEbC;eAAAA,0BAAW;;QACPC;eAAAA,4BAAY;;QAJ5BC;eAAAA,gCAAkB;;QAAEC;eAAAA,+BAAiB;;QAGrCC;eAAAA,yBAAU;;QAFVC;eAAAA,2BAAW;;QAoBJC;eAAAA;;;4BArBsC;8BACZ;6BAEF;8BACK;qBAE/B;;;;;;;;;;;;;;AAeP,SAASA,iBAAiBC,IAAY;IAC3C,IAAIA,KAAKC,MAAM,KAAK,GAAG;QACrB,mEAAmE;QACnE,OAAO;IACT;IAEA,IAAMC,YAAYF,IAAI,CAAC,EAAE;IAEzB,sDAAsD;IACtD,IAAIE,cAAc,QAAQA,cAAc,QAAQA,cAAc,QAASA,aAAa,QAAQA,aAAa,MAAO;QAC9G,OAAO;IACT;IAEA,kDAAkD;IAClD,OAAO;AACT"}
@@ -0,0 +1,46 @@
1
+ /**
2
+ * LZMA Transform Stream Wrappers
3
+ *
4
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
5
+ *
6
+ * LZMA2 streaming works by buffering until a complete chunk is available,
7
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
8
+ * uncompressed), so memory usage is predictable and bounded.
9
+ *
10
+ * Performance Optimization:
11
+ * - Uses OutputSink pattern for zero-copy output during decode
12
+ * - Each decoded byte written directly to stream (not buffered then copied)
13
+ * - ~4x faster than previous buffering approach
14
+ *
15
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
16
+ * decoder with continuation-passing style, which is complex and not worth
17
+ * the effort given LZMA2's chunked format.
18
+ */
19
+ import { Transform } from 'extract-base-iterator';
20
+ /**
21
+ * Create an LZMA2 decoder Transform stream
22
+ *
23
+ * This is a streaming decoder that processes LZMA2 chunks incrementally.
24
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
25
+ *
26
+ * @param properties - 1-byte LZMA2 properties (dictionary size)
27
+ * @returns Transform stream that decompresses LZMA2 data
28
+ */
29
+ export declare function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform>;
30
+ /**
31
+ * Create an LZMA1 decoder Transform stream
32
+ *
33
+ * Note: LZMA1 has no chunk boundaries, so this requires knowing the
34
+ * uncompressed size upfront. The stream buffers all input, then
35
+ * decompresses when complete.
36
+ *
37
+ * For true streaming, use LZMA2 which has built-in chunking.
38
+ *
39
+ * Optimization: Pre-allocates input buffer and copies chunks once,
40
+ * avoiding the double-buffering of Buffer.concat().
41
+ *
42
+ * @param properties - 5-byte LZMA properties
43
+ * @param unpackSize - Expected uncompressed size
44
+ * @returns Transform stream that decompresses LZMA1 data
45
+ */
46
+ export declare function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform>;
@@ -0,0 +1,46 @@
1
+ /**
2
+ * LZMA Transform Stream Wrappers
3
+ *
4
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
5
+ *
6
+ * LZMA2 streaming works by buffering until a complete chunk is available,
7
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
8
+ * uncompressed), so memory usage is predictable and bounded.
9
+ *
10
+ * Performance Optimization:
11
+ * - Uses OutputSink pattern for zero-copy output during decode
12
+ * - Each decoded byte written directly to stream (not buffered then copied)
13
+ * - ~4x faster than previous buffering approach
14
+ *
15
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
16
+ * decoder with continuation-passing style, which is complex and not worth
17
+ * the effort given LZMA2's chunked format.
18
+ */
19
+ import { Transform } from 'extract-base-iterator';
20
+ /**
21
+ * Create an LZMA2 decoder Transform stream
22
+ *
23
+ * This is a streaming decoder that processes LZMA2 chunks incrementally.
24
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
25
+ *
26
+ * @param properties - 1-byte LZMA2 properties (dictionary size)
27
+ * @returns Transform stream that decompresses LZMA2 data
28
+ */
29
+ export declare function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform>;
30
+ /**
31
+ * Create an LZMA1 decoder Transform stream
32
+ *
33
+ * Note: LZMA1 has no chunk boundaries, so this requires knowing the
34
+ * uncompressed size upfront. The stream buffers all input, then
35
+ * decompresses when complete.
36
+ *
37
+ * For true streaming, use LZMA2 which has built-in chunking.
38
+ *
39
+ * Optimization: Pre-allocates input buffer and copies chunks once,
40
+ * avoiding the double-buffering of Buffer.concat().
41
+ *
42
+ * @param properties - 5-byte LZMA properties
43
+ * @param unpackSize - Expected uncompressed size
44
+ * @returns Transform stream that decompresses LZMA1 data
45
+ */
46
+ export declare function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform>;
@@ -0,0 +1,193 @@
1
+ /**
2
+ * LZMA Transform Stream Wrappers
3
+ *
4
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
5
+ *
6
+ * LZMA2 streaming works by buffering until a complete chunk is available,
7
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
8
+ * uncompressed), so memory usage is predictable and bounded.
9
+ *
10
+ * Performance Optimization:
11
+ * - Uses OutputSink pattern for zero-copy output during decode
12
+ * - Each decoded byte written directly to stream (not buffered then copied)
13
+ * - ~4x faster than previous buffering approach
14
+ *
15
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
16
+ * decoder with continuation-passing style, which is complex and not worth
17
+ * the effort given LZMA2's chunked format.
18
+ */ "use strict";
19
// Transpiler boilerplate: mark this CJS module as an ES-module interop
// target. The CJS-interop footer at the bottom of the file checks this flag.
Object.defineProperty(exports, "__esModule", {
    value: true
});
22
// Copy every enumerable property of `all` onto `target` as an enumerable
// live getter, reusing the getter from `all`'s own property descriptor.
// (Transpiler helper used to wire up the module's named exports.)
function _export(target, all) {
    for (var key in all) {
        var getter = Object.getOwnPropertyDescriptor(all, key).get;
        Object.defineProperty(target, key, {
            enumerable: true,
            get: getter
        });
    }
}
28
// Register the module's public API as live getter exports so that the
// bindings resolve to the (hoisted) function declarations below.
_export(exports, {
    get createLzma2Decoder () {
        return createLzma2Decoder;
    },
    get createLzmaDecoder () {
        return createLzmaDecoder;
    }
});
36
+ var _extractbaseiterator = require("extract-base-iterator");
37
+ var _Lzma2ChunkParserts = require("../Lzma2ChunkParser.js");
38
+ var _LzmaDecoderts = require("../sync/LzmaDecoder.js");
39
+ var _typests = require("../types.js");
40
/**
 * Create an LZMA2 decoder Transform stream.
 *
 * Input is buffered until a complete LZMA2 chunk is available, then that
 * chunk is decoded synchronously and its output pushed downstream.
 *
 * Fix: the current LZMA properties (lc/lp/pb) are now held in the closure
 * and persist across chunks. Previously they were redeclared inside the
 * per-chunk loop and only assigned when a chunk carried a new-properties
 * header, so any compressed chunk WITHOUT one called
 * streamDecoder.setLcLpPb(undefined, undefined, undefined).
 *
 * @param {Buffer|Uint8Array} properties - 1-byte LZMA2 properties (encodes dictionary size)
 * @returns Transform stream that decompresses LZMA2 data
 * @throws {Error} if the properties byte is missing
 */
function createLzma2Decoder(properties) {
    if (!properties || properties.length < 1) {
        throw new Error('LZMA2 requires properties byte');
    }
    var dictSize = (0, _typests.parseLzma2DictionarySize)(properties[0]);
    // LZMA decoder instance - reused across chunks for solid mode
    var decoder = new _LzmaDecoderts.LzmaDecoder();
    decoder.setDictionarySize(dictSize);
    // Current LZMA properties. Kept in the closure so chunks that do not
    // carry a new-properties header keep decoding with the last ones seen.
    var propsSet = false;
    var lc, lp, pb;
    // Buffer for incomplete chunk data carried between writes
    var pending = null;
    var finished = false;
    return new _extractbaseiterator.Transform({
        transform: function transform(chunk, _encoding, callback) {
            var _this = this;
            if (finished) {
                callback(null);
                return;
            }
            // Combine with any data left over from the previous write
            var input;
            if (pending && pending.length > 0) {
                input = Buffer.concat([
                    pending,
                    chunk
                ]);
                pending = null;
            } else {
                input = chunk;
            }
            var offset = 0;
            try {
                while(offset < input.length && !finished){
                    var result = (0, _Lzma2ChunkParserts.hasCompleteChunk)(input, offset);
                    if (!result.success) {
                        // Incomplete chunk: stash the tail for the next write
                        pending = input.slice(offset);
                        break;
                    }
                    var chunkInfo = result.chunk, totalSize = result.totalSize;
                    if (chunkInfo.type === 'end') {
                        finished = true;
                        break;
                    }
                    // Handle dictionary reset
                    if (chunkInfo.dictReset) {
                        decoder.resetDictionary();
                    }
                    var dataOffset = offset + chunkInfo.headerSize;
                    if (chunkInfo.type === 'uncompressed') {
                        var uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);
                        this.push(uncompData);
                        // Keep the dictionary in sync for subsequent LZMA chunks
                        decoder.feedUncompressed(uncompData);
                    } else {
                        // LZMA compressed chunk: apply new properties if present
                        if (chunkInfo.newProps) {
                            lc = chunkInfo.newProps.lc;
                            lp = chunkInfo.newProps.lp;
                            pb = chunkInfo.newProps.pb;
                            if (!decoder.setLcLpPb(lc, lp, pb)) {
                                throw new Error("Invalid LZMA properties: lc=".concat(lc, " lp=").concat(lp, " pb=").concat(pb));
                            }
                            propsSet = true;
                        }
                        if (!propsSet) {
                            throw new Error('LZMA chunk without properties');
                        }
                        // Reset probabilities if the chunk requests a state reset
                        if (chunkInfo.stateReset) {
                            decoder.resetProbabilities();
                        }
                        // Preserve the dictionary unless BOTH state and dictionary are
                        // reset: !a || (a && !b)  ==  !(a && b)
                        var useSolid = !(chunkInfo.stateReset && chunkInfo.dictReset);
                        var compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);
                        // OutputSink pattern: decoded bytes are pushed straight to the
                        // stream instead of being buffered and copied.
                        // NOTE(review): a fresh LzmaDecoder is built per chunk, so data
                        // fed into `decoder` above is not visible to it — confirm that
                        // decodeWithSink's solid mode really carries dictionary state
                        // across chunks.
                        var streamDecoder = new _LzmaDecoderts.LzmaDecoder({
                            write: function(out) {
                                return _this.push(out);
                            }
                        });
                        streamDecoder.setDictionarySize(dictSize);
                        // lc/lp/pb are always defined here: set by this chunk's
                        // newProps or carried over from an earlier chunk (propsSet).
                        streamDecoder.setLcLpPb(lc, lp, pb);
                        streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);
                        // Flush any remaining data in the OutWindow
                        streamDecoder.flushOutWindow();
                    }
                    offset += totalSize;
                }
                callback(null);
            } catch (err) {
                callback(err);
            }
        },
        flush: function flush(callback) {
            // Leftover bytes that never formed a complete chunk mean the
            // stream was cut off before its end marker.
            if (pending && pending.length > 0 && !finished) {
                callback(new Error('Truncated LZMA2 stream'));
            } else {
                callback(null);
            }
        }
    });
}
151
/**
 * Create an LZMA1 decoder Transform stream.
 *
 * LZMA1 has no chunk boundaries, so every incoming chunk is buffered and the
 * whole payload is decoded in one pass during flush().
 *
 * @param properties - 5-byte LZMA properties
 * @param unpackSize - Expected uncompressed size
 * @returns Transform stream that decompresses LZMA1 data
 */
function createLzmaDecoder(properties, unpackSize) {
    // NOTE(review): this instance is never read after construction —
    // presumably setDecoderProperties() acts as eager validation of
    // `properties`; confirm before removing.
    var decoder = new _LzmaDecoderts.LzmaDecoder();
    decoder.setDecoderProperties(properties);
    var buffered = [];
    var byteTotal = 0;
    return new _extractbaseiterator.Transform({
        transform: function transform(chunk, _encoding, callback) {
            // Accumulate raw input; all decoding is deferred to flush()
            byteTotal += chunk.length;
            buffered.push(chunk);
            callback(null);
        },
        flush: function flush(callback) {
            var _this = this;
            try {
                // One pre-sized buffer + single copy per chunk avoids the
                // extra intermediate allocation Buffer.concat() would make.
                var input = (0, _extractbaseiterator.allocBufferUnsafe)(byteTotal);
                var writePos = 0;
                buffered.forEach(function(piece) {
                    piece.copy(input, writePos);
                    writePos += piece.length;
                });
                // Decoder wired to an OutputSink: every decoded byte is pushed
                // straight downstream rather than buffered and re-copied.
                var sinkDecoder = new _LzmaDecoderts.LzmaDecoder({
                    write: function(out) {
                        return _this.push(out);
                    }
                });
                sinkDecoder.setDecoderProperties(properties);
                sinkDecoder.decodeWithSink(input, 0, unpackSize, false);
                // Drain whatever is still sitting in the OutWindow
                sinkDecoder.flushOutWindow();
                callback(null);
            } catch (err) {
                callback(err);
            }
        }
    });
}
193
+ /* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/stream/transforms.ts"],"sourcesContent":["/**\n * LZMA Transform Stream Wrappers\n *\n * Provides Transform streams for LZMA1 and LZMA2 decompression.\n *\n * LZMA2 streaming works by buffering until a complete chunk is available,\n * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max\n * uncompressed), so memory usage is predictable and bounded.\n *\n * Performance Optimization:\n * - Uses OutputSink pattern for zero-copy output during decode\n * - Each decoded byte written directly to stream (not buffered then copied)\n * - ~4x faster than previous buffering approach\n *\n * True byte-by-byte async LZMA streaming would require rewriting the entire\n * decoder with continuation-passing style, which is complex and not worth\n * the effort given LZMA2's chunked format.\n */\n\nimport { allocBufferUnsafe, Transform } from 'extract-base-iterator';\nimport { hasCompleteChunk } from '../Lzma2ChunkParser.ts';\nimport { LzmaDecoder } from '../sync/LzmaDecoder.ts';\nimport { parseLzma2DictionarySize } from '../types.ts';\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * @param properties - 1-byte LZMA2 properties (dictionary size)\n * @returns Transform stream that decompresses LZMA2 data\n */\nexport function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform> {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n const dictSize = parseLzma2DictionarySize(properties[0]);\n\n // LZMA decoder instance - reused across chunks for solid mode\n const decoder = new LzmaDecoder();\n decoder.setDictionarySize(dictSize);\n\n // Track current LZMA properties\n let propsSet = false;\n\n // Buffer for incomplete chunk data\n let 
pending: Buffer | null = null;\n let finished = false;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n if (finished) {\n callback(null);\n return;\n }\n\n // Combine with pending data\n let input: Buffer;\n if (pending && pending.length > 0) {\n input = Buffer.concat([pending, chunk]);\n pending = null;\n } else {\n input = chunk;\n }\n\n let offset = 0;\n\n try {\n while (offset < input.length && !finished) {\n const result = hasCompleteChunk(input, offset);\n\n if (!result.success) {\n // Need more data\n pending = input.slice(offset);\n break;\n }\n\n const { chunk: chunkInfo, totalSize } = result;\n\n if (chunkInfo.type === 'end') {\n finished = true;\n break;\n }\n\n // Handle dictionary reset\n if (chunkInfo.dictReset) {\n decoder.resetDictionary();\n }\n\n const dataOffset = offset + chunkInfo.headerSize;\n\n if (chunkInfo.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);\n this.push(uncompData);\n\n // Feed uncompressed data to dictionary for subsequent LZMA chunks\n decoder.feedUncompressed(uncompData);\n } else {\n // LZMA compressed chunk\n\n // Variables to store properties (used for both decoders)\n let lc: number;\n let lp: number;\n let pb: number;\n\n // Apply new properties if present\n if (chunkInfo.newProps) {\n ({ lc, lp, pb } = chunkInfo.newProps);\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunkInfo.stateReset) {\n decoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunkInfo.stateReset || (chunkInfo.stateReset && !chunkInfo.dictReset);\n\n 
const compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDictionarySize(dictSize);\n // Preserve properties from main decoder\n streamDecoder.setLcLpPb(lc, lp, pb);\n\n // Use solid mode based on chunk properties\n streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n }\n\n offset += totalSize;\n }\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n if (pending && pending.length > 0 && !finished) {\n callback(new Error('Truncated LZMA2 stream'));\n } else {\n callback(null);\n }\n },\n });\n}\n\n/**\n * Create an LZMA1 decoder Transform stream\n *\n * Note: LZMA1 has no chunk boundaries, so this requires knowing the\n * uncompressed size upfront. 
The stream buffers all input, then\n * decompresses when complete.\n *\n * For true streaming, use LZMA2 which has built-in chunking.\n *\n * Optimization: Pre-allocates input buffer and copies chunks once,\n * avoiding the double-buffering of Buffer.concat().\n *\n * @param properties - 5-byte LZMA properties\n * @param unpackSize - Expected uncompressed size\n * @returns Transform stream that decompresses LZMA1 data\n */\nexport function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform> {\n const decoder = new LzmaDecoder();\n decoder.setDecoderProperties(properties);\n\n const chunks: Buffer[] = [];\n let totalSize = 0;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n chunks.push(chunk);\n totalSize += chunk.length;\n callback(null);\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n try {\n // Optimization: Pre-allocate single buffer instead of Buffer.concat()\n // This reduces peak memory usage by ~50% during concatenation\n const input = allocBufferUnsafe(totalSize);\n let offset = 0;\n\n // Copy each chunk into the pre-allocated buffer\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i];\n chunk.copy(input, offset);\n offset += chunk.length;\n }\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDecoderProperties(properties);\n streamDecoder.decodeWithSink(input, 0, unpackSize, false);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n 
});\n}\n"],"names":["createLzma2Decoder","createLzmaDecoder","properties","length","Error","dictSize","parseLzma2DictionarySize","decoder","LzmaDecoder","setDictionarySize","propsSet","pending","finished","Transform","transform","chunk","_encoding","callback","input","Buffer","concat","offset","result","hasCompleteChunk","success","slice","chunkInfo","totalSize","type","dictReset","resetDictionary","dataOffset","headerSize","uncompData","uncompSize","push","feedUncompressed","lc","lp","pb","newProps","setLcLpPb","stateReset","resetProbabilities","useSolid","compData","compSize","streamDecoder","write","decodeWithSink","flushOutWindow","err","flush","unpackSize","setDecoderProperties","chunks","allocBufferUnsafe","i","copy"],"mappings":"AAAA;;;;;;;;;;;;;;;;;CAiBC;;;;;;;;;;;QAgBeA;eAAAA;;QAoJAC;eAAAA;;;mCAlK6B;kCACZ;6BACL;uBACa;AAWlC,SAASD,mBAAmBE,UAA+B;IAChE,IAAI,CAACA,cAAcA,WAAWC,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAMC,WAAWC,IAAAA,iCAAwB,EAACJ,UAAU,CAAC,EAAE;IAEvD,8DAA8D;IAC9D,IAAMK,UAAU,IAAIC,0BAAW;IAC/BD,QAAQE,iBAAiB,CAACJ;IAE1B,gCAAgC;IAChC,IAAIK,WAAW;IAEf,mCAAmC;IACnC,IAAIC,UAAyB;IAC7B,IAAIC,WAAW;IAEf,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAW,SAAXA,UAA2DC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;;YACjI,IAAIL,UAAU;gBACZK,SAAS;gBACT;YACF;YAEA,4BAA4B;YAC5B,IAAIC;YACJ,IAAIP,WAAWA,QAAQR,MAAM,GAAG,GAAG;gBACjCe,QAAQC,OAAOC,MAAM,CAAC;oBAACT;oBAASI;iBAAM;gBACtCJ,UAAU;YACZ,OAAO;gBACLO,QAAQH;YACV;YAEA,IAAIM,SAAS;YAEb,IAAI;gBACF,MAAOA,SAASH,MAAMf,MAAM,IAAI,CAACS,SAAU;oBACzC,IAAMU,SAASC,IAAAA,oCAAgB,EAACL,OAAOG;oBAEvC,IAAI,CAACC,OAAOE,OAAO,EAAE;wBACnB,iBAAiB;wBACjBb,UAAUO,MAAMO,KAAK,CAACJ;wBACtB;oBACF;oBAEA,IAAQN,AAAOW,YAAyBJ,OAAhCP,OAAkBY,YAAcL,OAAdK;oBAE1B,IAAID,UAAUE,IAAI,KAAK,OAAO;wBAC5BhB,WAAW;wBACX;oBACF;oBAEA,0BAA0B;oBAC1B,IAAIc,UAAUG,SAAS,EAAE;wBACvBtB,QAAQuB,eAAe;oBACzB;oBAEA,IAAMC,aAAaV,SAASK,UAAUM,UAAU;oBAEhD,IAAIN,UAAUE,IAAI,KAAK,gBAAgB;wBACrC,IAAMK,aAAaf,MAAMO,KAAK,CAACM,YAAYA,aAAaL,UAAUQ,UAAU;wBAC5E,IAAI,CAACC,IAAI,CAACF;wBAEV,kEAAkE;wBAClE1B,QAAQ6B,gBAAgB,CAACH;oBAC
3B,OAAO;wBACL,wBAAwB;wBAExB,yDAAyD;wBACzD,IAAII,KAAAA,KAAAA;wBACJ,IAAIC,KAAAA,KAAAA;wBACJ,IAAIC,KAAAA,KAAAA;wBAEJ,kCAAkC;wBAClC,IAAIb,UAAUc,QAAQ,EAAE;;kCACJd,UAAUc,QAAQ,EAAjCH,SAAAA,IAAIC,SAAAA,IAAIC,SAAAA;4BACX,IAAI,CAAChC,QAAQkC,SAAS,CAACJ,IAAIC,IAAIC,KAAK;gCAClC,MAAM,IAAInC,MAAM,AAAC,+BAAuCkC,OAATD,IAAG,QAAeE,OAATD,IAAG,QAAS,OAAHC;4BACnE;4BACA7B,WAAW;wBACb;wBAEA,IAAI,CAACA,UAAU;4BACb,MAAM,IAAIN,MAAM;wBAClB;wBAEA,qCAAqC;wBACrC,IAAIsB,UAAUgB,UAAU,EAAE;4BACxBnC,QAAQoC,kBAAkB;wBAC5B;wBAEA,0GAA0G;wBAC1G,IAAMC,WAAW,CAAClB,UAAUgB,UAAU,IAAKhB,UAAUgB,UAAU,IAAI,CAAChB,UAAUG,SAAS;wBAEvF,IAAMgB,WAAW3B,MAAMO,KAAK,CAACM,YAAYA,aAAaL,UAAUoB,QAAQ;wBAExE,2DAA2D;wBAC3D,+CAA+C;wBAC/C,IAAMC,gBAAgB,IAAIvC,0BAAW,CAAC;4BACpCwC,OAAO,SAACjC;uCAAkB,MAAKoB,IAAI,CAACpB;;wBACtC;wBACAgC,cAActC,iBAAiB,CAACJ;wBAChC,wCAAwC;wBACxC0C,cAAcN,SAAS,CAACJ,IAAIC,IAAIC;wBAEhC,2CAA2C;wBAC3CQ,cAAcE,cAAc,CAACJ,UAAU,GAAGnB,UAAUQ,UAAU,EAAEU;wBAEhE,4CAA4C;wBAC5CG,cAAcG,cAAc;oBAC9B;oBAEA7B,UAAUM;gBACZ;gBAEAV,SAAS;YACX,EAAE,OAAOkC,KAAK;gBACZlC,SAASkC;YACX;QACF;QAEAC,OAAO,SAAPA,MAAuDnC,QAAsC;YAC3F,IAAIN,WAAWA,QAAQR,MAAM,GAAG,KAAK,CAACS,UAAU;gBAC9CK,SAAS,IAAIb,MAAM;YACrB,OAAO;gBACLa,SAAS;YACX;QACF;IACF;AACF;AAkBO,SAAShB,kBAAkBC,UAA+B,EAAEmD,UAAkB;IACnF,IAAM9C,UAAU,IAAIC,0BAAW;IAC/BD,QAAQ+C,oBAAoB,CAACpD;IAE7B,IAAMqD,SAAmB,EAAE;IAC3B,IAAI5B,YAAY;IAEhB,OAAO,IAAId,8BAAS,CAAC;QACnBC,WAAW,SAAXA,UAA2DC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;YACjIsC,OAAOpB,IAAI,CAACpB;YACZY,aAAaZ,MAAMZ,MAAM;YACzBc,SAAS;QACX;QAEAmC,OAAO,SAAPA,MAAuDnC,QAAsC;;YAC3F,IAAI;gBACF,sEAAsE;gBACtE,8DAA8D;gBAC9D,IAAMC,QAAQsC,IAAAA,sCAAiB,EAAC7B;gBAChC,IAAIN,SAAS;gBAEb,gDAAgD;gBAChD,IAAK,IAAIoC,IAAI,GAAGA,IAAIF,OAAOpD,MAAM,EAAEsD,IAAK;oBACtC,IAAM1C,QAAQwC,MAAM,CAACE,EAAE;oBACvB1C,MAAM2C,IAAI,CAACxC,OAAOG;oBAClBA,UAAUN,MAAMZ,MAAM;gBACxB;gBAEA,2DAA2D;gBAC3D,+CAA+C;gBAC/C,IAAM4C,gBAAgB,IAAIvC,0BAAW,CAAC;oBACpCwC,OAAO,SAACjC;+BAAkB,MAAKoB,IAAI,CAACpB;;gBACtC;gBACAgC,cAAcO,oBAAoB,CAACpD;gBACnC6C,cAAcE,cAAc,CAAC/B,OAAO,GAAGmC,YAAY;gBAEnD,4CAA4C;gBAC5CN,cAAcG,cAAc;gBA
E5BjC,SAAS;YACX,EAAE,OAAOkC,KAAK;gBACZlC,SAASkC;YACX;QACF;IACF;AACF"}