xz-compat 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +42 -0
- package/README.md +248 -0
- package/dist/cjs/compat.d.cts +1 -0
- package/dist/cjs/compat.d.ts +1 -0
- package/dist/cjs/compat.js +23 -0
- package/dist/cjs/compat.js.map +1 -0
- package/dist/cjs/filters/bcj/Bcj.d.cts +16 -0
- package/dist/cjs/filters/bcj/Bcj.d.ts +16 -0
- package/dist/cjs/filters/bcj/Bcj.js +192 -0
- package/dist/cjs/filters/bcj/Bcj.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArm.d.cts +16 -0
- package/dist/cjs/filters/bcj/BcjArm.d.ts +16 -0
- package/dist/cjs/filters/bcj/BcjArm.js +122 -0
- package/dist/cjs/filters/bcj/BcjArm.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArm64.d.cts +21 -0
- package/dist/cjs/filters/bcj/BcjArm64.d.ts +21 -0
- package/dist/cjs/filters/bcj/BcjArm64.js +65 -0
- package/dist/cjs/filters/bcj/BcjArm64.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArmt.d.cts +19 -0
- package/dist/cjs/filters/bcj/BcjArmt.d.ts +19 -0
- package/dist/cjs/filters/bcj/BcjArmt.js +76 -0
- package/dist/cjs/filters/bcj/BcjArmt.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjIa64.d.cts +15 -0
- package/dist/cjs/filters/bcj/BcjIa64.d.ts +15 -0
- package/dist/cjs/filters/bcj/BcjIa64.js +141 -0
- package/dist/cjs/filters/bcj/BcjIa64.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjPpc.d.cts +20 -0
- package/dist/cjs/filters/bcj/BcjPpc.d.ts +20 -0
- package/dist/cjs/filters/bcj/BcjPpc.js +64 -0
- package/dist/cjs/filters/bcj/BcjPpc.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjSparc.d.cts +19 -0
- package/dist/cjs/filters/bcj/BcjSparc.d.ts +19 -0
- package/dist/cjs/filters/bcj/BcjSparc.js +69 -0
- package/dist/cjs/filters/bcj/BcjSparc.js.map +1 -0
- package/dist/cjs/filters/delta/Delta.d.cts +16 -0
- package/dist/cjs/filters/delta/Delta.d.ts +16 -0
- package/dist/cjs/filters/delta/Delta.js +74 -0
- package/dist/cjs/filters/delta/Delta.js.map +1 -0
- package/dist/cjs/filters/index.d.cts +8 -0
- package/dist/cjs/filters/index.d.ts +8 -0
- package/dist/cjs/filters/index.js +27 -0
- package/dist/cjs/filters/index.js.map +1 -0
- package/dist/cjs/index.d.cts +4 -0
- package/dist/cjs/index.d.ts +4 -0
- package/dist/cjs/index.js +58 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +31 -0
- package/dist/cjs/lzma/index.d.ts +31 -0
- package/dist/cjs/lzma/index.js +83 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +46 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +46 -0
- package/dist/cjs/lzma/stream/transforms.js +193 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +63 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +63 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +231 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +97 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +97 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +582 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +117 -0
- package/dist/cjs/lzma/types.d.ts +117 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/package.json +1 -0
- package/dist/cjs/utils/createBufferingDecoder.d.cts +10 -0
- package/dist/cjs/utils/createBufferingDecoder.d.ts +10 -0
- package/dist/cjs/utils/createBufferingDecoder.js +41 -0
- package/dist/cjs/utils/createBufferingDecoder.js.map +1 -0
- package/dist/cjs/xz/Decoder.d.cts +21 -0
- package/dist/cjs/xz/Decoder.d.ts +21 -0
- package/dist/cjs/xz/Decoder.js +325 -0
- package/dist/cjs/xz/Decoder.js.map +1 -0
- package/dist/esm/compat.d.ts +1 -0
- package/dist/esm/compat.js +7 -0
- package/dist/esm/compat.js.map +1 -0
- package/dist/esm/filters/bcj/Bcj.d.ts +16 -0
- package/dist/esm/filters/bcj/Bcj.js +184 -0
- package/dist/esm/filters/bcj/Bcj.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArm.d.ts +16 -0
- package/dist/esm/filters/bcj/BcjArm.js +114 -0
- package/dist/esm/filters/bcj/BcjArm.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArm64.d.ts +21 -0
- package/dist/esm/filters/bcj/BcjArm64.js +57 -0
- package/dist/esm/filters/bcj/BcjArm64.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArmt.d.ts +19 -0
- package/dist/esm/filters/bcj/BcjArmt.js +66 -0
- package/dist/esm/filters/bcj/BcjArmt.js.map +1 -0
- package/dist/esm/filters/bcj/BcjIa64.d.ts +15 -0
- package/dist/esm/filters/bcj/BcjIa64.js +127 -0
- package/dist/esm/filters/bcj/BcjIa64.js.map +1 -0
- package/dist/esm/filters/bcj/BcjPpc.d.ts +20 -0
- package/dist/esm/filters/bcj/BcjPpc.js +55 -0
- package/dist/esm/filters/bcj/BcjPpc.js.map +1 -0
- package/dist/esm/filters/bcj/BcjSparc.d.ts +19 -0
- package/dist/esm/filters/bcj/BcjSparc.js +59 -0
- package/dist/esm/filters/bcj/BcjSparc.js.map +1 -0
- package/dist/esm/filters/delta/Delta.d.ts +16 -0
- package/dist/esm/filters/delta/Delta.js +66 -0
- package/dist/esm/filters/delta/Delta.js.map +1 -0
- package/dist/esm/filters/index.d.ts +8 -0
- package/dist/esm/filters/index.js +9 -0
- package/dist/esm/filters/index.js.map +1 -0
- package/dist/esm/index.d.ts +4 -0
- package/dist/esm/index.js +5 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +31 -0
- package/dist/esm/lzma/index.js +44 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +46 -0
- package/dist/esm/lzma/stream/transforms.js +190 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +63 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +211 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +97 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +545 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +117 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/package.json +1 -0
- package/dist/esm/utils/createBufferingDecoder.d.ts +10 -0
- package/dist/esm/utils/createBufferingDecoder.js +30 -0
- package/dist/esm/utils/createBufferingDecoder.js.map +1 -0
- package/dist/esm/xz/Decoder.d.ts +21 -0
- package/dist/esm/xz/Decoder.js +313 -0
- package/dist/esm/xz/Decoder.js.map +1 -0
- package/package.json +75 -0
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LZMA Transform Stream Wrappers
|
|
3
|
+
*
|
|
4
|
+
* Provides Transform streams for LZMA1 and LZMA2 decompression.
|
|
5
|
+
*
|
|
6
|
+
* LZMA2 streaming works by buffering until a complete chunk is available,
|
|
7
|
+
* then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
|
|
8
|
+
* uncompressed), so memory usage is predictable and bounded.
|
|
9
|
+
*
|
|
10
|
+
* Performance Optimization:
|
|
11
|
+
* - Uses OutputSink pattern for zero-copy output during decode
|
|
12
|
+
* - Each decoded byte written directly to stream (not buffered then copied)
|
|
13
|
+
* - ~4x faster than previous buffering approach
|
|
14
|
+
*
|
|
15
|
+
* True byte-by-byte async LZMA streaming would require rewriting the entire
|
|
16
|
+
* decoder with continuation-passing style, which is complex and not worth
|
|
17
|
+
* the effort given LZMA2's chunked format.
|
|
18
|
+
*/ import { allocBufferUnsafe, Transform } from 'extract-base-iterator';
|
|
19
|
+
import { hasCompleteChunk } from '../Lzma2ChunkParser.js';
|
|
20
|
+
import { LzmaDecoder } from '../sync/LzmaDecoder.js';
|
|
21
|
+
import { parseLzma2DictionarySize } from '../types.js';
|
|
22
|
+
/**
 * Create an LZMA2 decoder Transform stream
 *
 * This is a streaming decoder that processes LZMA2 chunks incrementally.
 * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
 *
 * @param properties - 1-byte LZMA2 properties (dictionary size)
 * @returns Transform stream that decompresses LZMA2 data
 */ export function createLzma2Decoder(properties) {
    if (!properties || properties.length < 1) {
        throw new Error('LZMA2 requires properties byte');
    }
    const dictSize = parseLzma2DictionarySize(properties[0]);
    // LZMA decoder instance - reused across chunks for solid mode
    const decoder = new LzmaDecoder();
    decoder.setDictionarySize(dictSize);
    // Track current LZMA properties.
    // FIX: lc/lp/pb live in the closure so a compressed chunk that carries no
    // newProps reuses the properties set by an earlier chunk. The original
    // re-declared them inside the loop body, so such chunks called
    // streamDecoder.setLcLpPb(undefined, undefined, undefined).
    let propsSet = false;
    let lc = 0;
    let lp = 0;
    let pb = 0;
    // Buffer for incomplete chunk data carried over between transform() calls
    let pending = null;
    let finished = false;
    return new Transform({
        transform: function(chunk, _encoding, callback) {
            // Ignore anything after the LZMA2 end marker
            if (finished) {
                callback(null);
                return;
            }
            // Combine with pending data from the previous call
            let input;
            if (pending && pending.length > 0) {
                input = Buffer.concat([
                    pending,
                    chunk
                ]);
                pending = null;
            } else {
                input = chunk;
            }
            let offset = 0;
            try {
                while(offset < input.length && !finished){
                    const result = hasCompleteChunk(input, offset);
                    if (!result.success) {
                        // Need more data - stash the partial chunk for the next call
                        pending = input.slice(offset);
                        break;
                    }
                    const { chunk: chunkInfo, totalSize } = result;
                    if (chunkInfo.type === 'end') {
                        finished = true;
                        break;
                    }
                    // Handle dictionary reset
                    if (chunkInfo.dictReset) {
                        decoder.resetDictionary();
                    }
                    const dataOffset = offset + chunkInfo.headerSize;
                    if (chunkInfo.type === 'uncompressed') {
                        const uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);
                        this.push(uncompData);
                        // Feed uncompressed data to dictionary for subsequent LZMA chunks
                        decoder.feedUncompressed(uncompData);
                    } else {
                        // LZMA compressed chunk.
                        // Apply new properties if present; otherwise the closure
                        // retains the lc/lp/pb from the previous chunk.
                        if (chunkInfo.newProps) {
                            ({ lc, lp, pb } = chunkInfo.newProps);
                            if (!decoder.setLcLpPb(lc, lp, pb)) {
                                throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
                            }
                            propsSet = true;
                        }
                        if (!propsSet) {
                            throw new Error('LZMA chunk without properties');
                        }
                        // Reset probabilities if state reset
                        if (chunkInfo.stateReset) {
                            decoder.resetProbabilities();
                        }
                        // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
                        const useSolid = !chunkInfo.stateReset || chunkInfo.stateReset && !chunkInfo.dictReset;
                        const compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);
                        // Use OutputSink for direct emission (zero-copy): each decoded
                        // buffer is pushed straight to the stream
                        const streamDecoder = new LzmaDecoder({
                            write: (chunk)=>this.push(chunk)
                        });
                        streamDecoder.setDictionarySize(dictSize);
                        // Apply the current (possibly inherited) properties
                        streamDecoder.setLcLpPb(lc, lp, pb);
                        // Use solid mode based on chunk properties
                        streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);
                        // Flush any remaining data in the OutWindow
                        streamDecoder.flushOutWindow();
                    }
                    offset += totalSize;
                }
                callback(null);
            } catch (err) {
                callback(err);
            }
        },
        flush: function(callback) {
            // Leftover bytes that never formed a complete chunk mean the
            // stream was cut off mid-chunk
            if (pending && pending.length > 0 && !finished) {
                callback(new Error('Truncated LZMA2 stream'));
            } else {
                callback(null);
            }
        }
    });
}
|
|
137
|
+
/**
 * Create an LZMA1 decoder Transform stream.
 *
 * LZMA1 has no chunk boundaries, so the uncompressed size must be known
 * upfront. All input is buffered; decompression happens once the stream
 * ends (for true streaming, use LZMA2, which has built-in chunking).
 *
 * A single output buffer is pre-allocated and each chunk is copied into it
 * exactly once, avoiding the double-buffering cost of Buffer.concat().
 *
 * @param properties - 5-byte LZMA properties
 * @param unpackSize - Expected uncompressed size
 * @returns Transform stream that decompresses LZMA1 data
 */ export function createLzmaDecoder(properties, unpackSize) {
    // Parse the properties eagerly so invalid input fails at construction
    // time rather than at flush
    const decoder = new LzmaDecoder();
    decoder.setDecoderProperties(properties);
    const bufferedChunks = [];
    let bufferedBytes = 0;
    return new Transform({
        transform: function(chunk, _encoding, callback) {
            // No framing in LZMA1 - just accumulate until end-of-stream
            bufferedChunks.push(chunk);
            bufferedBytes += chunk.length;
            callback(null);
        },
        flush: function(callback) {
            try {
                // Copy every buffered chunk once into a single pre-allocated
                // buffer (cheaper than Buffer.concat's extra intermediate copy)
                const input = allocBufferUnsafe(bufferedBytes);
                let writePos = 0;
                for (const piece of bufferedChunks){
                    piece.copy(input, writePos);
                    writePos += piece.length;
                }
                // Decode through an OutputSink that pushes each decoded
                // buffer directly to the stream (zero-copy emission)
                const streamDecoder = new LzmaDecoder({
                    write: (decoded)=>this.push(decoded)
                });
                streamDecoder.setDecoderProperties(properties);
                streamDecoder.decodeWithSink(input, 0, unpackSize, false);
                // Flush whatever is still sitting in the OutWindow
                streamDecoder.flushOutWindow();
                callback(null);
            } catch (err) {
                callback(err);
            }
        }
    });
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/stream/transforms.ts"],"sourcesContent":["/**\n * LZMA Transform Stream Wrappers\n *\n * Provides Transform streams for LZMA1 and LZMA2 decompression.\n *\n * LZMA2 streaming works by buffering until a complete chunk is available,\n * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max\n * uncompressed), so memory usage is predictable and bounded.\n *\n * Performance Optimization:\n * - Uses OutputSink pattern for zero-copy output during decode\n * - Each decoded byte written directly to stream (not buffered then copied)\n * - ~4x faster than previous buffering approach\n *\n * True byte-by-byte async LZMA streaming would require rewriting the entire\n * decoder with continuation-passing style, which is complex and not worth\n * the effort given LZMA2's chunked format.\n */\n\nimport { allocBufferUnsafe, Transform } from 'extract-base-iterator';\nimport { hasCompleteChunk } from '../Lzma2ChunkParser.ts';\nimport { LzmaDecoder } from '../sync/LzmaDecoder.ts';\nimport { parseLzma2DictionarySize } from '../types.ts';\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * @param properties - 1-byte LZMA2 properties (dictionary size)\n * @returns Transform stream that decompresses LZMA2 data\n */\nexport function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform> {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n const dictSize = parseLzma2DictionarySize(properties[0]);\n\n // LZMA decoder instance - reused across chunks for solid mode\n const decoder = new LzmaDecoder();\n decoder.setDictionarySize(dictSize);\n\n // Track current LZMA properties\n let propsSet = false;\n\n // Buffer for incomplete chunk data\n let 
pending: Buffer | null = null;\n let finished = false;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n if (finished) {\n callback(null);\n return;\n }\n\n // Combine with pending data\n let input: Buffer;\n if (pending && pending.length > 0) {\n input = Buffer.concat([pending, chunk]);\n pending = null;\n } else {\n input = chunk;\n }\n\n let offset = 0;\n\n try {\n while (offset < input.length && !finished) {\n const result = hasCompleteChunk(input, offset);\n\n if (!result.success) {\n // Need more data\n pending = input.slice(offset);\n break;\n }\n\n const { chunk: chunkInfo, totalSize } = result;\n\n if (chunkInfo.type === 'end') {\n finished = true;\n break;\n }\n\n // Handle dictionary reset\n if (chunkInfo.dictReset) {\n decoder.resetDictionary();\n }\n\n const dataOffset = offset + chunkInfo.headerSize;\n\n if (chunkInfo.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);\n this.push(uncompData);\n\n // Feed uncompressed data to dictionary for subsequent LZMA chunks\n decoder.feedUncompressed(uncompData);\n } else {\n // LZMA compressed chunk\n\n // Variables to store properties (used for both decoders)\n let lc: number;\n let lp: number;\n let pb: number;\n\n // Apply new properties if present\n if (chunkInfo.newProps) {\n ({ lc, lp, pb } = chunkInfo.newProps);\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunkInfo.stateReset) {\n decoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunkInfo.stateReset || (chunkInfo.stateReset && !chunkInfo.dictReset);\n\n 
const compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDictionarySize(dictSize);\n // Preserve properties from main decoder\n streamDecoder.setLcLpPb(lc, lp, pb);\n\n // Use solid mode based on chunk properties\n streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n }\n\n offset += totalSize;\n }\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n if (pending && pending.length > 0 && !finished) {\n callback(new Error('Truncated LZMA2 stream'));\n } else {\n callback(null);\n }\n },\n });\n}\n\n/**\n * Create an LZMA1 decoder Transform stream\n *\n * Note: LZMA1 has no chunk boundaries, so this requires knowing the\n * uncompressed size upfront. 
The stream buffers all input, then\n * decompresses when complete.\n *\n * For true streaming, use LZMA2 which has built-in chunking.\n *\n * Optimization: Pre-allocates input buffer and copies chunks once,\n * avoiding the double-buffering of Buffer.concat().\n *\n * @param properties - 5-byte LZMA properties\n * @param unpackSize - Expected uncompressed size\n * @returns Transform stream that decompresses LZMA1 data\n */\nexport function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform> {\n const decoder = new LzmaDecoder();\n decoder.setDecoderProperties(properties);\n\n const chunks: Buffer[] = [];\n let totalSize = 0;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n chunks.push(chunk);\n totalSize += chunk.length;\n callback(null);\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n try {\n // Optimization: Pre-allocate single buffer instead of Buffer.concat()\n // This reduces peak memory usage by ~50% during concatenation\n const input = allocBufferUnsafe(totalSize);\n let offset = 0;\n\n // Copy each chunk into the pre-allocated buffer\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i];\n chunk.copy(input, offset);\n offset += chunk.length;\n }\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDecoderProperties(properties);\n streamDecoder.decodeWithSink(input, 0, unpackSize, false);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n 
});\n}\n"],"names":["allocBufferUnsafe","Transform","hasCompleteChunk","LzmaDecoder","parseLzma2DictionarySize","createLzma2Decoder","properties","length","Error","dictSize","decoder","setDictionarySize","propsSet","pending","finished","transform","chunk","_encoding","callback","input","Buffer","concat","offset","result","success","slice","chunkInfo","totalSize","type","dictReset","resetDictionary","dataOffset","headerSize","uncompData","uncompSize","push","feedUncompressed","lc","lp","pb","newProps","setLcLpPb","stateReset","resetProbabilities","useSolid","compData","compSize","streamDecoder","write","decodeWithSink","flushOutWindow","err","flush","createLzmaDecoder","unpackSize","setDecoderProperties","chunks","i","copy"],"mappings":"AAAA;;;;;;;;;;;;;;;;;CAiBC,GAED,SAASA,iBAAiB,EAAEC,SAAS,QAAQ,wBAAwB;AACrE,SAASC,gBAAgB,QAAQ,yBAAyB;AAC1D,SAASC,WAAW,QAAQ,yBAAyB;AACrD,SAASC,wBAAwB,QAAQ,cAAc;AAEvD;;;;;;;;CAQC,GACD,OAAO,SAASC,mBAAmBC,UAA+B;IAChE,IAAI,CAACA,cAAcA,WAAWC,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,MAAMC,WAAWL,yBAAyBE,UAAU,CAAC,EAAE;IAEvD,8DAA8D;IAC9D,MAAMI,UAAU,IAAIP;IACpBO,QAAQC,iBAAiB,CAACF;IAE1B,gCAAgC;IAChC,IAAIG,WAAW;IAEf,mCAAmC;IACnC,IAAIC,UAAyB;IAC7B,IAAIC,WAAW;IAEf,OAAO,IAAIb,UAAU;QACnBc,WAAW,SAAgDC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;YACjI,IAAIJ,UAAU;gBACZI,SAAS;gBACT;YACF;YAEA,4BAA4B;YAC5B,IAAIC;YACJ,IAAIN,WAAWA,QAAQN,MAAM,GAAG,GAAG;gBACjCY,QAAQC,OAAOC,MAAM,CAAC;oBAACR;oBAASG;iBAAM;gBACtCH,UAAU;YACZ,OAAO;gBACLM,QAAQH;YACV;YAEA,IAAIM,SAAS;YAEb,IAAI;gBACF,MAAOA,SAASH,MAAMZ,MAAM,IAAI,CAACO,SAAU;oBACzC,MAAMS,SAASrB,iBAAiBiB,OAAOG;oBAEvC,IAAI,CAACC,OAAOC,OAAO,EAAE;wBACnB,iBAAiB;wBACjBX,UAAUM,MAAMM,KAAK,CAACH;wBACtB;oBACF;oBAEA,MAAM,EAAEN,OAAOU,SAAS,EAAEC,SAAS,EAAE,GAAGJ;oBAExC,IAAIG,UAAUE,IAAI,KAAK,OAAO;wBAC5Bd,WAAW;wBACX;oBACF;oBAEA,0BAA0B;oBAC1B,IAAIY,UAAUG,SAAS,EAAE;wBACvBnB,QAAQoB,eAAe;oBACzB;oBAEA,MAAMC,aAAaT,SAASI,UAAUM,UAAU;oBAEhD,IAAIN,UAAUE,IAAI,KAAK,gBAAgB;wBACrC,MAAMK,aAAad,MAAMM,KAAK,CAACM,YAAYA,aAAaL,UAAUQ,UAAU;wBAC5E,IAAI,CAACC,IAAI,CAACF;
wBAEV,kEAAkE;wBAClEvB,QAAQ0B,gBAAgB,CAACH;oBAC3B,OAAO;wBACL,wBAAwB;wBAExB,yDAAyD;wBACzD,IAAII;wBACJ,IAAIC;wBACJ,IAAIC;wBAEJ,kCAAkC;wBAClC,IAAIb,UAAUc,QAAQ,EAAE;4BACrB,CAAA,EAAEH,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGb,UAAUc,QAAQ,AAAD;4BACnC,IAAI,CAAC9B,QAAQ+B,SAAS,CAACJ,IAAIC,IAAIC,KAAK;gCAClC,MAAM,IAAI/B,MAAM,CAAC,4BAA4B,EAAE6B,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;4BACvE;4BACA3B,WAAW;wBACb;wBAEA,IAAI,CAACA,UAAU;4BACb,MAAM,IAAIJ,MAAM;wBAClB;wBAEA,qCAAqC;wBACrC,IAAIkB,UAAUgB,UAAU,EAAE;4BACxBhC,QAAQiC,kBAAkB;wBAC5B;wBAEA,0GAA0G;wBAC1G,MAAMC,WAAW,CAAClB,UAAUgB,UAAU,IAAKhB,UAAUgB,UAAU,IAAI,CAAChB,UAAUG,SAAS;wBAEvF,MAAMgB,WAAW1B,MAAMM,KAAK,CAACM,YAAYA,aAAaL,UAAUoB,QAAQ;wBAExE,2DAA2D;wBAC3D,+CAA+C;wBAC/C,MAAMC,gBAAgB,IAAI5C,YAAY;4BACpC6C,OAAO,CAAChC,QAAkB,IAAI,CAACmB,IAAI,CAACnB;wBACtC;wBACA+B,cAAcpC,iBAAiB,CAACF;wBAChC,wCAAwC;wBACxCsC,cAAcN,SAAS,CAACJ,IAAIC,IAAIC;wBAEhC,2CAA2C;wBAC3CQ,cAAcE,cAAc,CAACJ,UAAU,GAAGnB,UAAUQ,UAAU,EAAEU;wBAEhE,4CAA4C;wBAC5CG,cAAcG,cAAc;oBAC9B;oBAEA5B,UAAUK;gBACZ;gBAEAT,SAAS;YACX,EAAE,OAAOiC,KAAK;gBACZjC,SAASiC;YACX;QACF;QAEAC,OAAO,SAAgDlC,QAAsC;YAC3F,IAAIL,WAAWA,QAAQN,MAAM,GAAG,KAAK,CAACO,UAAU;gBAC9CI,SAAS,IAAIV,MAAM;YACrB,OAAO;gBACLU,SAAS;YACX;QACF;IACF;AACF;AAEA;;;;;;;;;;;;;;;CAeC,GACD,OAAO,SAASmC,kBAAkB/C,UAA+B,EAAEgD,UAAkB;IACnF,MAAM5C,UAAU,IAAIP;IACpBO,QAAQ6C,oBAAoB,CAACjD;IAE7B,MAAMkD,SAAmB,EAAE;IAC3B,IAAI7B,YAAY;IAEhB,OAAO,IAAI1B,UAAU;QACnBc,WAAW,SAAgDC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;YACjIsC,OAAOrB,IAAI,CAACnB;YACZW,aAAaX,MAAMT,MAAM;YACzBW,SAAS;QACX;QAEAkC,OAAO,SAAgDlC,QAAsC;YAC3F,IAAI;gBACF,sEAAsE;gBACtE,8DAA8D;gBAC9D,MAAMC,QAAQnB,kBAAkB2B;gBAChC,IAAIL,SAAS;gBAEb,gDAAgD;gBAChD,IAAK,IAAImC,IAAI,GAAGA,IAAID,OAAOjD,MAAM,EAAEkD,IAAK;oBACtC,MAAMzC,QAAQwC,MAAM,CAACC,EAAE;oBACvBzC,MAAM0C,IAAI,CAACvC,OAAOG;oBAClBA,UAAUN,MAAMT,MAAM;gBACxB;gBAEA,2DAA2D;gBAC3D,+CAA+C;gBAC/C,MAAMwC,gBAAgB,IAAI5C,YAAY;oBACpC6C,OAAO,CAAChC,QAAkB,IAAI,CAACmB,IAAI,CAACnB;gBACtC;gBACA+B,cAAcQ,oBAAoB,CAACjD;gBACnCyC,cAAcE,cAAc,CAAC9B,OAAO,GAAGmC,YAAY;gBAEnD,4CAA4C;gBAC5CP,cA
AcG,cAAc;gBAE5BhC,SAAS;YACX,EAAE,OAAOiC,KAAK;gBACZjC,SAASiC;YACX;QACF;IACF;AACF"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Synchronous LZMA2 Decoder
|
|
3
|
+
*
|
|
4
|
+
* LZMA2 is a container format that wraps LZMA chunks with framing.
|
|
5
|
+
* Decodes LZMA2 data from a buffer.
|
|
6
|
+
*/
|
|
7
|
+
import { type OutputSink } from '../types.js';
|
|
8
|
+
/**
|
|
9
|
+
* Synchronous LZMA2 decoder
|
|
10
|
+
*/
|
|
11
|
+
export declare class Lzma2Decoder {
|
|
12
|
+
private lzmaDecoder;
|
|
13
|
+
private dictionarySize;
|
|
14
|
+
private propsSet;
|
|
15
|
+
constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink);
|
|
16
|
+
/**
|
|
17
|
+
* Reset the dictionary (for stream boundaries)
|
|
18
|
+
*/
|
|
19
|
+
resetDictionary(): void;
|
|
20
|
+
/**
|
|
21
|
+
* Reset all probability models (for stream boundaries)
|
|
22
|
+
*/
|
|
23
|
+
resetProbabilities(): void;
|
|
24
|
+
/**
|
|
25
|
+
* Set LZMA properties
|
|
26
|
+
*/
|
|
27
|
+
setLcLpPb(lc: number, lp: number, pb: number): boolean;
|
|
28
|
+
/**
|
|
29
|
+
* Feed uncompressed data to the dictionary (for subsequent LZMA chunks)
|
|
30
|
+
*/
|
|
31
|
+
feedUncompressed(data: Buffer): void;
|
|
32
|
+
/**
|
|
33
|
+
* Decode raw LZMA data (used internally for LZMA2 chunks)
|
|
34
|
+
* @param input - LZMA compressed data
|
|
35
|
+
* @param offset - Input offset
|
|
36
|
+
* @param outSize - Expected output size
|
|
37
|
+
* @param solid - Use solid mode
|
|
38
|
+
* @returns Decompressed data
|
|
39
|
+
*/
|
|
40
|
+
decodeLzmaData(input: Buffer, offset: number, outSize: number, solid?: boolean): Buffer;
|
|
41
|
+
/**
|
|
42
|
+
* Decode LZMA2 data with streaming output
|
|
43
|
+
* @param input - LZMA2 compressed data
|
|
44
|
+
* @returns Total number of bytes written to sink
|
|
45
|
+
*/
|
|
46
|
+
decodeWithSink(input: Buffer): number;
|
|
47
|
+
/**
|
|
48
|
+
* Decode LZMA2 data
|
|
49
|
+
* @param input - LZMA2 compressed data
|
|
50
|
+
* @param unpackSize - Expected output size (optional, for pre-allocation)
|
|
51
|
+
* @returns Decompressed data
|
|
52
|
+
*/
|
|
53
|
+
decode(input: Buffer, unpackSize?: number): Buffer;
|
|
54
|
+
}
|
|
55
|
+
/**
|
|
56
|
+
* Decode LZMA2 data synchronously
|
|
57
|
+
* @param input - LZMA2 compressed data
|
|
58
|
+
* @param properties - 1-byte properties (dictionary size)
|
|
59
|
+
* @param unpackSize - Expected output size (optional, autodetects if not provided)
|
|
60
|
+
* @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
|
|
61
|
+
* @returns Decompressed data (or bytes written if outputSink provided)
|
|
62
|
+
*/
|
|
63
|
+
export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number;
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Synchronous LZMA2 Decoder
|
|
3
|
+
*
|
|
4
|
+
* LZMA2 is a container format that wraps LZMA chunks with framing.
|
|
5
|
+
* Decodes LZMA2 data from a buffer.
|
|
6
|
+
*/ import { allocBufferUnsafe } from 'extract-base-iterator';
|
|
7
|
+
import { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.js';
|
|
8
|
+
import { parseLzma2DictionarySize } from '../types.js';
|
|
9
|
+
import { LzmaDecoder } from './LzmaDecoder.js';
|
|
10
|
+
/**
|
|
11
|
+
* Synchronous LZMA2 decoder
|
|
12
|
+
*/ export class Lzma2Decoder {
|
|
13
|
+
/**
|
|
14
|
+
* Reset the dictionary (for stream boundaries)
|
|
15
|
+
*/ resetDictionary() {
|
|
16
|
+
this.lzmaDecoder.resetDictionary();
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Reset all probability models (for stream boundaries)
|
|
20
|
+
*/ resetProbabilities() {
|
|
21
|
+
this.lzmaDecoder.resetProbabilities();
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Set LZMA properties
|
|
25
|
+
*/ setLcLpPb(lc, lp, pb) {
|
|
26
|
+
return this.lzmaDecoder.setLcLpPb(lc, lp, pb);
|
|
27
|
+
}
|
|
28
|
+
/**
|
|
29
|
+
* Feed uncompressed data to the dictionary (for subsequent LZMA chunks)
|
|
30
|
+
*/ feedUncompressed(data) {
|
|
31
|
+
this.lzmaDecoder.feedUncompressed(data);
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* Decode raw LZMA data (used internally for LZMA2 chunks)
|
|
35
|
+
* @param input - LZMA compressed data
|
|
36
|
+
* @param offset - Input offset
|
|
37
|
+
* @param outSize - Expected output size
|
|
38
|
+
* @param solid - Use solid mode
|
|
39
|
+
* @returns Decompressed data
|
|
40
|
+
*/ decodeLzmaData(input, offset, outSize, solid = false) {
|
|
41
|
+
return this.lzmaDecoder.decode(input, offset, outSize, solid);
|
|
42
|
+
}
|
|
43
|
+
/**
|
|
44
|
+
* Decode LZMA2 data with streaming output
|
|
45
|
+
* @param input - LZMA2 compressed data
|
|
46
|
+
* @returns Total number of bytes written to sink
|
|
47
|
+
*/ decodeWithSink(input) {
|
|
48
|
+
let totalBytes = 0;
|
|
49
|
+
let offset = 0;
|
|
50
|
+
while(offset < input.length){
|
|
51
|
+
const result = parseLzma2ChunkHeader(input, offset);
|
|
52
|
+
if (!result.success) {
|
|
53
|
+
throw new Error('Truncated LZMA2 chunk header');
|
|
54
|
+
}
|
|
55
|
+
const chunk = result.chunk;
|
|
56
|
+
if (chunk.type === 'end') {
|
|
57
|
+
break;
|
|
58
|
+
}
|
|
59
|
+
// Validate we have enough data for the chunk
|
|
60
|
+
const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
|
|
61
|
+
if (offset + chunk.headerSize + dataSize > input.length) {
|
|
62
|
+
throw new Error(`Truncated LZMA2 ${chunk.type} data`);
|
|
63
|
+
}
|
|
64
|
+
// Handle dictionary reset
|
|
65
|
+
if (chunk.dictReset) {
|
|
66
|
+
this.lzmaDecoder.resetDictionary();
|
|
67
|
+
}
|
|
68
|
+
const dataOffset = offset + chunk.headerSize;
|
|
69
|
+
if (chunk.type === 'uncompressed') {
|
|
70
|
+
const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);
|
|
71
|
+
// Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it
|
|
72
|
+
this.lzmaDecoder.feedUncompressed(uncompData);
|
|
73
|
+
totalBytes += uncompData.length;
|
|
74
|
+
offset = dataOffset + chunk.uncompSize;
|
|
75
|
+
} else {
|
|
76
|
+
// LZMA compressed chunk
|
|
77
|
+
// Apply new properties if present
|
|
78
|
+
if (chunk.newProps) {
|
|
79
|
+
const { lc, lp, pb } = chunk.newProps;
|
|
80
|
+
if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {
|
|
81
|
+
throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
|
|
82
|
+
}
|
|
83
|
+
this.propsSet = true;
|
|
84
|
+
}
|
|
85
|
+
if (!this.propsSet) {
|
|
86
|
+
throw new Error('LZMA chunk without properties');
|
|
87
|
+
}
|
|
88
|
+
// Reset probabilities if state reset
|
|
89
|
+
if (chunk.stateReset) {
|
|
90
|
+
this.lzmaDecoder.resetProbabilities();
|
|
91
|
+
}
|
|
92
|
+
// Determine solid mode
|
|
93
|
+
const useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
|
|
94
|
+
// Decode LZMA chunk directly to sink
|
|
95
|
+
totalBytes += this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);
|
|
96
|
+
offset = dataOffset + chunk.compSize;
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
// Flush any remaining data in the OutWindow
|
|
100
|
+
this.lzmaDecoder.flushOutWindow();
|
|
101
|
+
return totalBytes;
|
|
102
|
+
}
|
|
103
|
+
/**
|
|
104
|
+
* Decode LZMA2 data
|
|
105
|
+
* @param input - LZMA2 compressed data
|
|
106
|
+
* @param unpackSize - Expected output size (optional, for pre-allocation)
|
|
107
|
+
* @returns Decompressed data
|
|
108
|
+
*/ decode(input, unpackSize) {
|
|
109
|
+
// Pre-allocate output buffer if size is known
|
|
110
|
+
let outputBuffer = null;
|
|
111
|
+
let outputPos = 0;
|
|
112
|
+
const outputChunks = [];
|
|
113
|
+
if (unpackSize && unpackSize > 0) {
|
|
114
|
+
outputBuffer = allocBufferUnsafe(unpackSize);
|
|
115
|
+
}
|
|
116
|
+
let offset = 0;
|
|
117
|
+
while(offset < input.length){
|
|
118
|
+
const result = parseLzma2ChunkHeader(input, offset);
|
|
119
|
+
if (!result.success) {
|
|
120
|
+
throw new Error('Truncated LZMA2 chunk header');
|
|
121
|
+
}
|
|
122
|
+
const chunk = result.chunk;
|
|
123
|
+
if (chunk.type === 'end') {
|
|
124
|
+
break;
|
|
125
|
+
}
|
|
126
|
+
// Validate we have enough data for the chunk
|
|
127
|
+
const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
|
|
128
|
+
if (offset + chunk.headerSize + dataSize > input.length) {
|
|
129
|
+
throw new Error(`Truncated LZMA2 ${chunk.type} data`);
|
|
130
|
+
}
|
|
131
|
+
// Handle dictionary reset
|
|
132
|
+
if (chunk.dictReset) {
|
|
133
|
+
this.lzmaDecoder.resetDictionary();
|
|
134
|
+
}
|
|
135
|
+
const dataOffset = offset + chunk.headerSize;
|
|
136
|
+
if (chunk.type === 'uncompressed') {
|
|
137
|
+
const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);
|
|
138
|
+
// Copy to output
|
|
139
|
+
if (outputBuffer) {
|
|
140
|
+
uncompData.copy(outputBuffer, outputPos);
|
|
141
|
+
outputPos += uncompData.length;
|
|
142
|
+
} else {
|
|
143
|
+
outputChunks.push(uncompData);
|
|
144
|
+
}
|
|
145
|
+
// Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it
|
|
146
|
+
this.lzmaDecoder.feedUncompressed(uncompData);
|
|
147
|
+
offset = dataOffset + chunk.uncompSize;
|
|
148
|
+
} else {
|
|
149
|
+
// LZMA compressed chunk
|
|
150
|
+
// Apply new properties if present
|
|
151
|
+
if (chunk.newProps) {
|
|
152
|
+
const { lc, lp, pb } = chunk.newProps;
|
|
153
|
+
if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {
|
|
154
|
+
throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
|
|
155
|
+
}
|
|
156
|
+
this.propsSet = true;
|
|
157
|
+
}
|
|
158
|
+
if (!this.propsSet) {
|
|
159
|
+
throw new Error('LZMA chunk without properties');
|
|
160
|
+
}
|
|
161
|
+
// Reset probabilities if state reset
|
|
162
|
+
if (chunk.stateReset) {
|
|
163
|
+
this.lzmaDecoder.resetProbabilities();
|
|
164
|
+
}
|
|
165
|
+
// Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
|
|
166
|
+
const useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
|
|
167
|
+
// Decode LZMA chunk
|
|
168
|
+
const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);
|
|
169
|
+
const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);
|
|
170
|
+
// Copy to output
|
|
171
|
+
if (outputBuffer) {
|
|
172
|
+
decoded.copy(outputBuffer, outputPos);
|
|
173
|
+
outputPos += decoded.length;
|
|
174
|
+
} else {
|
|
175
|
+
outputChunks.push(decoded);
|
|
176
|
+
}
|
|
177
|
+
offset = dataOffset + chunk.compSize;
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
// Return pre-allocated buffer or concatenated chunks
|
|
181
|
+
if (outputBuffer) {
|
|
182
|
+
return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
|
|
183
|
+
}
|
|
184
|
+
return Buffer.concat(outputChunks);
|
|
185
|
+
}
|
|
186
|
+
constructor(properties, outputSink){
|
|
187
|
+
if (!properties || properties.length < 1) {
|
|
188
|
+
throw new Error('LZMA2 requires properties byte');
|
|
189
|
+
}
|
|
190
|
+
this.dictionarySize = parseLzma2DictionarySize(properties[0]);
|
|
191
|
+
this.lzmaDecoder = new LzmaDecoder(outputSink);
|
|
192
|
+
this.lzmaDecoder.setDictionarySize(this.dictionarySize);
|
|
193
|
+
this.propsSet = false;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Decode LZMA2 data synchronously
|
|
198
|
+
* @param input - LZMA2 compressed data
|
|
199
|
+
* @param properties - 1-byte properties (dictionary size)
|
|
200
|
+
* @param unpackSize - Expected output size (optional, autodetects if not provided)
|
|
201
|
+
* @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
|
|
202
|
+
* @returns Decompressed data (or bytes written if outputSink provided)
|
|
203
|
+
*/ export function decodeLzma2(input, properties, unpackSize, outputSink) {
|
|
204
|
+
const decoder = new Lzma2Decoder(properties, outputSink);
|
|
205
|
+
if (outputSink) {
|
|
206
|
+
// Zero-copy mode: write to sink during decode
|
|
207
|
+
return decoder.decodeWithSink(input);
|
|
208
|
+
}
|
|
209
|
+
// Buffering mode: returns Buffer (zero-copy)
|
|
210
|
+
return decoder.decode(input, unpackSize);
|
|
211
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/sync/Lzma2Decoder.ts"],"sourcesContent":["/**\n * Synchronous LZMA2 Decoder\n *\n * LZMA2 is a container format that wraps LZMA chunks with framing.\n * Decodes LZMA2 data from a buffer.\n */\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.ts';\nimport { type OutputSink, parseLzma2DictionarySize } from '../types.ts';\nimport { LzmaDecoder } from './LzmaDecoder.ts';\n\n/**\n * Synchronous LZMA2 decoder\n */\nexport class Lzma2Decoder {\n private lzmaDecoder: LzmaDecoder;\n private dictionarySize: number;\n private propsSet: boolean;\n\n constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink) {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n this.dictionarySize = parseLzma2DictionarySize(properties[0]);\n this.lzmaDecoder = new LzmaDecoder(outputSink);\n this.lzmaDecoder.setDictionarySize(this.dictionarySize);\n this.propsSet = false;\n }\n\n /**\n * Reset the dictionary (for stream boundaries)\n */\n resetDictionary(): void {\n this.lzmaDecoder.resetDictionary();\n }\n\n /**\n * Reset all probability models (for stream boundaries)\n */\n resetProbabilities(): void {\n this.lzmaDecoder.resetProbabilities();\n }\n\n /**\n * Set LZMA properties\n */\n setLcLpPb(lc: number, lp: number, pb: number): boolean {\n return this.lzmaDecoder.setLcLpPb(lc, lp, pb);\n }\n\n /**\n * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)\n */\n feedUncompressed(data: Buffer): void {\n this.lzmaDecoder.feedUncompressed(data);\n }\n\n /**\n * Decode raw LZMA data (used internally for LZMA2 chunks)\n * @param input - LZMA compressed data\n * @param offset - Input offset\n * @param outSize - Expected output size\n * @param solid - Use solid mode\n * @returns Decompressed data\n */\n decodeLzmaData(input: Buffer, offset: number, outSize: number, 
solid = false): Buffer {\n return this.lzmaDecoder.decode(input, offset, outSize, solid);\n }\n\n /**\n * Decode LZMA2 data with streaming output\n * @param input - LZMA2 compressed data\n * @returns Total number of bytes written to sink\n */\n decodeWithSink(input: Buffer): number {\n let totalBytes = 0;\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n totalBytes += uncompData.length;\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk directly to sink\n totalBytes += 
this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Flush any remaining data in the OutWindow\n this.lzmaDecoder.flushOutWindow();\n\n return totalBytes;\n }\n\n /**\n * Decode LZMA2 data\n * @param input - LZMA2 compressed data\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\n decode(input: Buffer, unpackSize?: number): Buffer {\n // Pre-allocate output buffer if size is known\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Copy to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks.push(uncompData);\n }\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk\n const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);\n const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);\n\n // Copy to output\n if (outputBuffer) {\n decoded.copy(outputBuffer, outputPos);\n outputPos += decoded.length;\n } else {\n outputChunks.push(decoded);\n }\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n }\n}\n\n/**\n * Decode LZMA2 data synchronously\n * @param input - LZMA2 compressed data\n * @param properties - 1-byte properties (dictionary size)\n * @param unpackSize - Expected output size (optional, autodetects if not provided)\n * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)\n * @returns Decompressed data (or bytes written if outputSink provided)\n */\nexport function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number {\n const decoder = new Lzma2Decoder(properties, outputSink);\n if (outputSink) {\n // Zero-copy mode: write to sink during decode\n return decoder.decodeWithSink(input);\n }\n // Buffering mode: returns Buffer (zero-copy)\n return decoder.decode(input, unpackSize);\n}\n"],"names":["allocBufferUnsafe","parseLzma2ChunkHeader","parseLzma2DictionarySize","LzmaDecoder","Lzma2Decoder","resetDictionary","lzmaDecoder","resetProbabilities","setLcLpPb","lc","lp","pb","feedUncompressed","data","decodeLzmaData","input","offset","outSize","solid","decode","decodeWithSink","totalBytes","length","result","success","Error","chunk","type","dataSize","uncompSize","compSize","headerSize","dictReset","dataOffset","uncompData","slice","newProps","propsSet","stateReset","useSolid","flushOutWindow","unpackSize","outputBuffer","outputPos","outputChunks","copy","push","chunkData","decoded","Buffer","concat","properties","outputSink","dictionarySize","setDictionarySize","decodeLzma2","decoder"],"mappings":"AAAA;;;;;CAKC,GAED,SAASA,iBAAiB,QAAQ,wBAAwB;AAC1D,SAASC,qBAAqB,QAAQ,yBAAyB;AAC/D,SAA0BC,wBAAwB,QAAQ,cAAc;AACxE,SAASC,WAAW,QAAQ,mBAAmB;AAE/C;;CAEC,GACD,OAAO,MAAMC;IAgBX;;GAEC,GACDC,kBAAwB;QACtB,IAAI,CAACC,WAAW,CAACD,eAAe;IAClC;IAEA;;GAEC,GACDE,qBAA2B;QACzB,IAAI,CAACD,WAAW,CAACC,kBAAkB;IACrC;IAEA;;GAEC,GACDC,UAAUC,EAAU,EAAEC,EAAU,EAAEC,EAAU,EAAW;QACrD,OAAO,IAAI,CAAC
L,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC;IAC5C;IAEA;;GAEC,GACDC,iBAAiBC,IAAY,EAAQ;QACnC,IAAI,CAACP,WAAW,CAACM,gBAAgB,CAACC;IACpC;IAEA;;;;;;;GAOC,GACDC,eAAeC,KAAa,EAAEC,MAAc,EAAEC,OAAe,EAAEC,QAAQ,KAAK,EAAU;QACpF,OAAO,IAAI,CAACZ,WAAW,CAACa,MAAM,CAACJ,OAAOC,QAAQC,SAASC;IACzD;IAEA;;;;GAIC,GACDE,eAAeL,KAAa,EAAU;QACpC,IAAIM,aAAa;QACjB,IAAIL,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,kFAAkF;gBAClF,IAAI,CAACvB,WAAW,CAACM,gBAAgB,CAACsB;gBAElCb,cAAca,WAAWZ,MAAM;gBAC/BN,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;gBAEA,uBAAuB;gBACvB,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,qCAAqC;gBACrCX,cAAc,IAAI,CAACf,WAAW,CAACc,cAAc,CAACL,OAAOkB,YAAYP,MAAMG,UAAU,EAAEU;gBAEnFvB,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,4CAA4C;QAC5C,IAAI,CAACxB,WAAW,CAACkC,cAAc;QAE/B,OAAOnB;IACT;IAEA;;;;;GAKC,GACDF,OAAOJ,KAAa,EAAE0B,UAAmB,EAAU;QACjD,8CAA8C;QAC9C,IAAIC,eAA8B;QAClC,IAAIC,YAAY;QAChB,MAAMC,eAAyB,EAAE;QAEjC,IAAIH,cAAcA,aAAa,GAAG;YAChCC,eAA
e1C,kBAAkByC;QACnC;QAEA,IAAIzB,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,iBAAiB;gBACjB,IAAIa,cAAc;oBAChBR,WAAWW,IAAI,CAACH,cAAcC;oBAC9BA,aAAaT,WAAWZ,MAAM;gBAChC,OAAO;oBACLsB,aAAaE,IAAI,CAACZ;gBACpB;gBAEA,kFAAkF;gBAClF,IAAI,CAAC5B,WAAW,CAACM,gBAAgB,CAACsB;gBAElClB,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;gBAEA,0GAA0G;gBAC1G,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,oBAAoB;gBACpB,MAAMe,YAAYhC,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMI,QAAQ;gBACrE,MAAMkB,UAAU,IAAI,CAAC1C,WAAW,CAACa,MAAM,CAAC4B,WAAW,GAAGrB,MAAMG,UAAU,EAAEU;gBAExE,iBAAiB;gBACjB,IAAIG,cAAc;oBAChBM,QAAQH,IAAI,CAACH,cAAcC;oBAC3BA,aAAaK,QAAQ1B,MAAM;gBAC7B,OAAO;oBACLsB,aAAaE,IAAI,CAACE;gBACpB;gBAEAhC,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,qDAAqD;QACrD,IAAIY,cAAc;YAChB,OAAOC,YAAYD,aAAapB,MAAM,GAAGoB,aAAaP,KAAK,CAAC,GAAGQ,aAAaD;QAC9E;QACA,OAAOO,OAAOC,MAAM,CAACN;IACvB;IA3OA,YAAYO,UAA+B,EAAEC,UAAuB,CAAE;QACpE,IAAI,CAACD,cAAcA,WAAW7B,MAAM,GAAG,GAAG;YAC
xC,MAAM,IAAIG,MAAM;QAClB;QAEA,IAAI,CAAC4B,cAAc,GAAGnD,yBAAyBiD,UAAU,CAAC,EAAE;QAC5D,IAAI,CAAC7C,WAAW,GAAG,IAAIH,YAAYiD;QACnC,IAAI,CAAC9C,WAAW,CAACgD,iBAAiB,CAAC,IAAI,CAACD,cAAc;QACtD,IAAI,CAAChB,QAAQ,GAAG;IAClB;AAmOF;AAEA;;;;;;;CAOC,GACD,OAAO,SAASkB,YAAYxC,KAAa,EAAEoC,UAA+B,EAAEV,UAAmB,EAAEW,UAAuB;IACtH,MAAMI,UAAU,IAAIpD,aAAa+C,YAAYC;IAC7C,IAAIA,YAAY;QACd,8CAA8C;QAC9C,OAAOI,QAAQpC,cAAc,CAACL;IAChC;IACA,6CAA6C;IAC7C,OAAOyC,QAAQrC,MAAM,CAACJ,OAAO0B;AAC/B"}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Synchronous LZMA1 Decoder
|
|
3
|
+
*
|
|
4
|
+
* Decodes LZMA1 compressed data from a buffer.
|
|
5
|
+
* All operations are synchronous.
|
|
6
|
+
*/
|
|
7
|
+
import { type OutputSink } from '../types.js';
|
|
8
|
+
/**
|
|
9
|
+
* Synchronous LZMA1 decoder
|
|
10
|
+
*/
|
|
11
|
+
export declare class LzmaDecoder {
|
|
12
|
+
private outWindow;
|
|
13
|
+
private rangeDecoder;
|
|
14
|
+
private isMatchDecoders;
|
|
15
|
+
private isRepDecoders;
|
|
16
|
+
private isRepG0Decoders;
|
|
17
|
+
private isRepG1Decoders;
|
|
18
|
+
private isRepG2Decoders;
|
|
19
|
+
private isRep0LongDecoders;
|
|
20
|
+
private posSlotDecoder;
|
|
21
|
+
private posDecoders;
|
|
22
|
+
private posAlignDecoder;
|
|
23
|
+
private lenDecoder;
|
|
24
|
+
private repLenDecoder;
|
|
25
|
+
private literalDecoder;
|
|
26
|
+
private dictionarySize;
|
|
27
|
+
private dictionarySizeCheck;
|
|
28
|
+
private posStateMask;
|
|
29
|
+
private state;
|
|
30
|
+
private rep0;
|
|
31
|
+
private rep1;
|
|
32
|
+
private rep2;
|
|
33
|
+
private rep3;
|
|
34
|
+
private prevByte;
|
|
35
|
+
private totalPos;
|
|
36
|
+
constructor(outputSink?: OutputSink);
|
|
37
|
+
/**
|
|
38
|
+
* Set dictionary size
|
|
39
|
+
*/
|
|
40
|
+
setDictionarySize(dictionarySize: number): boolean;
|
|
41
|
+
/**
|
|
42
|
+
* Set lc, lp, pb properties
|
|
43
|
+
*/
|
|
44
|
+
setLcLpPb(lc: number, lp: number, pb: number): boolean;
|
|
45
|
+
/**
|
|
46
|
+
* Set decoder properties from 5-byte buffer
|
|
47
|
+
*/
|
|
48
|
+
setDecoderProperties(properties: Buffer | Uint8Array): boolean;
|
|
49
|
+
/**
|
|
50
|
+
* Initialize probability tables
|
|
51
|
+
*/
|
|
52
|
+
private initProbabilities;
|
|
53
|
+
/**
|
|
54
|
+
* Reset probabilities only (for LZMA2 state reset)
|
|
55
|
+
*/
|
|
56
|
+
resetProbabilities(): void;
|
|
57
|
+
/**
|
|
58
|
+
* Reset dictionary position (for LZMA2 dictionary reset)
|
|
59
|
+
*/
|
|
60
|
+
resetDictionary(): void;
|
|
61
|
+
/**
|
|
62
|
+
* Feed uncompressed data into the dictionary (for LZMA2 uncompressed chunks)
|
|
63
|
+
* This updates the sliding window so subsequent LZMA chunks can reference this data.
|
|
64
|
+
*/
|
|
65
|
+
feedUncompressed(data: Buffer): void;
|
|
66
|
+
/**
|
|
67
|
+
* Flush any remaining data in the OutWindow to the sink
|
|
68
|
+
*/
|
|
69
|
+
flushOutWindow(): void;
|
|
70
|
+
/**
|
|
71
|
+
* Decode LZMA data with streaming output (no buffer accumulation)
|
|
72
|
+
* @param input - Compressed input buffer
|
|
73
|
+
* @param inputOffset - Offset into input buffer
|
|
74
|
+
* @param outSize - Expected output size
|
|
75
|
+
* @param solid - If true, preserve state from previous decode
|
|
76
|
+
* @returns Number of bytes written to sink
|
|
77
|
+
*/
|
|
78
|
+
decodeWithSink(input: Buffer, inputOffset: number, outSize: number, solid?: boolean): number;
|
|
79
|
+
/**
|
|
80
|
+
* Decode LZMA data
|
|
81
|
+
* @param input - Compressed input buffer
|
|
82
|
+
* @param inputOffset - Offset into input buffer
|
|
83
|
+
* @param outSize - Expected output size
|
|
84
|
+
* @param solid - If true, preserve state from previous decode
|
|
85
|
+
* @returns Decompressed data
|
|
86
|
+
*/
|
|
87
|
+
decode(input: Buffer, inputOffset: number, outSize: number, solid?: boolean): Buffer;
|
|
88
|
+
}
|
|
89
|
+
/**
|
|
90
|
+
* Decode LZMA1 data synchronously
|
|
91
|
+
* @param input - Compressed data (without 5-byte properties header)
|
|
92
|
+
* @param properties - 5-byte LZMA properties
|
|
93
|
+
* @param outSize - Expected output size
|
|
94
|
+
* @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
|
|
95
|
+
* @returns Decompressed data (or bytes written if outputSink provided)
|
|
96
|
+
*/
|
|
97
|
+
export declare function decodeLzma(input: Buffer, properties: Buffer | Uint8Array, outSize: number, outputSink?: OutputSink): Buffer | number;
|