7z-iterator 1.1.2 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.d.cts +12 -4
- package/dist/cjs/FileEntry.d.ts +12 -4
- package/dist/cjs/FileEntry.js +52 -24
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/SevenZipIterator.d.cts +25 -2
- package/dist/cjs/SevenZipIterator.d.ts +25 -2
- package/dist/cjs/SevenZipIterator.js +68 -21
- package/dist/cjs/SevenZipIterator.js.map +1 -1
- package/dist/cjs/index.d.cts +1 -2
- package/dist/cjs/index.d.ts +1 -2
- package/dist/cjs/index.js +19 -3
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/lib/streamToSource.d.cts +8 -11
- package/dist/cjs/lib/streamToSource.d.ts +8 -11
- package/dist/cjs/lib/streamToSource.js +21 -67
- package/dist/cjs/lib/streamToSource.js.map +1 -1
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +13 -0
- package/dist/cjs/lzma/index.d.ts +13 -0
- package/dist/cjs/lzma/index.js +63 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
- package/dist/cjs/lzma/stream/transforms.js +149 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +110 -0
- package/dist/cjs/lzma/types.d.ts +110 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/nextEntry.js +24 -26
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.js +69 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.js +574 -203
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
- package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
- package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.js +2 -15
- package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
- package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.js +29 -10
- package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/cjs/types.d.cts +2 -16
- package/dist/cjs/types.d.ts +2 -16
- package/dist/cjs/types.js.map +1 -1
- package/dist/esm/FileEntry.d.ts +12 -4
- package/dist/esm/FileEntry.js +52 -26
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/SevenZipIterator.d.ts +25 -2
- package/dist/esm/SevenZipIterator.js +69 -22
- package/dist/esm/SevenZipIterator.js.map +1 -1
- package/dist/esm/index.d.ts +1 -2
- package/dist/esm/index.js +2 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/streamToSource.d.ts +8 -11
- package/dist/esm/lib/streamToSource.js +22 -68
- package/dist/esm/lib/streamToSource.js.map +1 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +13 -0
- package/dist/esm/lzma/index.js +15 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +38 -0
- package/dist/esm/lzma/stream/transforms.js +150 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +110 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/nextEntry.js +24 -26
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/esm/sevenz/ArchiveSource.js +70 -1
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/esm/sevenz/SevenZipParser.js +414 -198
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/BZip2.js +2 -1
- package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Bcj.js +106 -6
- package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
- package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/esm/sevenz/codecs/Copy.js +1 -9
- package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
- package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/esm/sevenz/codecs/Deflate.js +9 -7
- package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Delta.js +33 -8
- package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/esm/sevenz/codecs/Lzma.js +17 -24
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/esm/types.d.ts +2 -16
- package/dist/esm/types.js.map +1 -1
- package/package.json +3 -3
- package/assets/lzma-purejs/LICENSE +0 -11
- package/assets/lzma-purejs/index.js +0 -19
- package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
- package/assets/lzma-purejs/lib/LZ.js +0 -6
- package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
- package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
- package/assets/lzma-purejs/lib/LZMA.js +0 -6
- package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
- package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
- package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
- package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
- package/assets/lzma-purejs/lib/Stream.js +0 -41
- package/assets/lzma-purejs/lib/Util.js +0 -114
- package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
- package/assets/lzma-purejs/package-lock.json +0 -13
- package/assets/lzma-purejs/package.json +0 -8
@@ -0,0 +1,38 @@
+/**
+ * LZMA Transform Stream Wrappers
+ *
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
+ *
+ * LZMA2 streaming works by buffering until a complete chunk is available,
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
+ * uncompressed), so memory usage is predictable and bounded.
+ *
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
+ * decoder with continuation-passing style, which is complex and not worth
+ * the effort given LZMA2's chunked format.
+ */
+import { Transform } from 'extract-base-iterator';
+/**
+ * Create an LZMA2 decoder Transform stream
+ *
+ * This is a streaming decoder that processes LZMA2 chunks incrementally.
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
+ *
+ * @param properties - 1-byte LZMA2 properties (dictionary size)
+ * @returns Transform stream that decompresses LZMA2 data
+ */
+export declare function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform>;
+/**
+ * Create an LZMA1 decoder Transform stream
+ *
+ * Note: LZMA1 has no chunk boundaries, so this requires knowing the
+ * uncompressed size upfront. The stream buffers all input, then
+ * decompresses when complete.
+ *
+ * For true streaming, use LZMA2 which has built-in chunking.
+ *
+ * @param properties - 5-byte LZMA properties
+ * @param unpackSize - Expected uncompressed size
+ * @returns Transform stream that decompresses LZMA1 data
+ */
+export declare function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform>;
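
For orientation, a minimal usage sketch of the createLzma2Decoder API declared above. This is not taken from the package: it assumes the module is reachable at its dist path (the published exports may differ), that the returned Transform behaves like a standard Node stream, and that the 1-byte LZMA2 properties value has already been read from the archive's coder attributes. File names and the properties byte are illustrative.

    import { pipeline } from 'stream';
    import { createReadStream, createWriteStream } from 'fs';
    // Hypothetical deep import; the package may re-export this elsewhere.
    import { createLzma2Decoder } from '7z-iterator/dist/esm/lzma/stream/transforms.js';

    const lzma2Props = Buffer.from([0x18]); // illustrative dictionary-size byte from the 7z coder attributes

    pipeline(
        createReadStream('folder.lzma2'),   // raw LZMA2 chunk stream (illustrative file name)
        createLzma2Decoder(lzma2Props),     // decompresses chunk by chunk with bounded memory
        createWriteStream('folder.bin'),
        (err) => {
            if (err) console.error('LZMA2 decode failed', err);
        }
    );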
@@ -0,0 +1,150 @@
+/**
+ * LZMA Transform Stream Wrappers
+ *
+ * Provides Transform streams for LZMA1 and LZMA2 decompression.
+ *
+ * LZMA2 streaming works by buffering until a complete chunk is available,
+ * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
+ * uncompressed), so memory usage is predictable and bounded.
+ *
+ * True byte-by-byte async LZMA streaming would require rewriting the entire
+ * decoder with continuation-passing style, which is complex and not worth
+ * the effort given LZMA2's chunked format.
+ */ import { Transform } from 'extract-base-iterator';
+import { hasCompleteChunk } from '../Lzma2ChunkParser.js';
+import { LzmaDecoder } from '../sync/LzmaDecoder.js';
+import { parseLzma2DictionarySize } from '../types.js';
+/**
+ * Create an LZMA2 decoder Transform stream
+ *
+ * This is a streaming decoder that processes LZMA2 chunks incrementally.
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
+ *
+ * @param properties - 1-byte LZMA2 properties (dictionary size)
+ * @returns Transform stream that decompresses LZMA2 data
+ */ export function createLzma2Decoder(properties) {
+    if (!properties || properties.length < 1) {
+        throw new Error('LZMA2 requires properties byte');
+    }
+    const dictSize = parseLzma2DictionarySize(properties[0]);
+    // LZMA decoder instance - reused across chunks for solid mode
+    const decoder = new LzmaDecoder();
+    decoder.setDictionarySize(dictSize);
+    // Track current LZMA properties
+    let propsSet = false;
+    // Buffer for incomplete chunk data
+    let pending = null;
+    let finished = false;
+    return new Transform({
+        transform: function(chunk, _encoding, callback) {
+            if (finished) {
+                callback(null);
+                return;
+            }
+            // Combine with pending data
+            let input;
+            if (pending && pending.length > 0) {
+                input = Buffer.concat([
+                    pending,
+                    chunk
+                ]);
+                pending = null;
+            } else {
+                input = chunk;
+            }
+            let offset = 0;
+            try {
+                while(offset < input.length && !finished){
+                    const result = hasCompleteChunk(input, offset);
+                    if (!result.success) {
+                        // Need more data
+                        pending = input.slice(offset);
+                        break;
+                    }
+                    const { chunk: chunkInfo, totalSize } = result;
+                    if (chunkInfo.type === 'end') {
+                        finished = true;
+                        break;
+                    }
+                    // Handle dictionary reset
+                    if (chunkInfo.dictReset) {
+                        decoder.resetDictionary();
+                    }
+                    const dataOffset = offset + chunkInfo.headerSize;
+                    if (chunkInfo.type === 'uncompressed') {
+                        const uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);
+                        this.push(uncompData);
+                        // Feed uncompressed data to dictionary for subsequent LZMA chunks
+                        decoder.feedUncompressed(uncompData);
+                    } else {
+                        // LZMA compressed chunk
+                        // Apply new properties if present
+                        if (chunkInfo.newProps) {
+                            const { lc, lp, pb } = chunkInfo.newProps;
+                            if (!decoder.setLcLpPb(lc, lp, pb)) {
+                                throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
+                            }
+                            propsSet = true;
+                        }
+                        if (!propsSet) {
+                            throw new Error('LZMA chunk without properties');
+                        }
+                        // Reset probabilities if state reset
+                        if (chunkInfo.stateReset) {
+                            decoder.resetProbabilities();
+                        }
+                        // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
+                        const useSolid = !chunkInfo.stateReset || chunkInfo.stateReset && !chunkInfo.dictReset;
+                        const compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);
+                        const decoded = decoder.decode(compData, 0, chunkInfo.uncompSize, useSolid);
+                        this.push(decoded);
+                    }
+                    offset += totalSize;
+                }
+                callback(null);
+            } catch (err) {
+                callback(err);
+            }
+        },
+        flush: function(callback) {
+            if (pending && pending.length > 0 && !finished) {
+                callback(new Error('Truncated LZMA2 stream'));
+            } else {
+                callback(null);
+            }
+        }
+    });
+}
+/**
+ * Create an LZMA1 decoder Transform stream
+ *
+ * Note: LZMA1 has no chunk boundaries, so this requires knowing the
+ * uncompressed size upfront. The stream buffers all input, then
+ * decompresses when complete.
+ *
+ * For true streaming, use LZMA2 which has built-in chunking.
+ *
+ * @param properties - 5-byte LZMA properties
+ * @param unpackSize - Expected uncompressed size
+ * @returns Transform stream that decompresses LZMA1 data
+ */ export function createLzmaDecoder(properties, unpackSize) {
+    const decoder = new LzmaDecoder();
+    decoder.setDecoderProperties(properties);
+    const chunks = [];
+    return new Transform({
+        transform: function(chunk, _encoding, callback) {
+            chunks.push(chunk);
+            callback(null);
+        },
+        flush: function(callback) {
+            try {
+                const input = Buffer.concat(chunks);
+                const output = decoder.decode(input, 0, unpackSize, false);
+                this.push(output);
+                callback(null);
+            } catch (err) {
+                callback(err);
+            }
+        }
+    });
+}
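
The LZMA1 wrapper in the hunk above accumulates all input and only decodes in flush(), so the caller must supply the uncompressed size up front. A small sketch of that contract, with illustrative names (lzmaProps, unpackSize, compressed) standing in for values that would come from the archive metadata, and assuming the Transform exposes the usual Node stream events:

    import { createLzmaDecoder } from './transforms.js'; // path within this dist layout

    declare const lzmaProps: Buffer;   // 5-byte LZMA properties from the coder attributes
    declare const unpackSize: number;  // uncompressed size from the archive's unpack info
    declare const compressed: Buffer;  // LZMA1 payload without the properties header

    const decoder = createLzmaDecoder(lzmaProps, unpackSize);
    const parts: Buffer[] = [];
    decoder.on('data', (d: Buffer) => parts.push(d));
    decoder.on('end', () => {
        const output = Buffer.concat(parts); // the whole result arrives once flush() has run
    });
    decoder.end(compressed); // input is buffered internally and decoded at the end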
@@ -0,0 +1 @@
+{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lzma/stream/transforms.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
@@ -0,0 +1,30 @@
+/**
+ * Synchronous LZMA2 Decoder
+ *
+ * LZMA2 is a container format that wraps LZMA chunks with framing.
+ * Decodes LZMA2 data from a buffer.
+ */
+/**
+ * Synchronous LZMA2 decoder
+ */
+export declare class Lzma2Decoder {
+    private lzmaDecoder;
+    private dictionarySize;
+    private propsSet;
+    constructor(properties: Buffer | Uint8Array);
+    /**
+     * Decode LZMA2 data
+     * @param input - LZMA2 compressed data
+     * @param unpackSize - Expected output size (optional, for pre-allocation)
+     * @returns Decompressed data
+     */
+    decode(input: Buffer, unpackSize?: number): Buffer;
+}
+/**
+ * Decode LZMA2 data synchronously
+ * @param input - LZMA2 compressed data
+ * @param properties - 1-byte properties (dictionary size)
+ * @param unpackSize - Expected output size (optional)
+ * @returns Decompressed data
+ */
+export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number): Buffer;
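
A buffer-to-buffer sketch of the synchronous API declared above, in both its helper and class forms. folderData, lzma2Props, and unpackSize are illustrative names for data that would come from the archive; this is not code from the package.

    import { Lzma2Decoder, decodeLzma2 } from './Lzma2Decoder.js';

    declare const folderData: Buffer;  // complete LZMA2 stream for one folder
    declare const lzma2Props: Buffer;  // 1-byte properties (dictionary size)
    declare const unpackSize: number;  // known output size, lets the decoder pre-allocate

    // One-shot helper
    const output = decodeLzma2(folderData, lzma2Props, unpackSize);

    // Equivalent explicit form
    const decoder = new Lzma2Decoder(lzma2Props);
    const sameOutput = decoder.decode(folderData, unpackSize);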
@@ -0,0 +1,115 @@
+/**
+ * Synchronous LZMA2 Decoder
+ *
+ * LZMA2 is a container format that wraps LZMA chunks with framing.
+ * Decodes LZMA2 data from a buffer.
+ */ import { allocBufferUnsafe } from 'extract-base-iterator';
+import { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.js';
+import { parseLzma2DictionarySize } from '../types.js';
+import { LzmaDecoder } from './LzmaDecoder.js';
+/**
+ * Synchronous LZMA2 decoder
+ */ export class Lzma2Decoder {
+    /**
+     * Decode LZMA2 data
+     * @param input - LZMA2 compressed data
+     * @param unpackSize - Expected output size (optional, for pre-allocation)
+     * @returns Decompressed data
+     */ decode(input, unpackSize) {
+        // Pre-allocate output buffer if size is known
+        let outputBuffer = null;
+        let outputPos = 0;
+        const outputChunks = [];
+        if (unpackSize && unpackSize > 0) {
+            outputBuffer = allocBufferUnsafe(unpackSize);
+        }
+        let offset = 0;
+        while(offset < input.length){
+            const result = parseLzma2ChunkHeader(input, offset);
+            if (!result.success) {
+                throw new Error('Truncated LZMA2 chunk header');
+            }
+            const chunk = result.chunk;
+            if (chunk.type === 'end') {
+                break;
+            }
+            // Validate we have enough data for the chunk
+            const dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
+            if (offset + chunk.headerSize + dataSize > input.length) {
+                throw new Error(`Truncated LZMA2 ${chunk.type} data`);
+            }
+            // Handle dictionary reset
+            if (chunk.dictReset) {
+                this.lzmaDecoder.resetDictionary();
+            }
+            const dataOffset = offset + chunk.headerSize;
+            if (chunk.type === 'uncompressed') {
+                const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);
+                // Copy to output
+                if (outputBuffer) {
+                    uncompData.copy(outputBuffer, outputPos);
+                    outputPos += uncompData.length;
+                } else {
+                    outputChunks.push(uncompData);
+                }
+                // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it
+                this.lzmaDecoder.feedUncompressed(uncompData);
+                offset = dataOffset + chunk.uncompSize;
+            } else {
+                // LZMA compressed chunk
+                // Apply new properties if present
+                if (chunk.newProps) {
+                    const { lc, lp, pb } = chunk.newProps;
+                    if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {
+                        throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
+                    }
+                    this.propsSet = true;
+                }
+                if (!this.propsSet) {
+                    throw new Error('LZMA chunk without properties');
+                }
+                // Reset probabilities if state reset
+                if (chunk.stateReset) {
+                    this.lzmaDecoder.resetProbabilities();
+                }
+                // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
+                const useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
+                // Decode LZMA chunk
+                const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);
+                const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);
+                // Copy to output
+                if (outputBuffer) {
+                    decoded.copy(outputBuffer, outputPos);
+                    outputPos += decoded.length;
+                } else {
+                    outputChunks.push(decoded);
+                }
+                offset = dataOffset + chunk.compSize;
+            }
+        }
+        // Return pre-allocated buffer or concatenated chunks
+        if (outputBuffer) {
+            return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
+        }
+        return Buffer.concat(outputChunks);
+    }
+    constructor(properties){
+        if (!properties || properties.length < 1) {
+            throw new Error('LZMA2 requires properties byte');
+        }
+        this.dictionarySize = parseLzma2DictionarySize(properties[0]);
+        this.lzmaDecoder = new LzmaDecoder();
+        this.lzmaDecoder.setDictionarySize(this.dictionarySize);
+        this.propsSet = false;
+    }
+}
+/**
+ * Decode LZMA2 data synchronously
+ * @param input - LZMA2 compressed data
+ * @param properties - 1-byte properties (dictionary size)
+ * @param unpackSize - Expected output size (optional)
+ * @returns Decompressed data
+ */ export function decodeLzma2(input, properties, unpackSize) {
+    const decoder = new Lzma2Decoder(properties);
+    return decoder.decode(input, unpackSize);
+}
@@ -0,0 +1 @@
+{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lzma/sync/Lzma2Decoder.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
@@ -0,0 +1,82 @@
+/**
+ * Synchronous LZMA1 Decoder
+ *
+ * Decodes LZMA1 compressed data from a buffer.
+ * All operations are synchronous.
+ */
+/**
+ * Synchronous LZMA1 decoder
+ */
+export declare class LzmaDecoder {
+    private outWindow;
+    private rangeDecoder;
+    private isMatchDecoders;
+    private isRepDecoders;
+    private isRepG0Decoders;
+    private isRepG1Decoders;
+    private isRepG2Decoders;
+    private isRep0LongDecoders;
+    private posSlotDecoder;
+    private posDecoders;
+    private posAlignDecoder;
+    private lenDecoder;
+    private repLenDecoder;
+    private literalDecoder;
+    private dictionarySize;
+    private dictionarySizeCheck;
+    private posStateMask;
+    private state;
+    private rep0;
+    private rep1;
+    private rep2;
+    private rep3;
+    private prevByte;
+    private totalPos;
+    constructor();
+    /**
+     * Set dictionary size
+     */
+    setDictionarySize(dictionarySize: number): boolean;
+    /**
+     * Set lc, lp, pb properties
+     */
+    setLcLpPb(lc: number, lp: number, pb: number): boolean;
+    /**
+     * Set decoder properties from 5-byte buffer
+     */
+    setDecoderProperties(properties: Buffer | Uint8Array): boolean;
+    /**
+     * Initialize probability tables
+     */
+    private initProbabilities;
+    /**
+     * Reset probabilities only (for LZMA2 state reset)
+     */
+    resetProbabilities(): void;
+    /**
+     * Reset dictionary position (for LZMA2 dictionary reset)
+     */
+    resetDictionary(): void;
+    /**
+     * Feed uncompressed data into the dictionary (for LZMA2 uncompressed chunks)
+     * This updates the sliding window so subsequent LZMA chunks can reference this data.
+     */
+    feedUncompressed(data: Buffer): void;
+    /**
+     * Decode LZMA data
+     * @param input - Compressed input buffer
+     * @param inputOffset - Offset into input buffer
+     * @param outSize - Expected output size
+     * @param solid - If true, preserve state from previous decode
+     * @returns Decompressed data
+     */
+    decode(input: Buffer, inputOffset: number, outSize: number, solid?: boolean): Buffer;
+}
+/**
+ * Decode LZMA1 data synchronously
+ * @param input - Compressed data (without 5-byte properties header)
+ * @param properties - 5-byte LZMA properties
+ * @param outSize - Expected output size
+ * @returns Decompressed data
+ */
+export declare function decodeLzma(input: Buffer, properties: Buffer | Uint8Array, outSize: number): Buffer;
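
The solid flag on decode() is what lets the LZMA2 code earlier in this diff decode consecutive chunks against a shared sliding window: decoder state and dictionary carry over between calls. A sketch of that pattern; the buffers and sizes are illustrative (in the real parser they come from the LZMA2 chunk headers), and this is not code from the package.

    import { LzmaDecoder, decodeLzma } from './LzmaDecoder.js';

    declare const props: Buffer;    // 5-byte LZMA properties (lc/lp/pb + dictionary size)
    declare const payload: Buffer;  // standalone LZMA1 payload, properties header stripped
    declare const outSize: number;  // its uncompressed size

    // One-shot decode of a standalone LZMA1 payload
    const whole = decodeLzma(payload, props, outSize);

    // Chunk-by-chunk decode, as the LZMA2 path uses it
    declare const chunkA: Buffer, chunkB: Buffer;   // consecutive compressed chunks
    declare const sizeA: number, sizeB: number;     // their uncompressed sizes
    const dec = new LzmaDecoder();
    dec.setDecoderProperties(props);
    const outA = dec.decode(chunkA, 0, sizeA, false); // fresh state and dictionary
    const outB = dec.decode(chunkB, 0, sizeB, true);  // solid: matches may reach back into outA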