7z-iterator 1.4.0 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.d.cts +3 -3
- package/dist/cjs/index.d.ts +3 -3
- package/dist/cjs/index.js +7 -38
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/nextEntry.js +1 -2
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +1 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +1 -0
- package/dist/cjs/sevenz/ArchiveSource.js +23 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.js +22 -3
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/index.js +52 -30
- package/dist/cjs/sevenz/codecs/index.js.map +1 -1
- package/dist/cjs/sevenz/constants.d.cts +1 -0
- package/dist/cjs/sevenz/constants.d.ts +1 -0
- package/dist/cjs/sevenz/constants.js +1 -0
- package/dist/cjs/sevenz/constants.js.map +1 -1
- package/dist/esm/index.d.ts +3 -3
- package/dist/esm/index.js +4 -3
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/nextEntry.js +1 -2
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +1 -0
- package/dist/esm/sevenz/ArchiveSource.js +23 -0
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.js +22 -3
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/index.js +32 -10
- package/dist/esm/sevenz/codecs/index.js.map +1 -1
- package/dist/esm/sevenz/constants.d.ts +1 -0
- package/dist/esm/sevenz/constants.js +1 -0
- package/dist/esm/sevenz/constants.js.map +1 -1
- package/package.json +3 -3
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +0 -73
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +0 -73
- package/dist/cjs/lzma/Lzma2ChunkParser.js +0 -148
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +0 -1
- package/dist/cjs/lzma/index.d.cts +0 -31
- package/dist/cjs/lzma/index.d.ts +0 -31
- package/dist/cjs/lzma/index.js +0 -83
- package/dist/cjs/lzma/index.js.map +0 -1
- package/dist/cjs/lzma/stream/transforms.d.cts +0 -46
- package/dist/cjs/lzma/stream/transforms.d.ts +0 -46
- package/dist/cjs/lzma/stream/transforms.js +0 -209
- package/dist/cjs/lzma/stream/transforms.js.map +0 -1
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +0 -63
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +0 -63
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +0 -231
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +0 -1
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +0 -97
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +0 -97
- package/dist/cjs/lzma/sync/LzmaDecoder.js +0 -580
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +0 -1
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +0 -69
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +0 -69
- package/dist/cjs/lzma/sync/RangeDecoder.js +0 -162
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +0 -1
- package/dist/cjs/lzma/types.d.cts +0 -117
- package/dist/cjs/lzma/types.d.ts +0 -117
- package/dist/cjs/lzma/types.js +0 -264
- package/dist/cjs/lzma/types.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +0 -16
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +0 -16
- package/dist/cjs/sevenz/codecs/Bcj.js +0 -183
- package/dist/cjs/sevenz/codecs/Bcj.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm.js +0 -104
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArm64.d.cts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm64.d.ts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm64.js +0 -65
- package/dist/cjs/sevenz/codecs/BcjArm64.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArmt.d.cts +0 -19
- package/dist/cjs/sevenz/codecs/BcjArmt.d.ts +0 -19
- package/dist/cjs/sevenz/codecs/BcjArmt.js +0 -76
- package/dist/cjs/sevenz/codecs/BcjArmt.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjIa64.d.cts +0 -15
- package/dist/cjs/sevenz/codecs/BcjIa64.d.ts +0 -15
- package/dist/cjs/sevenz/codecs/BcjIa64.js +0 -141
- package/dist/cjs/sevenz/codecs/BcjIa64.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjPpc.d.cts +0 -20
- package/dist/cjs/sevenz/codecs/BcjPpc.d.ts +0 -20
- package/dist/cjs/sevenz/codecs/BcjPpc.js +0 -64
- package/dist/cjs/sevenz/codecs/BcjPpc.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjSparc.d.cts +0 -19
- package/dist/cjs/sevenz/codecs/BcjSparc.d.ts +0 -19
- package/dist/cjs/sevenz/codecs/BcjSparc.js +0 -69
- package/dist/cjs/sevenz/codecs/BcjSparc.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +0 -16
- package/dist/cjs/sevenz/codecs/Delta.d.ts +0 -16
- package/dist/cjs/sevenz/codecs/Delta.js +0 -74
- package/dist/cjs/sevenz/codecs/Delta.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +0 -17
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +0 -17
- package/dist/cjs/sevenz/codecs/Lzma.js +0 -40
- package/dist/cjs/sevenz/codecs/Lzma.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +0 -20
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +0 -20
- package/dist/cjs/sevenz/codecs/Lzma2.js +0 -42
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +0 -1
- package/dist/cjs/xz/Decoder.d.cts +0 -25
- package/dist/cjs/xz/Decoder.d.ts +0 -25
- package/dist/cjs/xz/Decoder.js +0 -194
- package/dist/cjs/xz/Decoder.js.map +0 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +0 -73
- package/dist/esm/lzma/Lzma2ChunkParser.js +0 -137
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +0 -1
- package/dist/esm/lzma/index.d.ts +0 -31
- package/dist/esm/lzma/index.js +0 -44
- package/dist/esm/lzma/index.js.map +0 -1
- package/dist/esm/lzma/stream/transforms.d.ts +0 -46
- package/dist/esm/lzma/stream/transforms.js +0 -189
- package/dist/esm/lzma/stream/transforms.js.map +0 -1
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +0 -63
- package/dist/esm/lzma/sync/Lzma2Decoder.js +0 -211
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +0 -1
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +0 -97
- package/dist/esm/lzma/sync/LzmaDecoder.js +0 -543
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +0 -1
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +0 -69
- package/dist/esm/lzma/sync/RangeDecoder.js +0 -132
- package/dist/esm/lzma/sync/RangeDecoder.js.map +0 -1
- package/dist/esm/lzma/types.d.ts +0 -117
- package/dist/esm/lzma/types.js +0 -154
- package/dist/esm/lzma/types.js.map +0 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +0 -16
- package/dist/esm/sevenz/codecs/Bcj.js +0 -175
- package/dist/esm/sevenz/codecs/Bcj.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +0 -21
- package/dist/esm/sevenz/codecs/BcjArm.js +0 -101
- package/dist/esm/sevenz/codecs/BcjArm.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArm64.d.ts +0 -21
- package/dist/esm/sevenz/codecs/BcjArm64.js +0 -57
- package/dist/esm/sevenz/codecs/BcjArm64.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArmt.d.ts +0 -19
- package/dist/esm/sevenz/codecs/BcjArmt.js +0 -66
- package/dist/esm/sevenz/codecs/BcjArmt.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjIa64.d.ts +0 -15
- package/dist/esm/sevenz/codecs/BcjIa64.js +0 -127
- package/dist/esm/sevenz/codecs/BcjIa64.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjPpc.d.ts +0 -20
- package/dist/esm/sevenz/codecs/BcjPpc.js +0 -55
- package/dist/esm/sevenz/codecs/BcjPpc.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjSparc.d.ts +0 -19
- package/dist/esm/sevenz/codecs/BcjSparc.js +0 -59
- package/dist/esm/sevenz/codecs/BcjSparc.js.map +0 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +0 -16
- package/dist/esm/sevenz/codecs/Delta.js +0 -66
- package/dist/esm/sevenz/codecs/Delta.js.map +0 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +0 -17
- package/dist/esm/sevenz/codecs/Lzma.js +0 -33
- package/dist/esm/sevenz/codecs/Lzma.js.map +0 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +0 -20
- package/dist/esm/sevenz/codecs/Lzma2.js +0 -38
- package/dist/esm/sevenz/codecs/Lzma2.js.map +0 -1
- package/dist/esm/xz/Decoder.d.ts +0 -25
- package/dist/esm/xz/Decoder.js +0 -185
- package/dist/esm/xz/Decoder.js.map +0 -1

package/dist/cjs/lzma/stream/transforms.js
@@ -1,209 +0,0 @@
-/**
- * LZMA Transform Stream Wrappers
- *
- * Provides Transform streams for LZMA1 and LZMA2 decompression.
- *
- * LZMA2 streaming works by buffering until a complete chunk is available,
- * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max
- * uncompressed), so memory usage is predictable and bounded.
- *
- * Performance Optimization:
- * - Uses OutputSink pattern for zero-copy output during decode
- * - Each decoded byte written directly to stream (not buffered then copied)
- * - ~4x faster than previous buffering approach
- *
- * True byte-by-byte async LZMA streaming would require rewriting the entire
- * decoder with continuation-passing style, which is complex and not worth
- * the effort given LZMA2's chunked format.
- */ "use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createLzma2Decoder () {
-        return createLzma2Decoder;
-    },
-    get createLzmaDecoder () {
-        return createLzmaDecoder;
-    }
-});
-var _extractbaseiterator = require("extract-base-iterator");
-var _Lzma2ChunkParserts = require("../Lzma2ChunkParser.js");
-var _LzmaDecoderts = require("../sync/LzmaDecoder.js");
-var _typests = require("../types.js");
-function createLzma2Decoder(properties) {
-    if (!properties || properties.length < 1) {
-        throw new Error('LZMA2 requires properties byte');
-    }
-    var dictSize = (0, _typests.parseLzma2DictionarySize)(properties[0]);
-    // LZMA decoder instance - reused across chunks for solid mode
-    var decoder = new _LzmaDecoderts.LzmaDecoder();
-    decoder.setDictionarySize(dictSize);
-    // Track current LZMA properties
-    var propsSet = false;
-    // Buffer for incomplete chunk data
-    var pending = null;
-    var finished = false;
-    return new _extractbaseiterator.Transform({
-        transform: function transform(chunk, _encoding, callback) {
-            var _this = this;
-            if (finished) {
-                callback(null);
-                return;
-            }
-            // Combine with pending data
-            var input;
-            if (pending && pending.length > 0) {
-                input = Buffer.concat([
-                    pending,
-                    chunk
-                ]);
-                pending = null;
-            } else {
-                input = chunk;
-            }
-            var offset = 0;
-            try {
-                while(offset < input.length && !finished){
-                    var result = (0, _Lzma2ChunkParserts.hasCompleteChunk)(input, offset);
-                    if (!result.success) {
-                        // Need more data
-                        pending = input.slice(offset);
-                        break;
-                    }
-                    var chunkInfo = result.chunk, totalSize = result.totalSize;
-                    if (chunkInfo.type === 'end') {
-                        finished = true;
-                        break;
-                    }
-                    // Handle dictionary reset
-                    if (chunkInfo.dictReset) {
-                        decoder.resetDictionary();
-                    }
-                    var dataOffset = offset + chunkInfo.headerSize;
-                    if (chunkInfo.type === 'uncompressed') {
-                        var uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);
-                        this.push(uncompData);
-                        // Feed uncompressed data to dictionary for subsequent LZMA chunks
-                        decoder.feedUncompressed(uncompData);
-                    } else {
-                        // LZMA compressed chunk
-                        // Variables to store properties (used for both decoders)
-                        var lc = void 0;
-                        var lp = void 0;
-                        var pb = void 0;
-                        // Apply new properties if present
-                        if (chunkInfo.newProps) {
-                            var ref;
-                            ref = chunkInfo.newProps, lc = ref.lc, lp = ref.lp, pb = ref.pb, ref;
-                            if (!decoder.setLcLpPb(lc, lp, pb)) {
-                                throw new Error("Invalid LZMA properties: lc=".concat(lc, " lp=").concat(lp, " pb=").concat(pb));
-                            }
-                            propsSet = true;
-                        }
-                        if (!propsSet) {
-                            throw new Error('LZMA chunk without properties');
-                        }
-                        // Reset probabilities if state reset
-                        if (chunkInfo.stateReset) {
-                            decoder.resetProbabilities();
-                        }
-                        // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
-                        var useSolid = !chunkInfo.stateReset || chunkInfo.stateReset && !chunkInfo.dictReset;
-                        var compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);
-                        // Enhanced: Use OutputSink for direct emission (zero-copy)
-                        // Create a decoder with direct stream emission
-                        var streamDecoder = new _LzmaDecoderts.LzmaDecoder({
-                            write: function(chunk) {
-                                return _this.push(chunk);
-                            }
-                        });
-                        streamDecoder.setDictionarySize(dictSize);
-                        // Preserve properties from main decoder
-                        streamDecoder.setLcLpPb(lc, lp, pb);
-                        // Use solid mode based on chunk properties
-                        streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);
-                        // Flush any remaining data in the OutWindow
-                        streamDecoder.flushOutWindow();
-                    }
-                    offset += totalSize;
-                }
-                callback(null);
-            } catch (err) {
-                callback(err);
-            }
-        },
-        flush: function flush(callback) {
-            if (pending && pending.length > 0 && !finished) {
-                callback(new Error('Truncated LZMA2 stream'));
-            } else {
-                callback(null);
-            }
-        }
-    });
-}
-function createLzmaDecoder(properties, unpackSize) {
-    var decoder = new _LzmaDecoderts.LzmaDecoder();
-    decoder.setDecoderProperties(properties);
-    var chunks = [];
-    var totalSize = 0;
-    return new _extractbaseiterator.Transform({
-        transform: function transform(chunk, _encoding, callback) {
-            chunks.push(chunk);
-            totalSize += chunk.length;
-            callback(null);
-        },
-        flush: function flush(callback) {
-            var _this = this;
-            try {
-                // Optimization: Pre-allocate single buffer instead of Buffer.concat()
-                // This reduces peak memory usage by ~50% during concatenation
-                var input = (0, _extractbaseiterator.allocBufferUnsafe)(totalSize);
-                var offset = 0;
-                var _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
-                try {
-                    // Copy each chunk into the pre-allocated buffer
-                    for(var _iterator = chunks[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
-                        var chunk = _step.value;
-                        chunk.copy(input, offset);
-                        offset += chunk.length;
-                    }
-                } catch (err) {
-                    _didIteratorError = true;
-                    _iteratorError = err;
-                } finally{
-                    try {
-                        if (!_iteratorNormalCompletion && _iterator.return != null) {
-                            _iterator.return();
-                        }
-                    } finally{
-                        if (_didIteratorError) {
-                            throw _iteratorError;
-                        }
-                    }
-                }
-                // Enhanced: Use OutputSink for direct emission (zero-copy)
-                // Create a decoder with direct stream emission
-                var streamDecoder = new _LzmaDecoderts.LzmaDecoder({
-                    write: function(chunk) {
-                        return _this.push(chunk);
-                    }
-                });
-                streamDecoder.setDecoderProperties(properties);
-                streamDecoder.decodeWithSink(input, 0, unpackSize, false);
-                // Flush any remaining data in the OutWindow
-                streamDecoder.flushOutWindow();
-                callback(null);
-            } catch (err) {
-                callback(err);
-            }
-        }
-    });
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/lzma/stream/transforms.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lzma/stream/transforms.ts"],"sourcesContent":["/**\n * LZMA Transform Stream Wrappers\n *\n * Provides Transform streams for LZMA1 and LZMA2 decompression.\n *\n * LZMA2 streaming works by buffering until a complete chunk is available,\n * then decoding synchronously. LZMA2 chunks are bounded in size (~2MB max\n * uncompressed), so memory usage is predictable and bounded.\n *\n * Performance Optimization:\n * - Uses OutputSink pattern for zero-copy output during decode\n * - Each decoded byte written directly to stream (not buffered then copied)\n * - ~4x faster than previous buffering approach\n *\n * True byte-by-byte async LZMA streaming would require rewriting the entire\n * decoder with continuation-passing style, which is complex and not worth\n * the effort given LZMA2's chunked format.\n */\n\nimport { allocBufferUnsafe, Transform } from 'extract-base-iterator';\nimport { hasCompleteChunk } from '../Lzma2ChunkParser.ts';\nimport { LzmaDecoder } from '../sync/LzmaDecoder.ts';\nimport { parseLzma2DictionarySize } from '../types.ts';\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * @param properties - 1-byte LZMA2 properties (dictionary size)\n * @returns Transform stream that decompresses LZMA2 data\n */\nexport function createLzma2Decoder(properties: Buffer | Uint8Array): InstanceType<typeof Transform> {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n const dictSize = parseLzma2DictionarySize(properties[0]);\n\n // LZMA decoder instance - reused across chunks for solid mode\n const decoder = new LzmaDecoder();\n decoder.setDictionarySize(dictSize);\n\n // Track current LZMA properties\n let propsSet = false;\n\n // Buffer for incomplete chunk data\n let pending: Buffer | null = null;\n let finished = false;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n if (finished) {\n callback(null);\n return;\n }\n\n // Combine with pending data\n let input: Buffer;\n if (pending && pending.length > 0) {\n input = Buffer.concat([pending, chunk]);\n pending = null;\n } else {\n input = chunk;\n }\n\n let offset = 0;\n\n try {\n while (offset < input.length && !finished) {\n const result = hasCompleteChunk(input, offset);\n\n if (!result.success) {\n // Need more data\n pending = input.slice(offset);\n break;\n }\n\n const { chunk: chunkInfo, totalSize } = result;\n\n if (chunkInfo.type === 'end') {\n finished = true;\n break;\n }\n\n // Handle dictionary reset\n if (chunkInfo.dictReset) {\n decoder.resetDictionary();\n }\n\n const dataOffset = offset + chunkInfo.headerSize;\n\n if (chunkInfo.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunkInfo.uncompSize);\n this.push(uncompData);\n\n // Feed uncompressed data to dictionary for subsequent LZMA chunks\n decoder.feedUncompressed(uncompData);\n } else {\n // LZMA compressed chunk\n\n // Variables to store properties (used for both decoders)\n let lc: number;\n let lp: number;\n let pb: number;\n\n // Apply new properties if present\n if (chunkInfo.newProps) {\n ({ lc, lp, pb } = chunkInfo.newProps);\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: 
lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunkInfo.stateReset) {\n decoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunkInfo.stateReset || (chunkInfo.stateReset && !chunkInfo.dictReset);\n\n const compData = input.slice(dataOffset, dataOffset + chunkInfo.compSize);\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDictionarySize(dictSize);\n // Preserve properties from main decoder\n streamDecoder.setLcLpPb(lc, lp, pb);\n\n // Use solid mode based on chunk properties\n streamDecoder.decodeWithSink(compData, 0, chunkInfo.uncompSize, useSolid);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n }\n\n offset += totalSize;\n }\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n if (pending && pending.length > 0 && !finished) {\n callback(new Error('Truncated LZMA2 stream'));\n } else {\n callback(null);\n }\n },\n });\n}\n\n/**\n * Create an LZMA1 decoder Transform stream\n *\n * Note: LZMA1 has no chunk boundaries, so this requires knowing the\n * uncompressed size upfront. The stream buffers all input, then\n * decompresses when complete.\n *\n * For true streaming, use LZMA2 which has built-in chunking.\n *\n * Optimization: Pre-allocates input buffer and copies chunks once,\n * avoiding the double-buffering of Buffer.concat().\n *\n * @param properties - 5-byte LZMA properties\n * @param unpackSize - Expected uncompressed size\n * @returns Transform stream that decompresses LZMA1 data\n */\nexport function createLzmaDecoder(properties: Buffer | Uint8Array, unpackSize: number): InstanceType<typeof Transform> {\n const decoder = new LzmaDecoder();\n decoder.setDecoderProperties(properties);\n\n const chunks: Buffer[] = [];\n let totalSize = 0;\n\n return new Transform({\n transform: function (this: InstanceType<typeof Transform>, chunk: Buffer, _encoding: string, callback: (err?: Error | null) => void) {\n chunks.push(chunk);\n totalSize += chunk.length;\n callback(null);\n },\n\n flush: function (this: InstanceType<typeof Transform>, callback: (err?: Error | null) => void) {\n try {\n // Optimization: Pre-allocate single buffer instead of Buffer.concat()\n // This reduces peak memory usage by ~50% during concatenation\n const input = allocBufferUnsafe(totalSize);\n let offset = 0;\n\n // Copy each chunk into the pre-allocated buffer\n for (const chunk of chunks) {\n chunk.copy(input, offset);\n offset += chunk.length;\n }\n\n // Enhanced: Use OutputSink for direct emission (zero-copy)\n // Create a decoder with direct stream emission\n const streamDecoder = new LzmaDecoder({\n write: (chunk: Buffer) => this.push(chunk),\n });\n streamDecoder.setDecoderProperties(properties);\n streamDecoder.decodeWithSink(input, 0, unpackSize, false);\n\n // Flush any remaining data in the OutWindow\n streamDecoder.flushOutWindow();\n\n callback(null);\n } catch (err) {\n callback(err as Error);\n }\n },\n 
});\n}\n"],"names":["createLzma2Decoder","createLzmaDecoder","properties","length","Error","dictSize","parseLzma2DictionarySize","decoder","LzmaDecoder","setDictionarySize","propsSet","pending","finished","Transform","transform","chunk","_encoding","callback","input","Buffer","concat","offset","result","hasCompleteChunk","success","slice","chunkInfo","totalSize","type","dictReset","resetDictionary","dataOffset","headerSize","uncompData","uncompSize","push","feedUncompressed","lc","lp","pb","newProps","setLcLpPb","stateReset","resetProbabilities","useSolid","compData","compSize","streamDecoder","write","decodeWithSink","flushOutWindow","err","flush","unpackSize","setDecoderProperties","chunks","allocBufferUnsafe","copy"],"mappings":"AAAA;;;;;;;;;;;;;;;;;CAiBC;;;;;;;;;;;QAgBeA;eAAAA;;QAoJAC;eAAAA;;;mCAlK6B;kCACZ;6BACL;uBACa;AAWlC,SAASD,mBAAmBE,UAA+B;IAChE,IAAI,CAACA,cAAcA,WAAWC,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAMC,WAAWC,IAAAA,iCAAwB,EAACJ,UAAU,CAAC,EAAE;IAEvD,8DAA8D;IAC9D,IAAMK,UAAU,IAAIC,0BAAW;IAC/BD,QAAQE,iBAAiB,CAACJ;IAE1B,gCAAgC;IAChC,IAAIK,WAAW;IAEf,mCAAmC;IACnC,IAAIC,UAAyB;IAC7B,IAAIC,WAAW;IAEf,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAW,SAAXA,UAA2DC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;;YACjI,IAAIL,UAAU;gBACZK,SAAS;gBACT;YACF;YAEA,4BAA4B;YAC5B,IAAIC;YACJ,IAAIP,WAAWA,QAAQR,MAAM,GAAG,GAAG;gBACjCe,QAAQC,OAAOC,MAAM,CAAC;oBAACT;oBAASI;iBAAM;gBACtCJ,UAAU;YACZ,OAAO;gBACLO,QAAQH;YACV;YAEA,IAAIM,SAAS;YAEb,IAAI;gBACF,MAAOA,SAASH,MAAMf,MAAM,IAAI,CAACS,SAAU;oBACzC,IAAMU,SAASC,IAAAA,oCAAgB,EAACL,OAAOG;oBAEvC,IAAI,CAACC,OAAOE,OAAO,EAAE;wBACnB,iBAAiB;wBACjBb,UAAUO,MAAMO,KAAK,CAACJ;wBACtB;oBACF;oBAEA,IAAQN,AAAOW,YAAyBJ,OAAhCP,OAAkBY,YAAcL,OAAdK;oBAE1B,IAAID,UAAUE,IAAI,KAAK,OAAO;wBAC5BhB,WAAW;wBACX;oBACF;oBAEA,0BAA0B;oBAC1B,IAAIc,UAAUG,SAAS,EAAE;wBACvBtB,QAAQuB,eAAe;oBACzB;oBAEA,IAAMC,aAAaV,SAASK,UAAUM,UAAU;oBAEhD,IAAIN,UAAUE,IAAI,KAAK,gBAAgB;wBACrC,IAAMK,aAAaf,MAAMO,KAAK,CAACM,YAAYA,aAAaL,UAAUQ,UAAU;wBAC5E,IAAI,CAACC,IAAI,CAACF;wBAEV,kEAAkE;wBAClE1B,QAAQ6B,gBAAgB,CAACH;oBAC3B,OAAO;wBACL,wBAAwB;wBAExB,yDAAyD;wBACzD,IAAII,KAAAA,KAAAA;wBACJ,IAAIC,KAAAA,KAAAA;wBACJ,IAAIC,KAAAA,KAAAA;wBAEJ,kCAAkC;wBAClC,IAAIb,UAAUc,QAAQ,EAAE;;kCACJd,UAAUc,QAAQ,EAAjCH,SAAAA,IAAIC,SAAAA,IAAIC,SAAAA;4BACX,IAAI,CAAChC,QAAQkC,SAAS,CAACJ,IAAIC,IAAIC,KAAK;gCAClC,MAAM,IAAInC,MAAM,AAAC,+BAAuCkC,OAATD,IAAG,QAAeE,OAATD,IAAG,QAAS,OAAHC;4BACnE;4BACA7B,WAAW;wBACb;wBAEA,IAAI,CAACA,UAAU;4BACb,MAAM,IAAIN,MAAM;wBAClB;wBAEA,qCAAqC;wBACrC,IAAIsB,UAAUgB,UAAU,EAAE;4BACxBnC,QAAQoC,kBAAkB;wBAC5B;wBAEA,0GAA0G;wBAC1G,IAAMC,WAAW,CAAClB,UAAUgB,UAAU,IAAKhB,UAAUgB,UAAU,IAAI,CAAChB,UAAUG,SAAS;wBAEvF,IAAMgB,WAAW3B,MAAMO,KAAK,CAACM,YAAYA,aAAaL,UAAUoB,QAAQ;wBAExE,2DAA2D;wBAC3D,+CAA+C;wBAC/C,IAAMC,gBAAgB,IAAIvC,0BAAW,CAAC;4BACpCwC,OAAO,SAACjC;uCAAkB,MAAKoB,IAAI,CAACpB;;wBACtC;wBACAgC,cAActC,iBAAiB,CAACJ;wBAChC,wCAAwC;wBACxC0C,cAAcN,SAAS,CAACJ,IAAIC,IAAIC;wBAEhC,2CAA2C;wBAC3CQ,cAAcE,cAAc,CAACJ,UAAU,GAAGnB,UAAUQ,UAAU,EAAEU;wBAEhE,4CAA4C;wBAC5CG,cAAcG,cAAc;oBAC9B;oBAEA7B,UAAUM;gBACZ;gBAEAV,SAAS;YACX,EAAE,OAAOkC,KAAK;gBACZlC,SAASkC;YACX;QACF;QAEAC,OAAO,SAAPA,MAAuDnC,QAAsC;YAC3F,IAAIN,WAAWA,QAAQR,MAAM,GAAG,KAAK,CAACS,UAAU;gBAC9CK,SAAS,IAAIb,MAAM;YACrB,OAAO;gBACLa,SAAS;YACX;QACF;IACF;AACF;AAkBO,SAAShB,kBAAkBC,UAA+B,EAAEmD,UAAkB;IACnF,IAAM9C,UAAU,IAAIC,0BAAW;IAC/BD,QAAQ+C,oBAAoB,CAACpD;IAE7B,IAAMqD,SAAmB,EAAE;IAC3B,IAAI5B,YAAY;IAEhB,OAAO,IAAId,8BAAS,CAAC;QACnBC,WAAW,SAAXA,UAA2DC,KAAa,EAAEC,SAAiB,EAAEC,QAAsC;YACjIsC,OAAOpB,IAAI,CAACpB;YACZY,aAAaZ,MAAMZ,MAAM;YACzBc,SAAS;QACX;QAEAmC,OAAO,SAAPA,MAAuDnC,QAAsC;;YAC3F,IAAI;gBACF,sEAAsE;gBACtE,8DAA8D;gBAC9D,IAAMC,QAAQsC,IA
AAA,sCAAiB,EAAC7B;gBAChC,IAAIN,SAAS;oBAGR,kCAAA,2BAAA;;oBADL,gDAAgD;oBAChD,QAAK,YAAekC,2BAAf,SAAA,6BAAA,QAAA,yBAAA,iCAAuB;wBAAvB,IAAMxC,QAAN;wBACHA,MAAM0C,IAAI,CAACvC,OAAOG;wBAClBA,UAAUN,MAAMZ,MAAM;oBACxB;;oBAHK;oBAAA;;;6BAAA,6BAAA;4BAAA;;;4BAAA;kCAAA;;;;gBAKL,2DAA2D;gBAC3D,+CAA+C;gBAC/C,IAAM4C,gBAAgB,IAAIvC,0BAAW,CAAC;oBACpCwC,OAAO,SAACjC;+BAAkB,MAAKoB,IAAI,CAACpB;;gBACtC;gBACAgC,cAAcO,oBAAoB,CAACpD;gBACnC6C,cAAcE,cAAc,CAAC/B,OAAO,GAAGmC,YAAY;gBAEnD,4CAA4C;gBAC5CN,cAAcG,cAAc;gBAE5BjC,SAAS;YACX,EAAE,OAAOkC,KAAK;gBACZlC,SAASkC;YACX;QACF;IACF;AACF"}

package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts
@@ -1,63 +0,0 @@
-/**
- * Synchronous LZMA2 Decoder
- *
- * LZMA2 is a container format that wraps LZMA chunks with framing.
- * Decodes LZMA2 data from a buffer.
- */
-import { type OutputSink } from '../types.js';
-/**
- * Synchronous LZMA2 decoder
- */
-export declare class Lzma2Decoder {
-    private lzmaDecoder;
-    private dictionarySize;
-    private propsSet;
-    constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink);
-    /**
-     * Reset the dictionary (for stream boundaries)
-     */
-    resetDictionary(): void;
-    /**
-     * Reset all probability models (for stream boundaries)
-     */
-    resetProbabilities(): void;
-    /**
-     * Set LZMA properties
-     */
-    setLcLpPb(lc: number, lp: number, pb: number): boolean;
-    /**
-     * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)
-     */
-    feedUncompressed(data: Buffer): void;
-    /**
-     * Decode raw LZMA data (used internally for LZMA2 chunks)
-     * @param input - LZMA compressed data
-     * @param offset - Input offset
-     * @param outSize - Expected output size
-     * @param solid - Use solid mode
-     * @returns Decompressed data
-     */
-    decodeLzmaData(input: Buffer, offset: number, outSize: number, solid?: boolean): Buffer;
-    /**
-     * Decode LZMA2 data with streaming output
-     * @param input - LZMA2 compressed data
-     * @returns Total number of bytes written to sink
-     */
-    decodeWithSink(input: Buffer): number;
-    /**
-     * Decode LZMA2 data
-     * @param input - LZMA2 compressed data
-     * @param unpackSize - Expected output size (optional, for pre-allocation)
-     * @returns Decompressed data
-     */
-    decode(input: Buffer, unpackSize?: number): Buffer;
-}
-/**
- * Decode LZMA2 data synchronously
- * @param input - LZMA2 compressed data
- * @param properties - 1-byte properties (dictionary size)
- * @param unpackSize - Expected output size (optional, autodetects if not provided)
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
- * @returns Decompressed data (or bytes written if outputSink provided)
- */
-export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number;

package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts
@@ -1,63 +0,0 @@
-/**
- * Synchronous LZMA2 Decoder
- *
- * LZMA2 is a container format that wraps LZMA chunks with framing.
- * Decodes LZMA2 data from a buffer.
- */
-import { type OutputSink } from '../types.js';
-/**
- * Synchronous LZMA2 decoder
- */
-export declare class Lzma2Decoder {
-    private lzmaDecoder;
-    private dictionarySize;
-    private propsSet;
-    constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink);
-    /**
-     * Reset the dictionary (for stream boundaries)
-     */
-    resetDictionary(): void;
-    /**
-     * Reset all probability models (for stream boundaries)
-     */
-    resetProbabilities(): void;
-    /**
-     * Set LZMA properties
-     */
-    setLcLpPb(lc: number, lp: number, pb: number): boolean;
-    /**
-     * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)
-     */
-    feedUncompressed(data: Buffer): void;
-    /**
-     * Decode raw LZMA data (used internally for LZMA2 chunks)
-     * @param input - LZMA compressed data
-     * @param offset - Input offset
-     * @param outSize - Expected output size
-     * @param solid - Use solid mode
-     * @returns Decompressed data
-     */
-    decodeLzmaData(input: Buffer, offset: number, outSize: number, solid?: boolean): Buffer;
-    /**
-     * Decode LZMA2 data with streaming output
-     * @param input - LZMA2 compressed data
-     * @returns Total number of bytes written to sink
-     */
-    decodeWithSink(input: Buffer): number;
-    /**
-     * Decode LZMA2 data
-     * @param input - LZMA2 compressed data
-     * @param unpackSize - Expected output size (optional, for pre-allocation)
-     * @returns Decompressed data
-     */
-    decode(input: Buffer, unpackSize?: number): Buffer;
-}
-/**
- * Decode LZMA2 data synchronously
- * @param input - LZMA2 compressed data
- * @param properties - 1-byte properties (dictionary size)
- * @param unpackSize - Expected output size (optional, autodetects if not provided)
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
- * @returns Decompressed data (or bytes written if outputSink provided)
- */
-export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number;

package/dist/cjs/lzma/sync/Lzma2Decoder.js
@@ -1,231 +0,0 @@
-/**
- * Synchronous LZMA2 Decoder
- *
- * LZMA2 is a container format that wraps LZMA chunks with framing.
- * Decodes LZMA2 data from a buffer.
- */ "use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get Lzma2Decoder () {
-        return Lzma2Decoder;
-    },
-    get decodeLzma2 () {
-        return decodeLzma2;
-    }
-});
-var _extractbaseiterator = require("extract-base-iterator");
-var _Lzma2ChunkParserts = require("../Lzma2ChunkParser.js");
-var _typests = require("../types.js");
-var _LzmaDecoderts = require("./LzmaDecoder.js");
-function _class_call_check(instance, Constructor) {
-    if (!(instance instanceof Constructor)) {
-        throw new TypeError("Cannot call a class as a function");
-    }
-}
-var Lzma2Decoder = /*#__PURE__*/ function() {
-    "use strict";
-    function Lzma2Decoder(properties, outputSink) {
-        _class_call_check(this, Lzma2Decoder);
-        if (!properties || properties.length < 1) {
-            throw new Error('LZMA2 requires properties byte');
-        }
-        this.dictionarySize = (0, _typests.parseLzma2DictionarySize)(properties[0]);
-        this.lzmaDecoder = new _LzmaDecoderts.LzmaDecoder(outputSink);
-        this.lzmaDecoder.setDictionarySize(this.dictionarySize);
-        this.propsSet = false;
-    }
-    var _proto = Lzma2Decoder.prototype;
-    /**
-     * Reset the dictionary (for stream boundaries)
-     */ _proto.resetDictionary = function resetDictionary() {
-        this.lzmaDecoder.resetDictionary();
-    };
-    /**
-     * Reset all probability models (for stream boundaries)
-     */ _proto.resetProbabilities = function resetProbabilities() {
-        this.lzmaDecoder.resetProbabilities();
-    };
-    /**
-     * Set LZMA properties
-     */ _proto.setLcLpPb = function setLcLpPb(lc, lp, pb) {
-        return this.lzmaDecoder.setLcLpPb(lc, lp, pb);
-    };
-    /**
-     * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)
-     */ _proto.feedUncompressed = function feedUncompressed(data) {
-        this.lzmaDecoder.feedUncompressed(data);
-    };
-    /**
-     * Decode raw LZMA data (used internally for LZMA2 chunks)
-     * @param input - LZMA compressed data
-     * @param offset - Input offset
-     * @param outSize - Expected output size
-     * @param solid - Use solid mode
-     * @returns Decompressed data
-     */ _proto.decodeLzmaData = function decodeLzmaData(input, offset, outSize) {
-        var solid = arguments.length > 3 && arguments[3] !== void 0 ? arguments[3] : false;
-        return this.lzmaDecoder.decode(input, offset, outSize, solid);
-    };
-    /**
-     * Decode LZMA2 data with streaming output
-     * @param input - LZMA2 compressed data
-     * @returns Total number of bytes written to sink
-     */ _proto.decodeWithSink = function decodeWithSink(input) {
-        var totalBytes = 0;
-        var offset = 0;
-        while(offset < input.length){
-            var result = (0, _Lzma2ChunkParserts.parseLzma2ChunkHeader)(input, offset);
-            if (!result.success) {
-                throw new Error('Truncated LZMA2 chunk header');
-            }
-            var chunk = result.chunk;
-            if (chunk.type === 'end') {
-                break;
-            }
-            // Validate we have enough data for the chunk
-            var dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
-            if (offset + chunk.headerSize + dataSize > input.length) {
-                throw new Error("Truncated LZMA2 ".concat(chunk.type, " data"));
-            }
-            // Handle dictionary reset
-            if (chunk.dictReset) {
-                this.lzmaDecoder.resetDictionary();
-            }
-            var dataOffset = offset + chunk.headerSize;
-            if (chunk.type === 'uncompressed') {
-                var uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);
-                // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it
-                this.lzmaDecoder.feedUncompressed(uncompData);
-                totalBytes += uncompData.length;
-                offset = dataOffset + chunk.uncompSize;
-            } else {
-                // LZMA compressed chunk
-                // Apply new properties if present
-                if (chunk.newProps) {
-                    var _chunk_newProps = chunk.newProps, lc = _chunk_newProps.lc, lp = _chunk_newProps.lp, pb = _chunk_newProps.pb;
-                    if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {
-                        throw new Error("Invalid LZMA properties: lc=".concat(lc, " lp=").concat(lp, " pb=").concat(pb));
-                    }
-                    this.propsSet = true;
-                }
-                if (!this.propsSet) {
-                    throw new Error('LZMA chunk without properties');
-                }
-                // Reset probabilities if state reset
-                if (chunk.stateReset) {
-                    this.lzmaDecoder.resetProbabilities();
-                }
-                // Determine solid mode
-                var useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
-                // Decode LZMA chunk directly to sink
-                totalBytes += this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);
-                offset = dataOffset + chunk.compSize;
-            }
-        }
-        // Flush any remaining data in the OutWindow
-        this.lzmaDecoder.flushOutWindow();
-        return totalBytes;
-    };
-    /**
-     * Decode LZMA2 data
-     * @param input - LZMA2 compressed data
-     * @param unpackSize - Expected output size (optional, for pre-allocation)
-     * @returns Decompressed data
-     */ _proto.decode = function decode(input, unpackSize) {
-        // Pre-allocate output buffer if size is known
-        var outputBuffer = null;
-        var outputPos = 0;
-        var outputChunks = [];
-        if (unpackSize && unpackSize > 0) {
-            outputBuffer = (0, _extractbaseiterator.allocBufferUnsafe)(unpackSize);
-        }
-        var offset = 0;
-        while(offset < input.length){
-            var result = (0, _Lzma2ChunkParserts.parseLzma2ChunkHeader)(input, offset);
-            if (!result.success) {
-                throw new Error('Truncated LZMA2 chunk header');
-            }
-            var chunk = result.chunk;
-            if (chunk.type === 'end') {
-                break;
-            }
-            // Validate we have enough data for the chunk
-            var dataSize = chunk.type === 'uncompressed' ? chunk.uncompSize : chunk.compSize;
-            if (offset + chunk.headerSize + dataSize > input.length) {
-                throw new Error("Truncated LZMA2 ".concat(chunk.type, " data"));
-            }
-            // Handle dictionary reset
-            if (chunk.dictReset) {
-                this.lzmaDecoder.resetDictionary();
-            }
-            var dataOffset = offset + chunk.headerSize;
-            if (chunk.type === 'uncompressed') {
-                var uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);
-                // Copy to output
-                if (outputBuffer) {
-                    uncompData.copy(outputBuffer, outputPos);
-                    outputPos += uncompData.length;
-                } else {
-                    outputChunks.push(uncompData);
-                }
-                // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it
-                this.lzmaDecoder.feedUncompressed(uncompData);
-                offset = dataOffset + chunk.uncompSize;
-            } else {
-                // LZMA compressed chunk
-                // Apply new properties if present
-                if (chunk.newProps) {
-                    var _chunk_newProps = chunk.newProps, lc = _chunk_newProps.lc, lp = _chunk_newProps.lp, pb = _chunk_newProps.pb;
-                    if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {
-                        throw new Error("Invalid LZMA properties: lc=".concat(lc, " lp=").concat(lp, " pb=").concat(pb));
-                    }
-                    this.propsSet = true;
-                }
-                if (!this.propsSet) {
-                    throw new Error('LZMA chunk without properties');
-                }
-                // Reset probabilities if state reset
-                if (chunk.stateReset) {
-                    this.lzmaDecoder.resetProbabilities();
-                }
-                // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
-                var useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
-                // Decode LZMA chunk
-                var chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);
-                var decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);
-                // Copy to output
-                if (outputBuffer) {
-                    decoded.copy(outputBuffer, outputPos);
-                    outputPos += decoded.length;
-                } else {
-                    outputChunks.push(decoded);
-                }
-                offset = dataOffset + chunk.compSize;
-            }
-        }
-        // Return pre-allocated buffer or concatenated chunks
-        if (outputBuffer) {
-            return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
-        }
-        return Buffer.concat(outputChunks);
-    };
-    return Lzma2Decoder;
-}();
-function decodeLzma2(input, properties, unpackSize, outputSink) {
-    var decoder = new Lzma2Decoder(properties, outputSink);
-    if (outputSink) {
-        // Zero-copy mode: write to sink during decode
-        return decoder.decodeWithSink(input);
-    }
-    // Buffering mode: returns Buffer (zero-copy)
-    return decoder.decode(input, unpackSize);
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/lzma/sync/Lzma2Decoder.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/lzma/sync/Lzma2Decoder.ts"],"sourcesContent":["/**\n * Synchronous LZMA2 Decoder\n *\n * LZMA2 is a container format that wraps LZMA chunks with framing.\n * Decodes LZMA2 data from a buffer.\n */\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.ts';\nimport { type OutputSink, parseLzma2DictionarySize } from '../types.ts';\nimport { LzmaDecoder } from './LzmaDecoder.ts';\n\n/**\n * Synchronous LZMA2 decoder\n */\nexport class Lzma2Decoder {\n private lzmaDecoder: LzmaDecoder;\n private dictionarySize: number;\n private propsSet: boolean;\n\n constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink) {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n this.dictionarySize = parseLzma2DictionarySize(properties[0]);\n this.lzmaDecoder = new LzmaDecoder(outputSink);\n this.lzmaDecoder.setDictionarySize(this.dictionarySize);\n this.propsSet = false;\n }\n\n /**\n * Reset the dictionary (for stream boundaries)\n */\n resetDictionary(): void {\n this.lzmaDecoder.resetDictionary();\n }\n\n /**\n * Reset all probability models (for stream boundaries)\n */\n resetProbabilities(): void {\n this.lzmaDecoder.resetProbabilities();\n }\n\n /**\n * Set LZMA properties\n */\n setLcLpPb(lc: number, lp: number, pb: number): boolean {\n return this.lzmaDecoder.setLcLpPb(lc, lp, pb);\n }\n\n /**\n * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)\n */\n feedUncompressed(data: Buffer): void {\n this.lzmaDecoder.feedUncompressed(data);\n }\n\n /**\n * Decode raw LZMA data (used internally for LZMA2 chunks)\n * @param input - LZMA compressed data\n * @param offset - Input offset\n * @param outSize - Expected output size\n * @param solid - Use solid mode\n * @returns Decompressed data\n */\n decodeLzmaData(input: Buffer, offset: number, outSize: number, solid = false): Buffer {\n return this.lzmaDecoder.decode(input, offset, outSize, solid);\n }\n\n /**\n * Decode LZMA2 data with streaming output\n * @param input - LZMA2 compressed data\n * @returns Total number of bytes written to sink\n */\n decodeWithSink(input: Buffer): number {\n let totalBytes = 0;\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n totalBytes += uncompData.length;\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk directly to sink\n totalBytes += this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Flush any remaining data in the OutWindow\n this.lzmaDecoder.flushOutWindow();\n\n return totalBytes;\n }\n\n /**\n * Decode LZMA2 data\n * @param input - LZMA2 compressed data\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\n decode(input: Buffer, unpackSize?: number): Buffer {\n // Pre-allocate output buffer if size is known\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Copy to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks.push(uncompData);\n }\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk\n const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);\n const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);\n\n // Copy to output\n if (outputBuffer) {\n decoded.copy(outputBuffer, outputPos);\n outputPos += decoded.length;\n } else {\n outputChunks.push(decoded);\n }\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n }\n}\n\n/**\n * Decode LZMA2 data synchronously\n * @param input - LZMA2 compressed data\n * @param properties - 1-byte properties (dictionary size)\n * @param unpackSize - Expected output size (optional, autodetects if not provided)\n * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)\n * @returns Decompressed data (or bytes written if outputSink provided)\n */\nexport function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number {\n const decoder = new Lzma2Decoder(properties, outputSink);\n if (outputSink) {\n // Zero-copy mode: write to sink during decode\n return decoder.decodeWithSink(input);\n }\n // Buffering mode: returns Buffer (zero-copy)\n return decoder.decode(input, unpackSize);\n}\n"],"names":["Lzma2Decoder","decodeLzma2","properties","outputSink","length","Error","dictionarySize","parseLzma2DictionarySize","lzmaDecoder","LzmaDecoder","setDictionarySize","propsSet","resetDictionary","resetProbabilities","setLcLpPb","lc","lp","pb","feedUncompressed","data","decodeLzmaData","input","offset","outSize","solid","decode","decodeWithSink","totalBytes","result","parseLzma2ChunkHeader","success","chunk","type","dataSize","uncompSize","compSize","headerSize","dictReset","dataOffset","uncompData","slice","newProps","stateReset","useSolid","flushOutWindow","unpackSize","outputBuffer","outputPos","outputChunks","allocBufferUnsafe","copy","push","chunkData","decoded","Buffer","concat","decoder"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAUYA;eAAAA;;QA2PGC;eAAAA;;;mCAnQkB;kCACI;uBACoB;6BAC9B;;;;;;AAKrB,IAAA,AAAMD,6BAAN;;aAAMA,aAKCE,UAA+B,EAAEC,UAAuB;gCALzDH;QAMT,IAAI,CAACE,cAAcA,WAAWE,MAAM,GAAG,GAAG;YACxC,MAAM,IAAIC,MAAM;QAClB;QAEA,IAAI,CAACC,cAAc,GAAGC,IAAAA,iCAAwB,EAACL,UAAU,CAAC,EAAE;QAC5D,IAAI,CAACM,WAAW,GAAG,IAAIC,0BAAW,CAACN;QACnC,IAAI,CAACK,WAAW,CAACE,iBAAiB,CAAC,IAAI,CAACJ,cAAc;QACtD,IAAI,CAACK,QAAQ,GAAG;;iBAbPX;IAgBX;;GAEC,GACDY,OAAAA,eAEC,GAFDA,SAAAA;QACE,IAAI,CAACJ,WAAW,CAACI,eAAe;IAClC;IAEA;;GAEC,GACDC,OAAAA,kBAEC,GAFDA,SAAAA;QACE,IAAI,CAACL,WAAW,CAACK,kBAAkB;IACrC;IAEA;;GAEC,GACDC,OAAAA,SAEC,GAFDA,SAAAA,UAAUC,EAAU,EAAEC,EAAU,EAAEC,EAAU;QAC1C,OAAO,IAAI,CAACT,WAAW,CAACM,SAAS,CAACC,IAAIC,IAAIC;IAC5C;IAEA;;GAEC,GACDC,OAAAA,gBAEC,GAFDA,SAAAA,iBAAiBC,IAAY;QAC3B,IAAI,CAACX,WAAW,CAACU,gBAAgB,CAACC;IACpC;IAEA;;;;;;;GAOC,GACDC,OAAAA,cAEC,GAFDA,SAAAA,eAAeC,KAAa,EAAEC,MAAc,EAAEC,OAAe;YAAEC,QAAAA,iEAAQ;QACrE,OAAO,IAAI,CAAChB,WAAW,CAACiB,MAAM,CAACJ,OAAOC,QAAQC,SAASC;IACzD;IAEA;;;;GAIC,GACDE,OAAAA,cAyEC,GAzEDA,SAAAA,eAAeL,KAAa;QAC1B,IAAIM,aAAa;QACjB,IAAIL,SAAS;QAEb,MAAOA,SAASD,MAAMjB,MAAM,CAAE;YAC5B,IAAMwB,SAASC,IAAAA,yCAAqB,EAACR,OAAOC;YAE5C,IAAI,CAACM,OAAOE,OAAO,EAAE;gBACnB,MAAM,IAAIzB,MAAM;YAClB;YAEA,IAAM0B,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,IAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAIb,SAASS,MAAMK,UAAU,GAAGH,WAAWZ,MAAMjB,MAAM,EAAE;gBACvD,MAAM,IAAIC,MAAM,AAAC,mBAA6B,OAAX0B,MAAMC,IAAI,EAAC;YAChD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC7B,WAAW,CAACI,eAAe;YAClC;YAEA,IAAM0B,aAAahB,SAASS,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,IAAMO,aAAalB,MAAMmB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,kFAAkF;gBAClF,IAAI,CAAC1B,WAAW,CAACU,gBAAgB,CAACqB;gBAElCZ,cAAcY,WAAWnC,MAAM;gBAC/BkB,SAASgB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,IAAuBV,kBAAAA,MAAMU,QAAQ,EAA7B1B
,KAAegB,gBAAfhB,IAAIC,KAAWe,gBAAXf,IAAIC,KAAOc,gBAAPd;oBAChB,IAAI,CAAC,IAAI,CAACT,WAAW,CAACM,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIZ,MAAM,AAAC,+BAAuCW,OAATD,IAAG,QAAeE,OAATD,IAAG,QAAS,OAAHC;oBACnE;oBACA,IAAI,CAACN,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIN,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAI0B,MAAMW,UAAU,EAAE;oBACpB,IAAI,CAAClC,WAAW,CAACK,kBAAkB;gBACrC;gBAEA,uBAAuB;gBACvB,IAAM8B,WAAW,CAACZ,MAAMW,UAAU,IAAKX,MAAMW,UAAU,IAAI,CAACX,MAAMM,SAAS;gBAE3E,qCAAqC;gBACrCV,cAAc,IAAI,CAACnB,WAAW,CAACkB,cAAc,CAACL,OAAOiB,YAAYP,MAAMG,UAAU,EAAES;gBAEnFrB,SAASgB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,4CAA4C;QAC5C,IAAI,CAAC3B,WAAW,CAACoC,cAAc;QAE/B,OAAOjB;IACT;IAEA;;;;;GAKC,GACDF,OAAAA,MAkGC,GAlGDA,SAAAA,OAAOJ,KAAa,EAAEwB,UAAmB;QACvC,8CAA8C;QAC9C,IAAIC,eAA8B;QAClC,IAAIC,YAAY;QAChB,IAAMC,eAAyB,EAAE;QAEjC,IAAIH,cAAcA,aAAa,GAAG;YAChCC,eAAeG,IAAAA,sCAAiB,EAACJ;QACnC;QAEA,IAAIvB,SAAS;QAEb,MAAOA,SAASD,MAAMjB,MAAM,CAAE;YAC5B,IAAMwB,SAASC,IAAAA,yCAAqB,EAACR,OAAOC;YAE5C,IAAI,CAACM,OAAOE,OAAO,EAAE;gBACnB,MAAM,IAAIzB,MAAM;YAClB;YAEA,IAAM0B,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,IAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAIb,SAASS,MAAMK,UAAU,GAAGH,WAAWZ,MAAMjB,MAAM,EAAE;gBACvD,MAAM,IAAIC,MAAM,AAAC,mBAA6B,OAAX0B,MAAMC,IAAI,EAAC;YAChD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC7B,WAAW,CAACI,eAAe;YAClC;YAEA,IAAM0B,aAAahB,SAASS,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,IAAMO,aAAalB,MAAMmB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,iBAAiB;gBACjB,IAAIY,cAAc;oBAChBP,WAAWW,IAAI,CAACJ,cAAcC;oBAC9BA,aAAaR,WAAWnC,MAAM;gBAChC,OAAO;oBACL4C,aAAaG,IAAI,CAACZ;gBACpB;gBAEA,kFAAkF;gBAClF,IAAI,CAAC/B,WAAW,CAACU,gBAAgB,CAACqB;gBAElCjB,SAASgB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,IAAuBV,kBAAAA,MAAMU,QAAQ,EAA7B1B,KAAegB,gBAAfhB,IAAIC,KAAWe,gBAAXf,IAAIC,KAAOc,gBAAPd;oBAChB,IAAI,CAAC,IAAI,CAACT,WAAW,CAACM,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIZ,MAAM,AAAC,+BAAuCW,OAATD,IAAG,QAAeE,OAATD,IAAG,QAAS,OAAHC;oBACnE;oBACA,IAAI,CAACN,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIN,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAI0B,MAAMW,UAAU,EAAE;oBACpB,IAAI,CAAClC,WAAW,CAACK,kBAAkB;gBACrC;gBAEA,0GAA0G;gBAC1G,IAAM8B,WAAW,CAACZ,MAAMW,UAAU,IAAKX,MAAMW,UAAU,IAAI,CAACX,MAAMM,SAAS;gBAE3E,oBAAoB;gBACpB,IAAMe,YAAY/B,MAAMmB,KAAK,CAACF,YAAYA,aAAaP,MAAMI,QAAQ;gBACrE,IAAMkB,UAAU,IAAI,CAAC7C,WAAW,CAACiB,MAAM,CAAC2B,WAAW,GAAGrB,MAAMG,UAAU,EAAES;gBAExE,iBAAiB;gBACjB,IAAIG,cAAc;oBAChBO,QAAQH,IAAI,CAACJ,cAAcC;oBAC3BA,aAAaM,QAAQjD,MAAM;gBAC7B,OAAO;oBACL4C,aAAaG,IAAI,CAACE;gBACpB;gBAEA/B,SAASgB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,qDAAqD;QACrD,IAAIW,cAAc;YAChB,OAAOC,YAAYD,aAAa1C,MAAM,GAAG0C,aAAaN,KAAK,CAAC,GAAGO,aAAaD;QAC9E;QACA,OAAOQ,OAAOC,MAAM,CAACP;IACvB;WAhPWhD;;AA2PN,SAASC,YAAYoB,KAAa,EAAEnB,UAA+B,EAAE2C,UAAmB,EAAE1C,UAAuB;IACtH,IAAMqD,UAAU,IAAIxD,aAAaE,YAAYC;IAC7C,IAAIA,YAAY;QACd,8CAA8C;QAC9C,OAAOqD,QAAQ9B,cAAc,CAACL;IAChC;IACA,6CAA6C;IAC7C,OAAOmC,QAAQ/B,MAAM,CAACJ,OAAOwB;AAC/B"}