xz-compat 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +42 -0
- package/README.md +248 -0
- package/dist/cjs/compat.d.cts +1 -0
- package/dist/cjs/compat.d.ts +1 -0
- package/dist/cjs/compat.js +23 -0
- package/dist/cjs/compat.js.map +1 -0
- package/dist/cjs/filters/bcj/Bcj.d.cts +16 -0
- package/dist/cjs/filters/bcj/Bcj.d.ts +16 -0
- package/dist/cjs/filters/bcj/Bcj.js +192 -0
- package/dist/cjs/filters/bcj/Bcj.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArm.d.cts +16 -0
- package/dist/cjs/filters/bcj/BcjArm.d.ts +16 -0
- package/dist/cjs/filters/bcj/BcjArm.js +122 -0
- package/dist/cjs/filters/bcj/BcjArm.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArm64.d.cts +21 -0
- package/dist/cjs/filters/bcj/BcjArm64.d.ts +21 -0
- package/dist/cjs/filters/bcj/BcjArm64.js +65 -0
- package/dist/cjs/filters/bcj/BcjArm64.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjArmt.d.cts +19 -0
- package/dist/cjs/filters/bcj/BcjArmt.d.ts +19 -0
- package/dist/cjs/filters/bcj/BcjArmt.js +76 -0
- package/dist/cjs/filters/bcj/BcjArmt.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjIa64.d.cts +15 -0
- package/dist/cjs/filters/bcj/BcjIa64.d.ts +15 -0
- package/dist/cjs/filters/bcj/BcjIa64.js +141 -0
- package/dist/cjs/filters/bcj/BcjIa64.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjPpc.d.cts +20 -0
- package/dist/cjs/filters/bcj/BcjPpc.d.ts +20 -0
- package/dist/cjs/filters/bcj/BcjPpc.js +64 -0
- package/dist/cjs/filters/bcj/BcjPpc.js.map +1 -0
- package/dist/cjs/filters/bcj/BcjSparc.d.cts +19 -0
- package/dist/cjs/filters/bcj/BcjSparc.d.ts +19 -0
- package/dist/cjs/filters/bcj/BcjSparc.js +69 -0
- package/dist/cjs/filters/bcj/BcjSparc.js.map +1 -0
- package/dist/cjs/filters/delta/Delta.d.cts +16 -0
- package/dist/cjs/filters/delta/Delta.d.ts +16 -0
- package/dist/cjs/filters/delta/Delta.js +74 -0
- package/dist/cjs/filters/delta/Delta.js.map +1 -0
- package/dist/cjs/filters/index.d.cts +8 -0
- package/dist/cjs/filters/index.d.ts +8 -0
- package/dist/cjs/filters/index.js +27 -0
- package/dist/cjs/filters/index.js.map +1 -0
- package/dist/cjs/index.d.cts +4 -0
- package/dist/cjs/index.d.ts +4 -0
- package/dist/cjs/index.js +58 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +31 -0
- package/dist/cjs/lzma/index.d.ts +31 -0
- package/dist/cjs/lzma/index.js +83 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +46 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +46 -0
- package/dist/cjs/lzma/stream/transforms.js +193 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +63 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +63 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +231 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +97 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +97 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +582 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +117 -0
- package/dist/cjs/lzma/types.d.ts +117 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/package.json +1 -0
- package/dist/cjs/utils/createBufferingDecoder.d.cts +10 -0
- package/dist/cjs/utils/createBufferingDecoder.d.ts +10 -0
- package/dist/cjs/utils/createBufferingDecoder.js +41 -0
- package/dist/cjs/utils/createBufferingDecoder.js.map +1 -0
- package/dist/cjs/xz/Decoder.d.cts +21 -0
- package/dist/cjs/xz/Decoder.d.ts +21 -0
- package/dist/cjs/xz/Decoder.js +325 -0
- package/dist/cjs/xz/Decoder.js.map +1 -0
- package/dist/esm/compat.d.ts +1 -0
- package/dist/esm/compat.js +7 -0
- package/dist/esm/compat.js.map +1 -0
- package/dist/esm/filters/bcj/Bcj.d.ts +16 -0
- package/dist/esm/filters/bcj/Bcj.js +184 -0
- package/dist/esm/filters/bcj/Bcj.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArm.d.ts +16 -0
- package/dist/esm/filters/bcj/BcjArm.js +114 -0
- package/dist/esm/filters/bcj/BcjArm.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArm64.d.ts +21 -0
- package/dist/esm/filters/bcj/BcjArm64.js +57 -0
- package/dist/esm/filters/bcj/BcjArm64.js.map +1 -0
- package/dist/esm/filters/bcj/BcjArmt.d.ts +19 -0
- package/dist/esm/filters/bcj/BcjArmt.js +66 -0
- package/dist/esm/filters/bcj/BcjArmt.js.map +1 -0
- package/dist/esm/filters/bcj/BcjIa64.d.ts +15 -0
- package/dist/esm/filters/bcj/BcjIa64.js +127 -0
- package/dist/esm/filters/bcj/BcjIa64.js.map +1 -0
- package/dist/esm/filters/bcj/BcjPpc.d.ts +20 -0
- package/dist/esm/filters/bcj/BcjPpc.js +55 -0
- package/dist/esm/filters/bcj/BcjPpc.js.map +1 -0
- package/dist/esm/filters/bcj/BcjSparc.d.ts +19 -0
- package/dist/esm/filters/bcj/BcjSparc.js +59 -0
- package/dist/esm/filters/bcj/BcjSparc.js.map +1 -0
- package/dist/esm/filters/delta/Delta.d.ts +16 -0
- package/dist/esm/filters/delta/Delta.js +66 -0
- package/dist/esm/filters/delta/Delta.js.map +1 -0
- package/dist/esm/filters/index.d.ts +8 -0
- package/dist/esm/filters/index.js +9 -0
- package/dist/esm/filters/index.js.map +1 -0
- package/dist/esm/index.d.ts +4 -0
- package/dist/esm/index.js +5 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +31 -0
- package/dist/esm/lzma/index.js +44 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +46 -0
- package/dist/esm/lzma/stream/transforms.js +190 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +63 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +211 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +97 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +545 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +117 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/package.json +1 -0
- package/dist/esm/utils/createBufferingDecoder.d.ts +10 -0
- package/dist/esm/utils/createBufferingDecoder.js +30 -0
- package/dist/esm/utils/createBufferingDecoder.js.map +1 -0
- package/dist/esm/xz/Decoder.d.ts +21 -0
- package/dist/esm/xz/Decoder.js +313 -0
- package/dist/esm/xz/Decoder.js.map +1 -0
- package/package.json +75 -0
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* XZ Decompression Module
|
|
3
|
+
*
|
|
4
|
+
* XZ is a container format that wraps LZMA2 compressed data.
|
|
5
|
+
* This module provides both synchronous and streaming XZ decoders.
|
|
6
|
+
*
|
|
7
|
+
* Pure JavaScript implementation, works on Node.js 0.8+
|
|
8
|
+
*/ import { Transform } from 'extract-base-iterator';
|
|
9
|
+
import { decodeBcj } from '../filters/bcj/Bcj.js';
|
|
10
|
+
import { decodeBcjArm } from '../filters/bcj/BcjArm.js';
|
|
11
|
+
import { decodeBcjArm64 } from '../filters/bcj/BcjArm64.js';
|
|
12
|
+
import { decodeBcjArmt } from '../filters/bcj/BcjArmt.js';
|
|
13
|
+
import { decodeBcjIa64 } from '../filters/bcj/BcjIa64.js';
|
|
14
|
+
import { decodeBcjPpc } from '../filters/bcj/BcjPpc.js';
|
|
15
|
+
import { decodeBcjSparc } from '../filters/bcj/BcjSparc.js';
|
|
16
|
+
import { decodeDelta } from '../filters/delta/Delta.js';
|
|
17
|
+
import { decodeLzma2 } from '../lzma/index.js';
|
|
18
|
+
// XZ magic bytes
// Stream Header magic: 0xFD '7' 'z' 'X' 'Z' 0x00 (first 6 bytes of every .xz stream)
const XZ_MAGIC = [
    0xfd,
    0x37,
    0x7a,
    0x58,
    0x5a,
    0x00
];
// Last two bytes of the Stream Footer
const XZ_FOOTER_MAGIC = [
    0x59,
    0x5a
]; // "YZ"
// Filter IDs (from XZ specification)
const FILTER_DELTA = 0x03;
const FILTER_BCJ_X86 = 0x04;
const FILTER_BCJ_PPC = 0x05;
const FILTER_BCJ_IA64 = 0x06;
const FILTER_BCJ_ARM = 0x07;
const FILTER_BCJ_ARMT = 0x08;
const FILTER_BCJ_SPARC = 0x09;
const FILTER_BCJ_ARM64 = 0x0a;
// LZMA2 — the compression filter proper; must be last in a block's filter chain
const FILTER_LZMA2 = 0x21;
|
|
41
|
+
/**
 * Check whether the bytes of `buf` starting at `offset` equal the byte
 * values listed in `expected`.
 * @param {Buffer} buf - Buffer to inspect.
 * @param {number} offset - Starting position within `buf`.
 * @param {number[]} expected - Byte values that must match exactly.
 * @returns {boolean} true when the span fits inside `buf` and every byte matches.
 */ function bufferEquals(buf, offset, expected) {
    // A span that would run past the end of the buffer can never match.
    if (offset + expected.length > buf.length) {
        return false;
    }
    return expected.every((wanted, idx) => buf[offset + idx] === wanted);
}
|
|
54
|
+
/**
 * Decode an XZ variable-length ("multibyte") integer.
 *
 * Each byte contributes 7 payload bits, least-significant group first; the
 * high bit of a byte marks continuation. At most 4 bytes (28 bits) are
 * accepted so the bitwise arithmetic stays within 32-bit range on old Node
 * versions; consuming a 5th byte always throws.
 * @param {Buffer} buf - Buffer containing the encoded integer.
 * @param {number} offset - Position of the first encoded byte.
 * @returns {{value: number, bytesRead: number}} Decoded value and byte count.
 * @throws {Error} when the buffer ends mid-integer or the encoding is too long.
 */ function decodeMultibyte(buf, offset) {
    let value = 0;
    for (let count = 0; ; ) {
        if (offset + count >= buf.length) {
            throw new Error('Truncated multibyte integer');
        }
        const current = buf[offset + count];
        value |= (current & 0x7f) << (count * 7);
        count += 1;
        if (count > 4) {
            // Reduced to prevent overflow on Node 0.8
            throw new Error('Multibyte integer too large');
        }
        if ((current & 0x80) === 0) {
            return {
                value,
                bytesRead: count
            };
        }
    }
}
|
|
78
|
+
/**
 * Apply one preprocessing filter (BCJ branch converter or Delta) to data
 * already decompressed by LZMA2.
 * @param {Buffer} data - LZMA2-stage output.
 * @param {{id: number, props: Buffer}} filter - Filter id and raw property bytes.
 * @returns {Buffer} The filtered data.
 * @throws {Error} for filter ids this decoder does not implement.
 */ function applyFilter(data, filter) {
    const { id, props } = filter;
    if (id === FILTER_BCJ_X86) {
        return decodeBcj(data, props);
    }
    if (id === FILTER_BCJ_ARM) {
        return decodeBcjArm(data, props);
    }
    if (id === FILTER_BCJ_ARM64) {
        return decodeBcjArm64(data, props);
    }
    if (id === FILTER_BCJ_ARMT) {
        return decodeBcjArmt(data, props);
    }
    if (id === FILTER_BCJ_PPC) {
        return decodeBcjPpc(data, props);
    }
    if (id === FILTER_BCJ_SPARC) {
        return decodeBcjSparc(data, props);
    }
    if (id === FILTER_BCJ_IA64) {
        return decodeBcjIa64(data, props);
    }
    if (id === FILTER_DELTA) {
        return decodeDelta(data, props);
    }
    throw new Error(`Unsupported filter: 0x${id.toString(16)}`);
}
|
|
102
|
+
/**
 * Parse an XZ Block Header: collect the preprocessing filter chain and the
 * LZMA2 filter properties.
 * @param {Buffer} input - The whole XZ stream.
 * @param {number} offset - Position of the block header's first byte.
 * @param {number} _checkSize - Unused here; kept for signature parity.
 * @returns {{filters: Array<{id: number, props: Buffer}>, lzma2Props: Buffer,
 *            headerSize: number, dataStart: number, dataEnd: number,
 *            nextOffset: number}}
 * @throws {Error} on an index indicator, an unknown filter id, or a missing
 *                 LZMA2 filter.
 */ function parseBlockHeader(input, offset, _checkSize) {
    const headerStart = offset;
    // First byte encodes (real header size / 4) - 1; 0x00 marks the Index instead.
    const sizeByte = input[offset];
    if (sizeByte === 0) {
        throw new Error('Invalid block header size (index indicator found instead of block)');
    }
    const headerSize = (sizeByte + 1) * 4;
    let cursor = offset + 1; // past the size byte
    const flags = input[cursor];
    cursor += 1;
    // Low 2 bits: filter count - 1. Bits 0x40/0x80: optional size fields present.
    const filterCount = (flags & 0x03) + 1;
    // The optional compressed/uncompressed size fields are decoded only to be
    // skipped — the Index supplies the sizes this decoder actually uses.
    if ((flags & 0x40) !== 0) {
        cursor += decodeMultibyte(input, cursor).bytesRead;
    }
    if ((flags & 0x80) !== 0) {
        cursor += decodeMultibyte(input, cursor).bytesRead;
    }
    const preFilters = [];
    let lzma2Props = null;
    let remaining = filterCount;
    while (remaining > 0) {
        remaining -= 1;
        const idInfo = decodeMultibyte(input, cursor);
        cursor += idInfo.bytesRead;
        const sizeInfo = decodeMultibyte(input, cursor);
        cursor += sizeInfo.bytesRead;
        const props = input.slice(cursor, cursor + sizeInfo.value);
        cursor += sizeInfo.value;
        const id = idInfo.value;
        if (id === FILTER_LZMA2) {
            // LZMA2 must be the last filter
            lzma2Props = props;
        } else if (id === FILTER_DELTA || (id >= FILTER_BCJ_X86 && id <= FILTER_BCJ_ARM64)) {
            // Preprocessing filter - store for later application
            preFilters.push({
                id,
                props
            });
        } else {
            throw new Error(`Unsupported filter: 0x${id.toString(16)}`);
        }
    }
    if (!lzma2Props) {
        throw new Error('No LZMA2 filter found in XZ block');
    }
    // Block data begins right after the (4-byte-aligned) header.
    const dataStart = headerStart + headerSize;
    return {
        filters: preFilters,
        lzma2Props,
        headerSize,
        dataStart,
        dataEnd: input.length,
        nextOffset: dataStart
    };
}
|
|
165
|
+
/**
 * Parse the XZ Index to recover each block's position and sizes.
 *
 * Each Index record stores the block's "Unpadded Size", which equals
 * Block Header Size + Compressed Data Size + Check Size (padding to the
 * 4-byte boundary is NOT included), plus the block's uncompressed size.
 * @param {Buffer} input - The whole XZ stream.
 * @param {number} indexStart - Position of the Index indicator byte.
 * @param {number} checkSize - Size in bytes of the per-block check field.
 * @returns {Array<{compressedPos: number, unpaddedSize: number,
 *                  compressedDataSize: number, uncompressedSize: number}>}
 * @throws {Error} when the Index indicator byte is missing.
 */ function parseIndex(input, indexStart, checkSize) {
    let cursor = indexStart;
    // The Index starts with a 0x00 indicator byte.
    if (input[cursor] !== 0x00) {
        throw new Error('Invalid index indicator');
    }
    cursor += 1;
    // Number of records follows as a multibyte integer.
    const countInfo = decodeMultibyte(input, cursor);
    cursor += countInfo.bytesRead;
    const records = [];
    for (let n = countInfo.value; n > 0; n -= 1) {
        // Unpadded Size (header + compressed data + check)
        const unpadded = decodeMultibyte(input, cursor);
        cursor += unpadded.bytesRead;
        // Uncompressed size
        const uncompressed = decodeMultibyte(input, cursor);
        cursor += uncompressed.bytesRead;
        records.push({
            compressedPos: 0, // filled in below
            unpaddedSize: unpadded.value,
            compressedDataSize: 0, // filled in below
            uncompressedSize: uncompressed.value
        });
    }
    // Walk the stream to turn unpadded sizes into absolute positions.
    // Blocks start immediately after the 12-byte stream header.
    let position = 12;
    for (const record of records) {
        record.compressedPos = position;
        // The true header size is read from the block's own first byte.
        const rawSize = input[position];
        const headerBytes = (rawSize + 1) * 4;
        // unpaddedSize = headerSize + compressedDataSize + checkSize
        record.compressedDataSize = record.unpaddedSize - headerBytes - checkSize;
        // Next block begins at unpaddedSize rounded up to a 4-byte boundary.
        position += Math.ceil(record.unpaddedSize / 4) * 4;
    }
    return records;
}
|
|
216
|
+
/**
 * Decompress XZ data synchronously.
 * Properly handles multi-block XZ files and stream padding.
 * @param {Buffer} input - Complete XZ compressed stream (header through footer)
 * @returns {Buffer} Decompressed data
 * @throws {Error} on bad magic bytes, a bad footer, or unsupported filters
 */ export function decodeXZ(input) {
    // Transpiler temporary for the `?? 0` fallback below.
    var _checkSizes_checkType;
    // Verify XZ magic; the length check also guarantees a full 12-byte stream header.
    if (input.length < 12 || !bufferEquals(input, 0, XZ_MAGIC)) {
        throw new Error('Invalid XZ magic bytes');
    }
    // Stream flags at offset 6-7; the low nibble of byte 7 selects the check type.
    const checkType = input[7] & 0x0f;
    // Check field sizes by check type: 0=None, 1=CRC32, 4=CRC64, 10=SHA-256.
    const checkSizes = {
        0: 0,
        1: 4,
        4: 8,
        10: 32
    };
    // Unknown check types fall back to a 0-byte check field.
    const checkSize = (_checkSizes_checkType = checkSizes[checkType]) !== null && _checkSizes_checkType !== void 0 ? _checkSizes_checkType : 0;
    // Find footer by skipping stream padding (null bytes at end before footer)
    // Stream padding must be multiple of 4 bytes
    let footerEnd = input.length;
    while(footerEnd > 12 && input[footerEnd - 1] === 0x00){
        footerEnd--;
    }
    // Align back up to a 4-byte boundary (stream padding rules).
    // NOTE(review): this re-includes up to 3 stripped zero bytes; since the
    // footer ends in the non-zero "YZ" magic, this should land on the true
    // footer end for well-formed streams — confirm against multi-stream files.
    while(footerEnd % 4 !== 0 && footerEnd > 12){
        footerEnd++;
    }
    // Verify footer magic "YZ" (at footerEnd - 2)
    if (!bufferEquals(input, footerEnd - 2, XZ_FOOTER_MAGIC)) {
        throw new Error('Invalid XZ footer magic');
    }
    // Backward Size (stored as size/4 - 1, little-endian at footerEnd - 8)
    // gives the Index length, locating the Index relative to the 12-byte footer.
    const backwardSize = (input.readUInt32LE(footerEnd - 8) + 1) * 4;
    const indexStart = footerEnd - 12 - backwardSize;
    // Parse Index to get block information
    const blockRecords = parseIndex(input, indexStart, checkSize);
    // Decompress each block in stream order
    const outputChunks = [];
    let _totalOutputSize = 0;
    for(let i = 0; i < blockRecords.length; i++){
        const record = blockRecords[i];
        const recordStart = record.compressedPos;
        // Parse block header (filter chain + LZMA2 properties)
        const blockInfo = parseBlockHeader(input, recordStart, checkSize);
        // Extract compressed data for this block
        const dataStart = recordStart + blockInfo.headerSize;
        // compressedDataSize is calculated from the Index's Unpadded Size minus header and check
        const dataEnd = dataStart + record.compressedDataSize;
        // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,
        // but the compressedSize from index is exact - no need to strip padding.
        // LZMA2 data includes a 0x00 end marker which must NOT be stripped.
        const compressedData = input.slice(dataStart, dataEnd);
        // Decompress this block with LZMA2, collecting output via the write callback.
        const blockChunks = [];
        decodeLzma2(compressedData, blockInfo.lzma2Props, record.uncompressedSize, {
            write: (chunk)=>{
                blockChunks.push(chunk);
            }
        });
        // Concatenate LZMA2 output
        let blockOutput = Buffer.concat(blockChunks);
        // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)
        // Filters are stored in order they were applied during compression,
        // so we need to reverse for decompression
        for(let j = blockInfo.filters.length - 1; j >= 0; j--){
            blockOutput = applyFilter(blockOutput, blockInfo.filters[j]);
        }
        outputChunks.push(blockOutput);
        _totalOutputSize += blockOutput.length;
    }
    return Buffer.concat(outputChunks);
}
|
|
292
|
+
/**
 * Create an XZ decompression Transform stream.
 *
 * All input chunks are buffered and decoded in one shot on flush, so output
 * is produced only at end-of-input.
 * @returns Transform stream that decompresses XZ data
 */ export function createXZDecoder() {
    const buffered = [];
    return new Transform({
        transform (chunk, _encoding, callback) {
            buffered.push(chunk);
            callback();
        },
        flush (callback) {
            try {
                const whole = Buffer.concat(buffered);
                const decoded = decodeXZ(whole);
                this.push(decoded);
                callback();
            } catch (err) {
                // Surface decode failures as a stream error.
                callback(err);
            }
        }
    });
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/xz/Decoder.ts"],"sourcesContent":["/**\n * XZ Decompression Module\n *\n * XZ is a container format that wraps LZMA2 compressed data.\n * This module provides both synchronous and streaming XZ decoders.\n *\n * Pure JavaScript implementation, works on Node.js 0.8+\n */\n\nimport { Transform } from 'extract-base-iterator';\nimport type { Transform as TransformType } from 'stream';\nimport { decodeBcj } from '../filters/bcj/Bcj.ts';\nimport { decodeBcjArm } from '../filters/bcj/BcjArm.ts';\nimport { decodeBcjArm64 } from '../filters/bcj/BcjArm64.ts';\nimport { decodeBcjArmt } from '../filters/bcj/BcjArmt.ts';\nimport { decodeBcjIa64 } from '../filters/bcj/BcjIa64.ts';\nimport { decodeBcjPpc } from '../filters/bcj/BcjPpc.ts';\nimport { decodeBcjSparc } from '../filters/bcj/BcjSparc.ts';\nimport { decodeDelta } from '../filters/delta/Delta.ts';\nimport { decodeLzma2 } from '../lzma/index.ts';\n\n// XZ magic bytes\nconst XZ_MAGIC = [0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00];\nconst XZ_FOOTER_MAGIC = [0x59, 0x5a]; // \"YZ\"\n\n// Filter IDs (from XZ specification)\nconst FILTER_DELTA = 0x03;\nconst FILTER_BCJ_X86 = 0x04;\nconst FILTER_BCJ_PPC = 0x05;\nconst FILTER_BCJ_IA64 = 0x06;\nconst FILTER_BCJ_ARM = 0x07;\nconst FILTER_BCJ_ARMT = 0x08;\nconst FILTER_BCJ_SPARC = 0x09;\nconst FILTER_BCJ_ARM64 = 0x0a;\nconst FILTER_LZMA2 = 0x21;\n\n// Filter info for parsing\ninterface FilterInfo {\n id: number;\n props: Buffer;\n}\n\n/**\n * Simple buffer comparison\n */\nfunction bufferEquals(buf: Buffer, offset: number, expected: number[]): boolean {\n if (offset + expected.length > buf.length) {\n return false;\n }\n for (let i = 0; i < expected.length; i++) {\n if (buf[offset + i] !== expected[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Decode variable-length integer (XZ multibyte encoding)\n * Returns number, but limits to 32-bit to work on Node 0.8+\n */\nfunction decodeMultibyte(buf: Buffer, 
offset: number): { value: number; bytesRead: number } {\n let value = 0;\n let i = 0;\n let byte: number;\n do {\n if (offset + i >= buf.length) {\n throw new Error('Truncated multibyte integer');\n }\n byte = buf[offset + i];\n value |= (byte & 0x7f) << (i * 7);\n i++;\n if (i > 4) {\n // Reduced to prevent overflow on Node 0.8\n throw new Error('Multibyte integer too large');\n }\n } while (byte & 0x80);\n return { value, bytesRead: i };\n}\n\n/**\n * Apply a preprocessing filter (BCJ/Delta) to decompressed data\n */\nfunction applyFilter(data: Buffer, filter: FilterInfo): Buffer {\n switch (filter.id) {\n case FILTER_BCJ_X86:\n return decodeBcj(data, filter.props);\n case FILTER_BCJ_ARM:\n return decodeBcjArm(data, filter.props);\n case FILTER_BCJ_ARM64:\n return decodeBcjArm64(data, filter.props);\n case FILTER_BCJ_ARMT:\n return decodeBcjArmt(data, filter.props);\n case FILTER_BCJ_PPC:\n return decodeBcjPpc(data, filter.props);\n case FILTER_BCJ_SPARC:\n return decodeBcjSparc(data, filter.props);\n case FILTER_BCJ_IA64:\n return decodeBcjIa64(data, filter.props);\n case FILTER_DELTA:\n return decodeDelta(data, filter.props);\n default:\n throw new Error(`Unsupported filter: 0x${filter.id.toString(16)}`);\n }\n}\n\n/**\n * Parse XZ Block Header to extract filters and LZMA2 properties\n */\nfunction parseBlockHeader(\n input: Buffer,\n offset: number,\n _checkSize: number\n): {\n filters: FilterInfo[];\n lzma2Props: Buffer;\n headerSize: number;\n dataStart: number;\n dataEnd: number;\n nextOffset: number;\n} {\n // Block header size\n const blockHeaderSizeRaw = input[offset];\n if (blockHeaderSizeRaw === 0) {\n throw new Error('Invalid block header size (index indicator found instead of block)');\n }\n const blockHeaderSize = (blockHeaderSizeRaw + 1) * 4;\n\n // Parse block header\n const blockHeaderStart = offset;\n offset++; // skip size byte\n\n const blockFlags = input[offset++];\n const numFilters = (blockFlags & 0x03) + 1;\n const hasCompressedSize = 
(blockFlags & 0x40) !== 0;\n const hasUncompressedSize = (blockFlags & 0x80) !== 0;\n\n // Skip optional sizes\n if (hasCompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n if (hasUncompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n // Parse all filters\n const filters: FilterInfo[] = [];\n let lzma2Props: Buffer | null = null;\n\n for (let i = 0; i < numFilters; i++) {\n const filterIdResult = decodeMultibyte(input, offset);\n const filterId = filterIdResult.value;\n offset += filterIdResult.bytesRead;\n\n const propsSizeResult = decodeMultibyte(input, offset);\n offset += propsSizeResult.bytesRead;\n\n const filterProps = input.slice(offset, offset + propsSizeResult.value);\n offset += propsSizeResult.value;\n\n if (filterId === FILTER_LZMA2) {\n // LZMA2 must be the last filter\n lzma2Props = filterProps;\n } else if (filterId === FILTER_DELTA || (filterId >= FILTER_BCJ_X86 && filterId <= FILTER_BCJ_ARM64)) {\n // Preprocessing filter - store for later application\n filters.push({ id: filterId, props: filterProps });\n } else {\n throw new Error(`Unsupported filter: 0x${filterId.toString(16)}`);\n }\n }\n\n if (!lzma2Props) {\n throw new Error('No LZMA2 filter found in XZ block');\n }\n\n // Skip to end of block header (must be aligned to 4 bytes)\n const blockDataStart = blockHeaderStart + blockHeaderSize;\n\n return {\n filters,\n lzma2Props,\n headerSize: blockHeaderSize,\n dataStart: blockDataStart,\n dataEnd: input.length,\n nextOffset: blockDataStart,\n };\n}\n\n/**\n * Parse XZ Index to get block positions\n *\n * XZ Index stores \"Unpadded Size\" for each block which equals:\n * Block Header Size + Compressed Data Size + Check Size\n * (does NOT include padding to 4-byte boundary)\n */\nfunction parseIndex(\n input: Buffer,\n indexStart: number,\n checkSize: number\n): Array<{\n compressedPos: number;\n compressedDataSize: number;\n uncompressedSize: 
number;\n}> {\n let offset = indexStart;\n\n // Index indicator (0x00)\n if (input[offset] !== 0x00) {\n throw new Error('Invalid index indicator');\n }\n offset++;\n\n // Number of records\n const countResult = decodeMultibyte(input, offset);\n const recordCount = countResult.value;\n offset += countResult.bytesRead;\n\n const records: Array<{\n compressedPos: number;\n unpaddedSize: number;\n compressedDataSize: number;\n uncompressedSize: number;\n }> = [];\n\n // Parse each record\n for (let i = 0; i < recordCount; i++) {\n // Unpadded Size (header + compressed data + check)\n const unpaddedResult = decodeMultibyte(input, offset);\n offset += unpaddedResult.bytesRead;\n\n // Uncompressed size\n const uncompressedResult = decodeMultibyte(input, offset);\n offset += uncompressedResult.bytesRead;\n\n records.push({\n compressedPos: 0, // will be calculated\n unpaddedSize: unpaddedResult.value,\n compressedDataSize: 0, // will be calculated\n uncompressedSize: uncompressedResult.value,\n });\n }\n\n // Calculate actual positions by walking through blocks\n let currentPos = 12; // After stream header\n for (let i = 0; i < records.length; i++) {\n const record = records[i];\n // Record where this block's header starts\n record.compressedPos = currentPos;\n\n // Get block header size from the actual data\n const headerSizeRaw = input[currentPos];\n const headerSize = (headerSizeRaw + 1) * 4;\n\n // Calculate compressed data size from unpadded size\n // unpaddedSize = headerSize + compressedDataSize + checkSize\n record.compressedDataSize = record.unpaddedSize - headerSize - checkSize;\n\n // Move to next block: unpaddedSize + padding to 4-byte boundary\n const paddedSize = Math.ceil(record.unpaddedSize / 4) * 4;\n currentPos += paddedSize;\n }\n\n return records;\n}\n\n/**\n * Decompress XZ data synchronously\n * Properly handles multi-block XZ files and stream padding\n * @param input - XZ compressed data\n * @returns Decompressed data\n */\nexport function 
decodeXZ(input: Buffer): Buffer {\n // Verify XZ magic\n if (input.length < 12 || !bufferEquals(input, 0, XZ_MAGIC)) {\n throw new Error('Invalid XZ magic bytes');\n }\n\n // Stream flags at offset 6-7\n const checkType = input[7] & 0x0f;\n\n // Check sizes based on check type\n const checkSizes: { [key: number]: number } = {\n 0: 0, // None\n 1: 4, // CRC32\n 4: 8, // CRC64\n 10: 32, // SHA-256\n };\n const checkSize = checkSizes[checkType] ?? 0;\n\n // Find footer by skipping stream padding (null bytes at end before footer)\n // Stream padding must be multiple of 4 bytes\n let footerEnd = input.length;\n while (footerEnd > 12 && input[footerEnd - 1] === 0x00) {\n footerEnd--;\n }\n // Align to 4-byte boundary (stream padding rules)\n while (footerEnd % 4 !== 0 && footerEnd > 12) {\n footerEnd++;\n }\n\n // Verify footer magic (at footerEnd - 2)\n if (!bufferEquals(input, footerEnd - 2, XZ_FOOTER_MAGIC)) {\n throw new Error('Invalid XZ footer magic');\n }\n\n // Get backward size (tells us where index starts) - at footerEnd - 8\n const backwardSize = (input.readUInt32LE(footerEnd - 8) + 1) * 4;\n const indexStart = footerEnd - 12 - backwardSize;\n\n // Parse Index to get block information\n const blockRecords = parseIndex(input, indexStart, checkSize);\n\n // Decompress each block\n const outputChunks: Buffer[] = [];\n let _totalOutputSize = 0;\n\n for (let i = 0; i < blockRecords.length; i++) {\n const record = blockRecords[i];\n const recordStart = record.compressedPos;\n\n // Parse block header\n const blockInfo = parseBlockHeader(input, recordStart, checkSize);\n\n // Extract compressed data for this block\n const dataStart = recordStart + blockInfo.headerSize;\n // compressedDataSize is calculated from the Index's Unpadded Size minus header and check\n const dataEnd = dataStart + record.compressedDataSize;\n\n // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,\n // but the compressedSize from index is exact - no need to strip 
padding.\n // LZMA2 data includes a 0x00 end marker which must NOT be stripped.\n const compressedData = input.slice(dataStart, dataEnd);\n\n // Decompress this block with LZMA2\n const blockChunks: Buffer[] = [];\n decodeLzma2(compressedData, blockInfo.lzma2Props, record.uncompressedSize, {\n write: (chunk: Buffer) => {\n blockChunks.push(chunk);\n },\n });\n\n // Concatenate LZMA2 output\n let blockOutput = Buffer.concat(blockChunks) as Buffer;\n\n // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)\n // Filters are stored in order they were applied during compression,\n // so we need to reverse for decompression\n for (let j = blockInfo.filters.length - 1; j >= 0; j--) {\n blockOutput = applyFilter(blockOutput, blockInfo.filters[j]) as Buffer;\n }\n\n outputChunks.push(blockOutput);\n _totalOutputSize += blockOutput.length;\n }\n\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an XZ decompression Transform stream\n * @returns Transform stream that decompresses XZ data\n */\nexport function createXZDecoder(): TransformType {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform(chunk: Buffer, _encoding: string, callback: (error?: Error | null) => void) {\n chunks.push(chunk);\n callback();\n },\n\n flush(callback: (error?: Error | null) => void) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeXZ(input);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n 
});\n}\n"],"names":["Transform","decodeBcj","decodeBcjArm","decodeBcjArm64","decodeBcjArmt","decodeBcjIa64","decodeBcjPpc","decodeBcjSparc","decodeDelta","decodeLzma2","XZ_MAGIC","XZ_FOOTER_MAGIC","FILTER_DELTA","FILTER_BCJ_X86","FILTER_BCJ_PPC","FILTER_BCJ_IA64","FILTER_BCJ_ARM","FILTER_BCJ_ARMT","FILTER_BCJ_SPARC","FILTER_BCJ_ARM64","FILTER_LZMA2","bufferEquals","buf","offset","expected","length","i","decodeMultibyte","value","byte","Error","bytesRead","applyFilter","data","filter","id","props","toString","parseBlockHeader","input","_checkSize","blockHeaderSizeRaw","blockHeaderSize","blockHeaderStart","blockFlags","numFilters","hasCompressedSize","hasUncompressedSize","result","filters","lzma2Props","filterIdResult","filterId","propsSizeResult","filterProps","slice","push","blockDataStart","headerSize","dataStart","dataEnd","nextOffset","parseIndex","indexStart","checkSize","countResult","recordCount","records","unpaddedResult","uncompressedResult","compressedPos","unpaddedSize","compressedDataSize","uncompressedSize","currentPos","record","headerSizeRaw","paddedSize","Math","ceil","decodeXZ","checkSizes","checkType","footerEnd","backwardSize","readUInt32LE","blockRecords","outputChunks","_totalOutputSize","recordStart","blockInfo","compressedData","blockChunks","write","chunk","blockOutput","Buffer","concat","j","createXZDecoder","chunks","transform","_encoding","callback","flush","output","err"],"mappings":"AAAA;;;;;;;CAOC,GAED,SAASA,SAAS,QAAQ,wBAAwB;AAElD,SAASC,SAAS,QAAQ,wBAAwB;AAClD,SAASC,YAAY,QAAQ,2BAA2B;AACxD,SAASC,cAAc,QAAQ,6BAA6B;AAC5D,SAASC,aAAa,QAAQ,4BAA4B;AAC1D,SAASC,aAAa,QAAQ,4BAA4B;AAC1D,SAASC,YAAY,QAAQ,2BAA2B;AACxD,SAASC,cAAc,QAAQ,6BAA6B;AAC5D,SAASC,WAAW,QAAQ,4BAA4B;AACxD,SAASC,WAAW,QAAQ,mBAAmB;AAE/C,iBAAiB;AACjB,MAAMC,WAAW;IAAC;IAAM;IAAM;IAAM;IAAM;IAAM;CAAK;AACrD,MAAMC,kBAAkB;IAAC;IAAM;CAAK,EAAE,OAAO;AAE7C,qCAAqC;AACrC,MAAMC,eAAe;AACrB,MAAMC,iBAAiB;AACvB,MAAMC,iBAAiB;AACvB,MAAMC,kBAAkB;AACxB,MAAMC,iBAAiB;AACvB,MAAMC,kBAAkB;AACxB,MAAMC,mBAAmB;AACzB,M
AAMC,mBAAmB;AACzB,MAAMC,eAAe;AAQrB;;CAEC,GACD,SAASC,aAAaC,GAAW,EAAEC,MAAc,EAAEC,QAAkB;IACnE,IAAID,SAASC,SAASC,MAAM,GAAGH,IAAIG,MAAM,EAAE;QACzC,OAAO;IACT;IACA,IAAK,IAAIC,IAAI,GAAGA,IAAIF,SAASC,MAAM,EAAEC,IAAK;QACxC,IAAIJ,GAAG,CAACC,SAASG,EAAE,KAAKF,QAAQ,CAACE,EAAE,EAAE;YACnC,OAAO;QACT;IACF;IACA,OAAO;AACT;AAEA;;;CAGC,GACD,SAASC,gBAAgBL,GAAW,EAAEC,MAAc;IAClD,IAAIK,QAAQ;IACZ,IAAIF,IAAI;IACR,IAAIG;IACJ,GAAG;QACD,IAAIN,SAASG,KAAKJ,IAAIG,MAAM,EAAE;YAC5B,MAAM,IAAIK,MAAM;QAClB;QACAD,OAAOP,GAAG,CAACC,SAASG,EAAE;QACtBE,SAAS,AAACC,CAAAA,OAAO,IAAG,KAAOH,IAAI;QAC/BA;QACA,IAAIA,IAAI,GAAG;YACT,0CAA0C;YAC1C,MAAM,IAAII,MAAM;QAClB;IACF,QAASD,OAAO,KAAM;IACtB,OAAO;QAAED;QAAOG,WAAWL;IAAE;AAC/B;AAEA;;CAEC,GACD,SAASM,YAAYC,IAAY,EAAEC,MAAkB;IACnD,OAAQA,OAAOC,EAAE;QACf,KAAKtB;YACH,OAAOZ,UAAUgC,MAAMC,OAAOE,KAAK;QACrC,KAAKpB;YACH,OAAOd,aAAa+B,MAAMC,OAAOE,KAAK;QACxC,KAAKjB;YACH,OAAOhB,eAAe8B,MAAMC,OAAOE,KAAK;QAC1C,KAAKnB;YACH,OAAOb,cAAc6B,MAAMC,OAAOE,KAAK;QACzC,KAAKtB;YACH,OAAOR,aAAa2B,MAAMC,OAAOE,KAAK;QACxC,KAAKlB;YACH,OAAOX,eAAe0B,MAAMC,OAAOE,KAAK;QAC1C,KAAKrB;YACH,OAAOV,cAAc4B,MAAMC,OAAOE,KAAK;QACzC,KAAKxB;YACH,OAAOJ,YAAYyB,MAAMC,OAAOE,KAAK;QACvC;YACE,MAAM,IAAIN,MAAM,CAAC,sBAAsB,EAAEI,OAAOC,EAAE,CAACE,QAAQ,CAAC,KAAK;IACrE;AACF;AAEA;;CAEC,GACD,SAASC,iBACPC,KAAa,EACbhB,MAAc,EACdiB,UAAkB;IASlB,oBAAoB;IACpB,MAAMC,qBAAqBF,KAAK,CAAChB,OAAO;IACxC,IAAIkB,uBAAuB,GAAG;QAC5B,MAAM,IAAIX,MAAM;IAClB;IACA,MAAMY,kBAAkB,AAACD,CAAAA,qBAAqB,CAAA,IAAK;IAEnD,qBAAqB;IACrB,MAAME,mBAAmBpB;IACzBA,UAAU,iBAAiB;IAE3B,MAAMqB,aAAaL,KAAK,CAAChB,SAAS;IAClC,MAAMsB,aAAa,AAACD,CAAAA,aAAa,IAAG,IAAK;IACzC,MAAME,oBAAoB,AAACF,CAAAA,aAAa,IAAG,MAAO;IAClD,MAAMG,sBAAsB,AAACH,CAAAA,aAAa,IAAG,MAAO;IAEpD,sBAAsB;IACtB,IAAIE,mBAAmB;QACrB,MAAME,SAASrB,gBAAgBY,OAAOhB;QACtCA,UAAUyB,OAAOjB,SAAS;IAC5B;IAEA,IAAIgB,qBAAqB;QACvB,MAAMC,SAASrB,gBAAgBY,OAAOhB;QACtCA,UAAUyB,OAAOjB,SAAS;IAC5B;IAEA,oBAAoB;IACpB,MAAMkB,UAAwB,EAAE;IAChC,IAAIC,aAA4B;IAEhC,IAAK,IAAIxB,IAAI,GAAGA,IAAImB,YAAYnB,IAAK;QACnC,MAAMyB,iBAAiBxB,gBAAgBY,OAAOhB;QAC9C,MAAM6B,WAAWD,eAA
evB,KAAK;QACrCL,UAAU4B,eAAepB,SAAS;QAElC,MAAMsB,kBAAkB1B,gBAAgBY,OAAOhB;QAC/CA,UAAU8B,gBAAgBtB,SAAS;QAEnC,MAAMuB,cAAcf,MAAMgB,KAAK,CAAChC,QAAQA,SAAS8B,gBAAgBzB,KAAK;QACtEL,UAAU8B,gBAAgBzB,KAAK;QAE/B,IAAIwB,aAAahC,cAAc;YAC7B,gCAAgC;YAChC8B,aAAaI;QACf,OAAO,IAAIF,aAAaxC,gBAAiBwC,YAAYvC,kBAAkBuC,YAAYjC,kBAAmB;YACpG,qDAAqD;YACrD8B,QAAQO,IAAI,CAAC;gBAAErB,IAAIiB;gBAAUhB,OAAOkB;YAAY;QAClD,OAAO;YACL,MAAM,IAAIxB,MAAM,CAAC,sBAAsB,EAAEsB,SAASf,QAAQ,CAAC,KAAK;QAClE;IACF;IAEA,IAAI,CAACa,YAAY;QACf,MAAM,IAAIpB,MAAM;IAClB;IAEA,2DAA2D;IAC3D,MAAM2B,iBAAiBd,mBAAmBD;IAE1C,OAAO;QACLO;QACAC;QACAQ,YAAYhB;QACZiB,WAAWF;QACXG,SAASrB,MAAMd,MAAM;QACrBoC,YAAYJ;IACd;AACF;AAEA;;;;;;CAMC,GACD,SAASK,WACPvB,KAAa,EACbwB,UAAkB,EAClBC,SAAiB;IAMjB,IAAIzC,SAASwC;IAEb,yBAAyB;IACzB,IAAIxB,KAAK,CAAChB,OAAO,KAAK,MAAM;QAC1B,MAAM,IAAIO,MAAM;IAClB;IACAP;IAEA,oBAAoB;IACpB,MAAM0C,cAActC,gBAAgBY,OAAOhB;IAC3C,MAAM2C,cAAcD,YAAYrC,KAAK;IACrCL,UAAU0C,YAAYlC,SAAS;IAE/B,MAAMoC,UAKD,EAAE;IAEP,oBAAoB;IACpB,IAAK,IAAIzC,IAAI,GAAGA,IAAIwC,aAAaxC,IAAK;QACpC,mDAAmD;QACnD,MAAM0C,iBAAiBzC,gBAAgBY,OAAOhB;QAC9CA,UAAU6C,eAAerC,SAAS;QAElC,oBAAoB;QACpB,MAAMsC,qBAAqB1C,gBAAgBY,OAAOhB;QAClDA,UAAU8C,mBAAmBtC,SAAS;QAEtCoC,QAAQX,IAAI,CAAC;YACXc,eAAe;YACfC,cAAcH,eAAexC,KAAK;YAClC4C,oBAAoB;YACpBC,kBAAkBJ,mBAAmBzC,KAAK;QAC5C;IACF;IAEA,uDAAuD;IACvD,IAAI8C,aAAa,IAAI,sBAAsB;IAC3C,IAAK,IAAIhD,IAAI,GAAGA,IAAIyC,QAAQ1C,MAAM,EAAEC,IAAK;QACvC,MAAMiD,SAASR,OAAO,CAACzC,EAAE;QACzB,0CAA0C;QAC1CiD,OAAOL,aAAa,GAAGI;QAEvB,6CAA6C;QAC7C,MAAME,gBAAgBrC,KAAK,CAACmC,WAAW;QACvC,MAAMhB,aAAa,AAACkB,CAAAA,gBAAgB,CAAA,IAAK;QAEzC,oDAAoD;QACpD,6DAA6D;QAC7DD,OAAOH,kBAAkB,GAAGG,OAAOJ,YAAY,GAAGb,aAAaM;QAE/D,gEAAgE;QAChE,MAAMa,aAAaC,KAAKC,IAAI,CAACJ,OAAOJ,YAAY,GAAG,KAAK;QACxDG,cAAcG;IAChB;IAEA,OAAOV;AACT;AAEA;;;;;CAKC,GACD,OAAO,SAASa,SAASzC,KAAa;QAgBlB0C;IAflB,kBAAkB;IAClB,IAAI1C,MAAMd,MAAM,GAAG,MAAM,CAACJ,aAAakB,OAAO,GAAG7B,WAAW;QAC1D,MAAM,IAAIoB,MAAM;IAClB;IAEA,6BAA6B;IAC7B,MAAMoD,YAAY3C,KAAK,CAAC,EAAE,GAAG;IAE7B,kCAAkC;IAClC,MAAM0C,aAAwC;QAC5C,GAAG;QACH,GAAG;QACH,GAAG;
QACH,IAAI;IACN;IACA,MAAMjB,aAAYiB,wBAAAA,UAAU,CAACC,UAAU,cAArBD,mCAAAA,wBAAyB;IAE3C,2EAA2E;IAC3E,6CAA6C;IAC7C,IAAIE,YAAY5C,MAAMd,MAAM;IAC5B,MAAO0D,YAAY,MAAM5C,KAAK,CAAC4C,YAAY,EAAE,KAAK,KAAM;QACtDA;IACF;IACA,kDAAkD;IAClD,MAAOA,YAAY,MAAM,KAAKA,YAAY,GAAI;QAC5CA;IACF;IAEA,yCAAyC;IACzC,IAAI,CAAC9D,aAAakB,OAAO4C,YAAY,GAAGxE,kBAAkB;QACxD,MAAM,IAAImB,MAAM;IAClB;IAEA,qEAAqE;IACrE,MAAMsD,eAAe,AAAC7C,CAAAA,MAAM8C,YAAY,CAACF,YAAY,KAAK,CAAA,IAAK;IAC/D,MAAMpB,aAAaoB,YAAY,KAAKC;IAEpC,uCAAuC;IACvC,MAAME,eAAexB,WAAWvB,OAAOwB,YAAYC;IAEnD,wBAAwB;IACxB,MAAMuB,eAAyB,EAAE;IACjC,IAAIC,mBAAmB;IAEvB,IAAK,IAAI9D,IAAI,GAAGA,IAAI4D,aAAa7D,MAAM,EAAEC,IAAK;QAC5C,MAAMiD,SAASW,YAAY,CAAC5D,EAAE;QAC9B,MAAM+D,cAAcd,OAAOL,aAAa;QAExC,qBAAqB;QACrB,MAAMoB,YAAYpD,iBAAiBC,OAAOkD,aAAazB;QAEvD,yCAAyC;QACzC,MAAML,YAAY8B,cAAcC,UAAUhC,UAAU;QACpD,yFAAyF;QACzF,MAAME,UAAUD,YAAYgB,OAAOH,kBAAkB;QAErD,0EAA0E;QAC1E,yEAAyE;QACzE,oEAAoE;QACpE,MAAMmB,iBAAiBpD,MAAMgB,KAAK,CAACI,WAAWC;QAE9C,mCAAmC;QACnC,MAAMgC,cAAwB,EAAE;QAChCnF,YAAYkF,gBAAgBD,UAAUxC,UAAU,EAAEyB,OAAOF,gBAAgB,EAAE;YACzEoB,OAAO,CAACC;gBACNF,YAAYpC,IAAI,CAACsC;YACnB;QACF;QAEA,2BAA2B;QAC3B,IAAIC,cAAcC,OAAOC,MAAM,CAACL;QAEhC,+EAA+E;QAC/E,oEAAoE;QACpE,0CAA0C;QAC1C,IAAK,IAAIM,IAAIR,UAAUzC,OAAO,CAACxB,MAAM,GAAG,GAAGyE,KAAK,GAAGA,IAAK;YACtDH,cAAc/D,YAAY+D,aAAaL,UAAUzC,OAAO,CAACiD,EAAE;QAC7D;QAEAX,aAAa/B,IAAI,CAACuC;QAClBP,oBAAoBO,YAAYtE,MAAM;IACxC;IAEA,OAAOuE,OAAOC,MAAM,CAACV;AACvB;AAEA;;;CAGC,GACD,OAAO,SAASY;IACd,MAAMC,SAAmB,EAAE;IAE3B,OAAO,IAAIpG,UAAU;QACnBqG,WAAUP,KAAa,EAAEQ,SAAiB,EAAEC,QAAwC;YAClFH,OAAO5C,IAAI,CAACsC;YACZS;QACF;QAEAC,OAAMD,QAAwC;YAC5C,IAAI;gBACF,MAAMhE,QAAQyD,OAAOC,MAAM,CAACG;gBAC5B,MAAMK,SAASzB,SAASzC;gBACxB,IAAI,CAACiB,IAAI,CAACiD;gBACVF;YACF,EAAE,OAAOG,KAAK;gBACZH,SAASG;YACX;QACF;IACF;AACF"}
|
package/package.json
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "xz-compat",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "XZ Decompression Library",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"extract",
|
|
7
|
+
"iterator",
|
|
8
|
+
"asyncIterator",
|
|
9
|
+
"Symbol.asyncIterator",
|
|
10
|
+
"7z",
|
|
11
|
+
"7zip",
|
|
12
|
+
"seven-zip",
|
|
13
|
+
"decompress",
|
|
14
|
+
"lzma",
|
|
15
|
+
"lzma2",
|
|
16
|
+
"archive",
|
|
17
|
+
"stream"
|
|
18
|
+
],
|
|
19
|
+
"homepage": "https://github.com/kmalakoff/xz-compat",
|
|
20
|
+
"bugs": {
|
|
21
|
+
"url": "https://github.com/kmalakoff/xz-compat/issues"
|
|
22
|
+
},
|
|
23
|
+
"repository": {
|
|
24
|
+
"type": "git",
|
|
25
|
+
"url": "git+ssh://git@github.com/kmalakoff/xz-compat.git"
|
|
26
|
+
},
|
|
27
|
+
"license": "MIT",
|
|
28
|
+
"author": "Kevin Malakoff <kmalakoff@gmail.com> (https://github.com/kmalakoff)",
|
|
29
|
+
"type": "module",
|
|
30
|
+
"exports": {
|
|
31
|
+
".": {
|
|
32
|
+
"import": "./dist/esm/index.js",
|
|
33
|
+
"require": "./dist/cjs/index.js"
|
|
34
|
+
},
|
|
35
|
+
"./package.json": "./package.json"
|
|
36
|
+
},
|
|
37
|
+
"main": "dist/cjs/index.js",
|
|
38
|
+
"source": "src/index.ts",
|
|
39
|
+
"types": "dist/cjs/index.d.ts",
|
|
40
|
+
"directories": {
|
|
41
|
+
"doc": "docs",
|
|
42
|
+
"test": "test"
|
|
43
|
+
},
|
|
44
|
+
"files": [
|
|
45
|
+
"dist"
|
|
46
|
+
],
|
|
47
|
+
"scripts": {
|
|
48
|
+
"build": "tsds build",
|
|
49
|
+
"format": "tsds format",
|
|
50
|
+
"prepublishOnly": "tsds validate",
|
|
51
|
+
"test": "tsds test:node --no-timeouts",
|
|
52
|
+
"test:engines": "nvu engines tsds test:node --no-timeouts",
|
|
53
|
+
"version": "tsds version"
|
|
54
|
+
},
|
|
55
|
+
"dependencies": {
|
|
56
|
+
"extract-base-iterator": "^3.0.0",
|
|
57
|
+
"os-shim": "^0.1.3"
|
|
58
|
+
},
|
|
59
|
+
"devDependencies": {
|
|
60
|
+
"@types/mocha": "*",
|
|
61
|
+
"@types/node": "*",
|
|
62
|
+
"fs-iterator": "^7.0.0",
|
|
63
|
+
"fs-remove-compat": "^1.0.1",
|
|
64
|
+
"fs-stats-spys": "^1.0.0",
|
|
65
|
+
"get-file-compat": "^2.0.0",
|
|
66
|
+
"mkdirp-classic": "^0.5.3",
|
|
67
|
+
"node-version-use": "*",
|
|
68
|
+
"tar-iterator": "^3.3.1",
|
|
69
|
+
"ts-dev-stack": "*",
|
|
70
|
+
"tsds-config": "*"
|
|
71
|
+
},
|
|
72
|
+
"engines": {
|
|
73
|
+
"node": ">=0.8"
|
|
74
|
+
}
|
|
75
|
+
}
|