7z-iterator 1.1.2 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.d.cts +12 -4
- package/dist/cjs/FileEntry.d.ts +12 -4
- package/dist/cjs/FileEntry.js +52 -24
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/SevenZipIterator.d.cts +25 -2
- package/dist/cjs/SevenZipIterator.d.ts +25 -2
- package/dist/cjs/SevenZipIterator.js +68 -21
- package/dist/cjs/SevenZipIterator.js.map +1 -1
- package/dist/cjs/index.d.cts +1 -2
- package/dist/cjs/index.d.ts +1 -2
- package/dist/cjs/index.js +19 -3
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/lib/streamToSource.d.cts +8 -11
- package/dist/cjs/lib/streamToSource.d.ts +8 -11
- package/dist/cjs/lib/streamToSource.js +21 -67
- package/dist/cjs/lib/streamToSource.js.map +1 -1
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js +148 -0
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/cjs/lzma/index.d.cts +13 -0
- package/dist/cjs/lzma/index.d.ts +13 -0
- package/dist/cjs/lzma/index.js +63 -0
- package/dist/cjs/lzma/index.js.map +1 -0
- package/dist/cjs/lzma/stream/transforms.d.cts +38 -0
- package/dist/cjs/lzma/stream/transforms.d.ts +38 -0
- package/dist/cjs/lzma/stream/transforms.js +149 -0
- package/dist/cjs/lzma/stream/transforms.js.map +1 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +135 -0
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js +440 -0
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js +162 -0
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/cjs/lzma/types.d.cts +110 -0
- package/dist/cjs/lzma/types.d.ts +110 -0
- package/dist/cjs/lzma/types.js +264 -0
- package/dist/cjs/lzma/types.js.map +1 -0
- package/dist/cjs/nextEntry.js +24 -26
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/cjs/sevenz/ArchiveSource.js +69 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/FolderStreamSplitter.d.cts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js +229 -0
- package/dist/cjs/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/cjs/sevenz/SevenZipParser.d.cts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/cjs/sevenz/SevenZipParser.js +574 -203
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BZip2.js +2 -1
- package/dist/cjs/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Bcj.js +102 -8
- package/dist/cjs/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/BcjArm.js +51 -9
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Copy.d.cts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/cjs/sevenz/codecs/Copy.js +2 -15
- package/dist/cjs/sevenz/codecs/Copy.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Deflate.d.cts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/cjs/sevenz/codecs/Deflate.js +4 -9
- package/dist/cjs/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/cjs/sevenz/codecs/Delta.js +29 -10
- package/dist/cjs/sevenz/codecs/Delta.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/cjs/sevenz/codecs/Lzma.js +13 -28
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/cjs/sevenz/codecs/Lzma2.js +17 -198
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.cts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js +2 -15
- package/dist/cjs/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/cjs/types.d.cts +2 -16
- package/dist/cjs/types.d.ts +2 -16
- package/dist/cjs/types.js.map +1 -1
- package/dist/esm/FileEntry.d.ts +12 -4
- package/dist/esm/FileEntry.js +52 -26
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/SevenZipIterator.d.ts +25 -2
- package/dist/esm/SevenZipIterator.js +69 -22
- package/dist/esm/SevenZipIterator.js.map +1 -1
- package/dist/esm/index.d.ts +1 -2
- package/dist/esm/index.js +2 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/streamToSource.d.ts +8 -11
- package/dist/esm/lib/streamToSource.js +22 -68
- package/dist/esm/lib/streamToSource.js.map +1 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +73 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js +137 -0
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +1 -0
- package/dist/esm/lzma/index.d.ts +13 -0
- package/dist/esm/lzma/index.js +15 -0
- package/dist/esm/lzma/index.js.map +1 -0
- package/dist/esm/lzma/stream/transforms.d.ts +38 -0
- package/dist/esm/lzma/stream/transforms.js +150 -0
- package/dist/esm/lzma/stream/transforms.js.map +1 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +30 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js +115 -0
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -0
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +82 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js +403 -0
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -0
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +69 -0
- package/dist/esm/lzma/sync/RangeDecoder.js +132 -0
- package/dist/esm/lzma/sync/RangeDecoder.js.map +1 -0
- package/dist/esm/lzma/types.d.ts +110 -0
- package/dist/esm/lzma/types.js +154 -0
- package/dist/esm/lzma/types.js.map +1 -0
- package/dist/esm/nextEntry.js +24 -26
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +16 -0
- package/dist/esm/sevenz/ArchiveSource.js +70 -1
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/FolderStreamSplitter.d.ts +101 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js +207 -0
- package/dist/esm/sevenz/FolderStreamSplitter.js.map +1 -0
- package/dist/esm/sevenz/SevenZipParser.d.ts +71 -10
- package/dist/esm/sevenz/SevenZipParser.js +414 -198
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/BZip2.js +2 -1
- package/dist/esm/sevenz/codecs/BZip2.js.map +1 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Bcj.js +106 -6
- package/dist/esm/sevenz/codecs/Bcj.js.map +1 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +5 -4
- package/dist/esm/sevenz/codecs/BcjArm.js +55 -7
- package/dist/esm/sevenz/codecs/BcjArm.js.map +1 -1
- package/dist/esm/sevenz/codecs/Copy.d.ts +2 -4
- package/dist/esm/sevenz/codecs/Copy.js +1 -9
- package/dist/esm/sevenz/codecs/Copy.js.map +1 -1
- package/dist/esm/sevenz/codecs/Deflate.d.ts +6 -4
- package/dist/esm/sevenz/codecs/Deflate.js +9 -7
- package/dist/esm/sevenz/codecs/Deflate.js.map +1 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +5 -4
- package/dist/esm/sevenz/codecs/Delta.js +33 -8
- package/dist/esm/sevenz/codecs/Delta.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +5 -2
- package/dist/esm/sevenz/codecs/Lzma.js +17 -24
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +9 -3
- package/dist/esm/sevenz/codecs/Lzma2.js +15 -196
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts +2 -2
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js +1 -9
- package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map +1 -1
- package/dist/esm/types.d.ts +2 -16
- package/dist/esm/types.js.map +1 -1
- package/package.json +3 -3
- package/assets/lzma-purejs/LICENSE +0 -11
- package/assets/lzma-purejs/index.js +0 -19
- package/assets/lzma-purejs/lib/LZ/OutWindow.js +0 -78
- package/assets/lzma-purejs/lib/LZ.js +0 -6
- package/assets/lzma-purejs/lib/LZMA/Base.js +0 -48
- package/assets/lzma-purejs/lib/LZMA/Decoder.js +0 -328
- package/assets/lzma-purejs/lib/LZMA.js +0 -6
- package/assets/lzma-purejs/lib/RangeCoder/BitTreeDecoder.js +0 -41
- package/assets/lzma-purejs/lib/RangeCoder/Decoder.js +0 -58
- package/assets/lzma-purejs/lib/RangeCoder/Encoder.js +0 -106
- package/assets/lzma-purejs/lib/RangeCoder.js +0 -10
- package/assets/lzma-purejs/lib/Stream.js +0 -41
- package/assets/lzma-purejs/lib/Util.js +0 -114
- package/assets/lzma-purejs/lib/makeBuffer.js +0 -25
- package/assets/lzma-purejs/package-lock.json +0 -13
- package/assets/lzma-purejs/package.json +0 -8
package/dist/esm/sevenz/codecs/Lzma2.js
CHANGED
@@ -1,7 +1,4 @@
-import Module from 'module';
-const _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;
-// LZMA2 codec using lzma-purejs
-// LZMA2 is a container format that wraps LZMA chunks with framing
+// LZMA2 codec using TypeScript LZMA decoder
 //
 // LZMA2 format specification:
 // https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md
@@ -11,209 +8,31 @@ const _require = typeof require === 'undefined' ? Module.createRequire(import.me
 // 0x01 = Uncompressed chunk, dictionary reset
 // 0x02 = Uncompressed chunk, no dictionary reset
 // 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)
-import { allocBufferUnsafe } from 'extract-base-iterator';
-import createBufferingDecoder from './createBufferingDecoder.js';
-import { createInputStream, createOutputStream } from './streams.js';
-// Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)
-// Path accounts for build output in dist/esm/sevenz/codecs/
-const { LZMA } = _require('../../../../assets/lzma-purejs');
-const LzmaDecoder = LZMA.Decoder;
-/**
- * Decode LZMA2 dictionary size from properties byte
- * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar
- *
- * Per XZ spec, dictionary sizes are:
- * 0x00 = 4 KiB (2^12)
- * 0x01 = 6 KiB
- * 0x02 = 8 KiB (2^13)
- * ...
- * 0x28 = 1.5 GiB
- */ function decodeDictionarySize(propByte) {
-    if (propByte > 40) {
-        throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);
-    }
-    if (propByte === 40) {
-        // Max dictionary size: 4 GiB - 1
-        return 0xffffffff;
-    }
-    // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)
-    const base = 2 | propByte & 1;
-    const shift = Math.floor(propByte / 2) + 11;
-    return base << shift;
-}
+import { createLzma2Decoder as createLzma2Transform, decodeLzma2 as lzma2Decode } from '../../lzma/index.js';
 /**
  * Decode LZMA2 compressed data to buffer
  *
  * @param input - LZMA2 compressed data
  * @param properties - Properties buffer (1 byte: dictionary size)
- * @param unpackSize - Expected output size (used for pre-allocation to reduce memory)
+ * @param unpackSize - Expected output size (optional, for pre-allocation)
  * @returns Decompressed data
  */ export function decodeLzma2(input, properties, unpackSize) {
     if (!properties || properties.length < 1) {
         throw new Error('LZMA2 requires properties byte');
     }
-    const dictSize = decodeDictionarySize(properties[0]);
-    // Memory optimization: pre-allocate output buffer if size is known
-    // This avoids double-memory during Buffer.concat
-    let outputBuffer = null;
-    let outputPos = 0;
-    const outputChunks = [];
-    if (unpackSize && unpackSize > 0) {
-        outputBuffer = allocBufferUnsafe(unpackSize);
-    }
-    let offset = 0;
-    // LZMA decoder instance - reused across chunks
-    // The vendored decoder supports setSolid() for LZMA2 state preservation
-    // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation
-    // and _prevByte which is used for literal decoder context selection
-    const decoder = new LzmaDecoder();
-    decoder.setDictionarySize(dictSize);
-    const outWindow = decoder._outWindow;
-    // Track current LZMA properties (lc, lp, pb)
-    let propsSet = false;
-    while(offset < input.length){
-        const control = input[offset++];
-        if (control === 0x00) {
-            break;
-        }
-        if (control === 0x01 || control === 0x02) {
-            // Uncompressed chunk
-            // 0x01 = dictionary reset + uncompressed
-            // 0x02 = uncompressed (no reset)
-            // Handle dictionary reset for 0x01
-            if (control === 0x01) {
-                outWindow._pos = 0;
-                outWindow._streamPos = 0;
-                decoder._nowPos64 = 0;
-            }
-            if (offset + 2 > input.length) {
-                throw new Error('Truncated LZMA2 uncompressed chunk header');
-            }
-            // Size is big-endian, 16-bit, value + 1
-            const uncompSize = (input[offset] << 8 | input[offset + 1]) + 1;
-            offset += 2;
-            if (offset + uncompSize > input.length) {
-                throw new Error('Truncated LZMA2 uncompressed data');
-            }
-            // Get the uncompressed data
-            const uncompData = input.slice(offset, offset + uncompSize);
-            // Copy uncompressed data to output
-            if (outputBuffer) {
-                uncompData.copy(outputBuffer, outputPos);
-                outputPos += uncompData.length;
-            } else {
-                outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(uncompData);
-            }
-            // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it
-            // The decoder needs to track this data for LZ77 back-references
-            // We write directly to _buffer to avoid flush() which requires _stream to be set
-            // We must also update _streamPos to match _pos so that flush() doesn't try to write
-            for(let i = 0; i < uncompData.length; i++){
-                outWindow._buffer[outWindow._pos++] = uncompData[i];
-                // Handle circular buffer wrap-around
-                if (outWindow._pos >= outWindow._windowSize) {
-                    outWindow._pos = 0;
-                }
-            }
-            // Keep _streamPos in sync so flush() doesn't try to write these bytes
-            // (they're already in our output buffer)
-            outWindow._streamPos = outWindow._pos;
-            // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation
-            decoder._nowPos64 += uncompSize;
-            // Update prevByte for literal decoder context in subsequent LZMA chunks
-            decoder._prevByte = uncompData[uncompData.length - 1];
-            offset += uncompSize;
-        } else if (control >= 0x80) {
-            // LZMA compressed chunk
-            // Control byte format (bits 7-0):
-            // Bit 7: always 1 for LZMA chunk
-            // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)
-            // Bits 4-0: high 5 bits of uncompressed size - 1
-            // Control byte ranges (based on bits 6-5):
-            // 0x80-0x9F (00): no reset - continue existing state (solid mode)
-            // 0xA0-0xBF (01): reset state only
-            // 0xC0-0xDF (10): reset state + new properties
-            // 0xE0-0xFF (11): reset dictionary + state + new properties
-            const resetState = control >= 0xa0;
-            const newProps = control >= 0xc0;
-            const dictReset = control >= 0xe0;
-            const useSolidMode = !resetState;
-            // Handle dictionary reset for control bytes 0xE0-0xFF
-            if (dictReset) {
-                outWindow._pos = 0;
-                outWindow._streamPos = 0;
-            }
-            if (offset + 4 > input.length) {
-                throw new Error('Truncated LZMA2 LZMA chunk header');
-            }
-            // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1
-            const uncompHigh = control & 0x1f;
-            const uncompSize2 = (uncompHigh << 16 | input[offset] << 8 | input[offset + 1]) + 1;
-            offset += 2;
-            // Compressed size: 16 bits + 1
-            const compSize = (input[offset] << 8 | input[offset + 1]) + 1;
-            offset += 2;
-            // If new properties, read 1-byte LZMA properties
-            if (newProps) {
-                if (offset >= input.length) {
-                    throw new Error('Truncated LZMA2 properties byte');
-                }
-                const propsByte = input[offset++];
-                // Properties byte: pb * 45 + lp * 9 + lc
-                // where pb, lp, lc are LZMA parameters
-                const lc = propsByte % 9;
-                const remainder = Math.floor(propsByte / 9);
-                const lp = remainder % 5;
-                const pb = Math.floor(remainder / 5);
-                if (!decoder.setLcLpPb(lc, lp, pb)) {
-                    throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
-                }
-                propsSet = true;
-            }
-            if (!propsSet) {
-                throw new Error('LZMA chunk without properties');
-            }
-            if (offset + compSize > input.length) {
-                throw new Error('Truncated LZMA2 compressed data');
-            }
-            // Decode LZMA chunk
-            const inStream = createInputStream(input, offset, compSize);
-            const outStream = createOutputStream(uncompSize2); // Pre-allocate for memory efficiency
-            // Set solid mode based on control byte - this preserves state across code() calls
-            // For state reset WITHOUT dict reset (0xa0-0xdf), use resetProbabilities() to
-            // reset probability tables while preserving _nowPos64 for dictionary references
-            if (resetState && !dictReset) {
-                decoder.resetProbabilities();
-                decoder.setSolid(true); // Preserve _nowPos64 in code()
-            } else {
-                decoder.setSolid(useSolidMode);
-            }
-            // Decode the chunk
-            const success = decoder.code(inStream, outStream, uncompSize2);
-            if (!success) {
-                throw new Error('LZMA decompression failed');
-            }
-            const chunkOutput = outStream.toBuffer();
-            if (outputBuffer) {
-                chunkOutput.copy(outputBuffer, outputPos);
-                outputPos += chunkOutput.length;
-            } else {
-                outputChunks === null || outputChunks === void 0 ? void 0 : outputChunks.push(chunkOutput);
-            }
-            offset += compSize;
-        } else {
-            throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);
-        }
-    }
-    // Return pre-allocated buffer or concatenated chunks
-    if (outputBuffer) {
-        // Return only the used portion if we didn't fill the buffer
-        return outputPos < outputBuffer.length ? outputBuffer.slice(0, outputPos) : outputBuffer;
-    }
-    return Buffer.concat(outputChunks);
+    return lzma2Decode(input, properties, unpackSize);
 }
 /**
  * Create an LZMA2 decoder Transform stream
- */ export function createLzma2Decoder(properties, unpackSize) {
-    return createBufferingDecoder(decodeLzma2, properties, unpackSize);
+ *
+ * This is a true streaming decoder that processes LZMA2 chunks incrementally.
+ * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
+ *
+ * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of
+ * total archive size.
+ */ export function createLzma2Decoder(properties, _unpackSize) {
+    if (!properties || properties.length < 1) {
+        throw new Error('LZMA2 requires properties byte');
+    }
+    return createLzma2Transform(properties);
 }
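For reference, the LZMA2 framing rules spelled out in the comments above can be summarized in a few lines of TypeScript. This is an illustrative sketch only: the helper names and return shape are not part of the package (its actual parser ships in the new dist/*/lzma/Lzma2ChunkParser module), and the dictionary-size mapping mirrors the removed decodeDictionarySize.

// Sketch: classify an LZMA2 control byte (names are illustrative, not the package API).
interface Lzma2ChunkInfo {
  kind: 'end' | 'uncompressed' | 'lzma';
  dictReset: boolean;
  stateReset: boolean;
  newProps: boolean;
  unpackedSizeHigh5?: number; // bits 4-0: high 5 bits of (uncompressed size - 1), LZMA chunks only
}

function parseControlByte(control: number): Lzma2ChunkInfo {
  if (control === 0x00) return { kind: 'end', dictReset: false, stateReset: false, newProps: false };
  if (control === 0x01 || control === 0x02) {
    // 0x01 resets the dictionary, 0x02 does not; both are followed by a 16-bit (size - 1) field
    return { kind: 'uncompressed', dictReset: control === 0x01, stateReset: false, newProps: false };
  }
  if (control < 0x80) throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);
  return {
    kind: 'lzma',
    stateReset: control >= 0xa0, // bits 6-5 >= 01
    newProps: control >= 0xc0,   // bits 6-5 >= 10
    dictReset: control >= 0xe0,  // bits 6-5 === 11
    unpackedSizeHigh5: control & 0x1f,
  };
}

// Dictionary size from the single LZMA2 properties byte, as decodeDictionarySize did above.
function lzma2DictionarySize(propByte: number): number {
  if (propByte > 40) throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);
  if (propByte === 40) return 0xffffffff; // 4 GiB - 1
  // (2 | lowBit) * 2^(propByte/2 + 11): 0x00 -> 4 KiB, 0x01 -> 6 KiB, 0x02 -> 8 KiB, ...
  // multiplication rather than << keeps the largest sizes clear of signed 32-bit overflow
  return (2 | (propByte & 1)) * 2 ** (Math.floor(propByte / 2) + 11);
}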
package/dist/esm/sevenz/codecs/Lzma2.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["import Module from 'module';\n\nconst _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;\n\n// LZMA2 codec using lzma-purejs\n// LZMA2 is a container format that wraps LZMA chunks with framing\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport type { Transform } from 'readable-stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\nimport { createInputStream, createOutputStream } from './streams.ts';\n\n// Import vendored lzma-purejs - provides raw LZMA decoder (patched for LZMA2 support)\n// Path accounts for build output in dist/esm/sevenz/codecs/\nconst { LZMA } = _require('../../../../assets/lzma-purejs');\nconst LzmaDecoder = LZMA.Decoder;\n\n/**\n * Decode LZMA2 dictionary size from properties byte\n * Properties byte encodes dictionary size as: 2^(dictByte/2 + 12) or similar\n *\n * Per XZ spec, dictionary sizes are:\n * 0x00 = 4 KiB (2^12)\n * 0x01 = 6 KiB\n * 0x02 = 8 KiB (2^13)\n * ...\n * 0x28 = 1.5 GiB\n */\nfunction decodeDictionarySize(propByte: number): number {\n if (propByte > 40) {\n throw new Error(`Invalid LZMA2 dictionary size property: ${propByte}`);\n }\n if (propByte === 40) {\n // Max dictionary size: 4 GiB - 1\n return 0xffffffff;\n }\n // Dictionary size = 2 | (propByte & 1) << (propByte / 2 + 11)\n const base = 2 | (propByte & 1);\n const shift = Math.floor(propByte / 2) + 11;\n return base << shift;\n}\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (used for pre-allocation to reduce memory)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n const dictSize = decodeDictionarySize(properties[0]);\n\n // Memory optimization: pre-allocate output buffer if size is known\n // This avoids double-memory during Buffer.concat\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n // LZMA decoder instance - reused across chunks\n // The vendored decoder supports setSolid() for LZMA2 state preservation\n // The decoder also has _nowPos64 which tracks cumulative position for rep0 validation\n // and _prevByte which is used for literal decoder context selection\n const decoder = new LzmaDecoder() as InstanceType<typeof LzmaDecoder> & {\n setSolid: (solid: boolean) => void;\n resetProbabilities: () => void;\n _nowPos64: number;\n _prevByte: number;\n };\n decoder.setDictionarySize(dictSize);\n\n // Access internal _outWindow for dictionary management\n // We need to preserve dictionary state across LZMA2 chunks\n type OutWindowType = {\n _buffer: Buffer;\n _pos: number;\n _streamPos: number;\n _windowSize: number;\n init: (solid: boolean) => void;\n };\n 
const outWindow = (decoder as unknown as { _outWindow: OutWindowType })._outWindow;\n\n // Track current LZMA properties (lc, lp, pb)\n let propsSet = false;\n\n while (offset < input.length) {\n const control = input[offset++];\n\n if (control === 0x00) {\n // End of LZMA2 stream\n break;\n }\n\n if (control === 0x01 || control === 0x02) {\n // Uncompressed chunk\n // 0x01 = dictionary reset + uncompressed\n // 0x02 = uncompressed (no reset)\n\n // Handle dictionary reset for 0x01\n if (control === 0x01) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n decoder._nowPos64 = 0;\n }\n\n if (offset + 2 > input.length) {\n throw new Error('Truncated LZMA2 uncompressed chunk header');\n }\n\n // Size is big-endian, 16-bit, value + 1\n const uncompSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n if (offset + uncompSize > input.length) {\n throw new Error('Truncated LZMA2 uncompressed data');\n }\n\n // Get the uncompressed data\n const uncompData = input.slice(offset, offset + uncompSize);\n\n // Copy uncompressed data to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks?.push(uncompData);\n }\n\n // Also update the decoder's internal dictionary so subsequent LZMA chunks can reference it\n // The decoder needs to track this data for LZ77 back-references\n // We write directly to _buffer to avoid flush() which requires _stream to be set\n // We must also update _streamPos to match _pos so that flush() doesn't try to write\n for (let i = 0; i < uncompData.length; i++) {\n outWindow._buffer[outWindow._pos++] = uncompData[i];\n // Handle circular buffer wrap-around\n if (outWindow._pos >= outWindow._windowSize) {\n outWindow._pos = 0;\n }\n }\n // Keep _streamPos in sync so flush() doesn't try to write these bytes\n // (they're already in our output buffer)\n outWindow._streamPos = outWindow._pos;\n\n // Update decoder's cumulative position so subsequent LZMA chunks have correct rep0 validation\n decoder._nowPos64 += uncompSize;\n\n // Update prevByte for literal decoder context in subsequent LZMA chunks\n decoder._prevByte = uncompData[uncompData.length - 1];\n\n offset += uncompSize;\n } else if (control >= 0x80) {\n // LZMA compressed chunk\n // Control byte format (bits 7-0):\n // Bit 7: always 1 for LZMA chunk\n // Bits 6-5: reset mode (00=nothing, 01=state, 10=state+props, 11=all)\n // Bits 4-0: high 5 bits of uncompressed size - 1\n\n // Control byte ranges (based on bits 6-5):\n // 0x80-0x9F (00): no reset - continue existing state (solid mode)\n // 0xA0-0xBF (01): reset state only\n // 0xC0-0xDF (10): reset state + new properties\n // 0xE0-0xFF (11): reset dictionary + state + new properties\n const resetState = control >= 0xa0;\n const newProps = control >= 0xc0;\n const dictReset = control >= 0xe0;\n const useSolidMode = !resetState;\n\n // Handle dictionary reset for control bytes 0xE0-0xFF\n if (dictReset) {\n outWindow._pos = 0;\n outWindow._streamPos = 0;\n }\n\n if (offset + 4 > input.length) {\n throw new Error('Truncated LZMA2 LZMA chunk header');\n }\n\n // Uncompressed size: 5 bits from control + 16 bits from next 2 bytes + 1\n const uncompHigh = control & 0x1f;\n const uncompSize2 = ((uncompHigh << 16) | (input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // Compressed size: 16 bits + 1\n const compSize = ((input[offset] << 8) | input[offset + 1]) + 1;\n offset += 2;\n\n // If new properties, read 1-byte LZMA properties\n if (newProps) {\n if (offset >= 
input.length) {\n throw new Error('Truncated LZMA2 properties byte');\n }\n const propsByte = input[offset++];\n\n // Properties byte: pb * 45 + lp * 9 + lc\n // where pb, lp, lc are LZMA parameters\n const lc = propsByte % 9;\n const remainder = Math.floor(propsByte / 9);\n const lp = remainder % 5;\n const pb = Math.floor(remainder / 5);\n\n if (!decoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n propsSet = true;\n }\n\n if (!propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n if (offset + compSize > input.length) {\n throw new Error('Truncated LZMA2 compressed data');\n }\n\n // Decode LZMA chunk\n const inStream = createInputStream(input, offset, compSize);\n const outStream = createOutputStream(uncompSize2); // Pre-allocate for memory efficiency\n\n // Set solid mode based on control byte - this preserves state across code() calls\n // For state reset WITHOUT dict reset (0xa0-0xdf), use resetProbabilities() to\n // reset probability tables while preserving _nowPos64 for dictionary references\n if (resetState && !dictReset) {\n decoder.resetProbabilities();\n decoder.setSolid(true); // Preserve _nowPos64 in code()\n } else {\n decoder.setSolid(useSolidMode);\n }\n\n // Decode the chunk\n const success = decoder.code(inStream, outStream, uncompSize2);\n if (!success) {\n throw new Error('LZMA decompression failed');\n }\n\n const chunkOutput = outStream.toBuffer();\n if (outputBuffer) {\n chunkOutput.copy(outputBuffer, outputPos);\n outputPos += chunkOutput.length;\n } else {\n outputChunks?.push(chunkOutput);\n }\n\n offset += compSize;\n } else {\n throw new Error(`Invalid LZMA2 control byte: 0x${control.toString(16)}`);\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n // Return only the used portion if we didn't fill the buffer\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n */\nexport function createLzma2Decoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeLzma2, properties, unpackSize);\n}\n"],"names":["Module","_require","require","createRequire","url","allocBufferUnsafe","createBufferingDecoder","createInputStream","createOutputStream","LZMA","LzmaDecoder","Decoder","decodeDictionarySize","propByte","Error","base","shift","Math","floor","decodeLzma2","input","properties","unpackSize","length","dictSize","outputBuffer","outputPos","outputChunks","offset","decoder","setDictionarySize","outWindow","_outWindow","propsSet","control","_pos","_streamPos","_nowPos64","uncompSize","uncompData","slice","copy","push","i","_buffer","_windowSize","_prevByte","resetState","newProps","dictReset","useSolidMode","uncompHigh","uncompSize2","compSize","propsByte","lc","remainder","lp","pb","setLcLpPb","inStream","outStream","resetProbabilities","setSolid","success","code","chunkOutput","toBuffer","toString","Buffer","concat","createLzma2Decoder"],"mappings":"AAAA,OAAOA,YAAY,SAAS;AAE5B,MAAMC,WAAW,OAAOC,YAAY,cAAcF,OAAOG,aAAa,CAAC,YAAYC,GAAG,IAAIF;AAE1F,gCAAgC;AAChC,kEAAkE;AAClE,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;AAE1E,SAASG,iBAAiB,QAAQ,wBAAwB;AAE1D,OAAOC,4BAA4B,8BAA8B;AACjE,SAASC,iBAAiB,EAAEC,kBAAkB,QAAQ,eAAe;AAErE,sFAAsF;AACtF,4DAA4D;AAC5D,MAAM,EAAEC,IAAI,EAAE,GAAGR,SAAS;AAC1B,MAAMS,cAAcD,KAAKE,OAAO;AAEhC;;;;;;;;;;CAUC,GACD,SAASC,qBAAqBC,QAAgB;IAC5C,IAAIA,WAAW,IAAI;QACjB,MAAM,IAAIC,MAAM,CAAC,wCAAwC,EAAED,UAAU;IACvE;IACA,IAAIA,aAAa,IAAI;QACnB,iCAAiC;QACjC,OAAO;IACT;IACA,8DAA8D;IAC9D,MAAME,OAAO,IAAKF,WAAW;IAC7B,MAAMG,QAAQC,KAAKC,KAAK,CAACL,WAAW,KAAK;IACzC,OAAOE,QAAQC;AACjB;AAEA;;;;;;;CAOC,GACD,OAAO,SAASG,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIT,MAAM;IAClB;IAEA,MAAMU,WAAWZ,qBAAqBS,UAAU,CAAC,EAAE;IAEnD,mEAAmE;IACnE,iDAAiD;IACjD,IAAII,eAA8B;IAClC,IAAIC,YAAY;IAChB,MAAMC,eAAyB,EAAE;IAEjC,IAAIL,cAAcA,aAAa,GAAG;QAChCG,eAAepB,kBAAkBiB;IACnC;IAEA,IAAIM,SAAS;IAEb,+CAA+C;IAC/C,wEAAwE;IACxE,sFAAsF;IACtF,oEAAoE;IACpE,MAAMC,UAAU,IAAInB;IAMpBmB,QAAQC,iBAAiB,CAACN;IAW1B,MAAMO,YAAY,AAACF,QAAqDG,UAAU;IAElF,6CAA6C;IAC7C,IAAIC,WAAW;IAEf,MAAOL,SAASR,MAAMG,MAAM,CAAE;QAC5B,MAAMW,UAAUd,KAAK,CAACQ,SAAS;QAE/B,IAAIM,YAAY,MAAM;YAEpB;QACF;QAEA,IAAIA,YAAY,QAAQA,YAAY,MAAM;YACxC,qBAAqB;YACrB,yCAAyC;YACzC,iCAAiC;YAEjC,mCAAmC;YACnC,IAAIA,YAAY,MAAM;gBACpBH,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;gBACvBP,QAAQQ,SAAS,GAAG;YACtB;YAEA,IAAIT,SAAS,IAAIR,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIT,MAAM;YAClB;YAEA,wCAAwC;YACxC,MAAMwB,aAAa,AAAC,CAAA,AAAClB,KAAK,CAACQ,OAAO,IAAI,IAAKR,KAAK,CAACQ,SAAS,EAAE,AAAD,IAAK;YAChEA,UAAU;YAEV,IAAIA,SAASU,aAAalB,MAAMG,MAAM,EAAE;gBACtC,MAAM,IAAIT,MAAM;YAClB;YAEA,4BAA4B;YAC5B,MAAMyB,aAAanB,MAAMoB,KAAK,CAACZ,QAAQA,SAASU;YAEhD,mCAAmC;YACnC,IAAIb,cAAc;gBAChBc,WAAWE,IAAI,CAAChB,cAAcC;gBAC9BA,aAAaa,WAAWhB,MAAM;YAChC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAce,IAAI,CAACH;YACrB;YAEA,2FAA2F;YAC3F,gEAAgE;YAChE,iFAAiF;YACjF,oFAAoF;YACpF,IAAK,IAAII,IAAI,GAAGA,IAAIJ,WAAWhB,MAAM,EAAEoB,IAAK;gBAC1CZ,UAAUa,OAAO,CAACb,UAAUI,IAAI,GAAG,GAAGI,UAAU,CAACI,EAAE;gBACnD,qCAAqC;gBACrC,IAAIZ,UAAUI,IAAI,IAAIJ,UAAUc,WAAW,EAAE;oBAC3Cd,UAAUI,IAAI,GAAG;gBACnB;YACF;YACA,sEAAsE;YACtE,yCAAyC;YACzCJ,UAAUK,UAAU,GAAGL,UAAUI,IAAI;YAErC,8FAA8F;YAC9FN,QAAQQ,SAAS,IAAIC;YAErB,wEAAwE;YACxET,QAAQiB,SAAS,GAAGP,UAAU,CAACA,WAAWhB,MAAM,GAAG,E
AAE;YAErDK,UAAUU;QACZ,OAAO,IAAIJ,WAAW,MAAM;YAC1B,wBAAwB;YACxB,kCAAkC;YAClC,iCAAiC;YACjC,sEAAsE;YACtE,iDAAiD;YAEjD,2CAA2C;YAC3C,kEAAkE;YAClE,mCAAmC;YACnC,+CAA+C;YAC/C,4DAA4D;YAC5D,MAAMa,aAAab,WAAW;YAC9B,MAAMc,WAAWd,WAAW;YAC5B,MAAMe,YAAYf,WAAW;YAC7B,MAAMgB,eAAe,CAACH;YAEtB,sDAAsD;YACtD,IAAIE,WAAW;gBACblB,UAAUI,IAAI,GAAG;gBACjBJ,UAAUK,UAAU,GAAG;YACzB;YAEA,IAAIR,SAAS,IAAIR,MAAMG,MAAM,EAAE;gBAC7B,MAAM,IAAIT,MAAM;YAClB;YAEA,yEAAyE;YACzE,MAAMqC,aAAajB,UAAU;YAC7B,MAAMkB,cAAc,AAAC,CAAA,AAACD,cAAc,KAAO/B,KAAK,CAACQ,OAAO,IAAI,IAAKR,KAAK,CAACQ,SAAS,EAAE,AAAD,IAAK;YACtFA,UAAU;YAEV,+BAA+B;YAC/B,MAAMyB,WAAW,AAAC,CAAA,AAACjC,KAAK,CAACQ,OAAO,IAAI,IAAKR,KAAK,CAACQ,SAAS,EAAE,AAAD,IAAK;YAC9DA,UAAU;YAEV,iDAAiD;YACjD,IAAIoB,UAAU;gBACZ,IAAIpB,UAAUR,MAAMG,MAAM,EAAE;oBAC1B,MAAM,IAAIT,MAAM;gBAClB;gBACA,MAAMwC,YAAYlC,KAAK,CAACQ,SAAS;gBAEjC,yCAAyC;gBACzC,uCAAuC;gBACvC,MAAM2B,KAAKD,YAAY;gBACvB,MAAME,YAAYvC,KAAKC,KAAK,CAACoC,YAAY;gBACzC,MAAMG,KAAKD,YAAY;gBACvB,MAAME,KAAKzC,KAAKC,KAAK,CAACsC,YAAY;gBAElC,IAAI,CAAC3B,QAAQ8B,SAAS,CAACJ,IAAIE,IAAIC,KAAK;oBAClC,MAAM,IAAI5C,MAAM,CAAC,4BAA4B,EAAEyC,GAAG,IAAI,EAAEE,GAAG,IAAI,EAAEC,IAAI;gBACvE;gBACAzB,WAAW;YACb;YAEA,IAAI,CAACA,UAAU;gBACb,MAAM,IAAInB,MAAM;YAClB;YAEA,IAAIc,SAASyB,WAAWjC,MAAMG,MAAM,EAAE;gBACpC,MAAM,IAAIT,MAAM;YAClB;YAEA,oBAAoB;YACpB,MAAM8C,WAAWrD,kBAAkBa,OAAOQ,QAAQyB;YAClD,MAAMQ,YAAYrD,mBAAmB4C,cAAc,qCAAqC;YAExF,kFAAkF;YAClF,8EAA8E;YAC9E,gFAAgF;YAChF,IAAIL,cAAc,CAACE,WAAW;gBAC5BpB,QAAQiC,kBAAkB;gBAC1BjC,QAAQkC,QAAQ,CAAC,OAAO,+BAA+B;YACzD,OAAO;gBACLlC,QAAQkC,QAAQ,CAACb;YACnB;YAEA,mBAAmB;YACnB,MAAMc,UAAUnC,QAAQoC,IAAI,CAACL,UAAUC,WAAWT;YAClD,IAAI,CAACY,SAAS;gBACZ,MAAM,IAAIlD,MAAM;YAClB;YAEA,MAAMoD,cAAcL,UAAUM,QAAQ;YACtC,IAAI1C,cAAc;gBAChByC,YAAYzB,IAAI,CAAChB,cAAcC;gBAC/BA,aAAawC,YAAY3C,MAAM;YACjC,OAAO;gBACLI,yBAAAA,mCAAAA,aAAce,IAAI,CAACwB;YACrB;YAEAtC,UAAUyB;QACZ,OAAO;YACL,MAAM,IAAIvC,MAAM,CAAC,8BAA8B,EAAEoB,QAAQkC,QAAQ,CAAC,KAAK;QACzE;IACF;IAEA,qDAAqD;IACrD,IAAI3C,cAAc;QAChB,4DAA4D;QAC5D,OAAOC,YAAYD,aAAaF,MAAM,GAAGE,aAAae,KAAK,CAAC,GAAGd,aAAaD;IAC9E;IACA,OAAO4C,OAAOC,MAAM,CAAC3C;AACvB;AAEA;;CAEC,GACD,OAAO,SAAS4C,mBAAmBlD,UAAmB,EAAEC,UAAmB;IACzE,OAAOhB,uBAAuBa,aAAaE,YAAYC;AACzD"}
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["// LZMA2 codec using TypeScript LZMA decoder\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n\nimport type { Transform } from 'stream';\nimport { createLzma2Decoder as createLzma2Transform, decodeLzma2 as lzma2Decode } from '../../lzma/index.ts';\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return lzma2Decode(input, properties, unpackSize);\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a true streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of\n * total archive size.\n */\nexport function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return createLzma2Transform(properties) as Transform;\n}\n"],"names":["createLzma2Decoder","createLzma2Transform","decodeLzma2","lzma2Decode","input","properties","unpackSize","length","Error","_unpackSize"],"mappings":"AAAA,4CAA4C;AAC5C,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;AAG1E,SAASA,sBAAsBC,oBAAoB,EAAEC,eAAeC,WAAW,QAAQ,sBAAsB;AAE7G;;;;;;;CAOC,GACD,OAAO,SAASD,YAAYE,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOL,YAAYC,OAAOC,YAAYC;AACxC;AAEA;;;;;;;;CAQC,GACD,OAAO,SAASN,mBAAmBK,UAAmB,EAAEI,WAAoB;IAC1E,IAAI,CAACJ,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOP,qBAAqBI;AAC9B"}
package/dist/esm/sevenz/codecs/createBufferingDecoder.d.ts
CHANGED
@@ -1,7 +1,7 @@
-import
+import { Transform } from 'extract-base-iterator';
 export type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;
 /**
  * Create a Transform stream that buffers all input, then decodes in flush
  * This is the common pattern for codecs that can't stream (need full input)
  */
-export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number):
+export default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): InstanceType<typeof Transform>;
package/dist/esm/sevenz/codecs/createBufferingDecoder.js
CHANGED
@@ -1,14 +1,6 @@
 // Helper to create a Transform stream that buffers all input before decoding
 // Used by codecs that need the full input before decompression (LZMA, LZMA2, BZip2, etc.)
-import
-// Use native streams when available, readable-stream only for Node 0.x
-const major = +process.versions.node.split('.')[0];
-let Transform;
-if (major > 0) {
-    Transform = Stream.Transform;
-} else {
-    Transform = require('readable-stream').Transform;
-}
+import { Transform } from 'extract-base-iterator';
 /**
  * Create a Transform stream that buffers all input, then decodes in flush
  * This is the common pattern for codecs that can't stream (need full input)
package/dist/esm/sevenz/codecs/createBufferingDecoder.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/createBufferingDecoder.ts"],"sourcesContent":["// Helper to create a Transform stream that buffers all input before decoding\n// Used by codecs that need the full input before decompression (LZMA, LZMA2, BZip2, etc.)\n\nimport
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/createBufferingDecoder.ts"],"sourcesContent":["// Helper to create a Transform stream that buffers all input before decoding\n// Used by codecs that need the full input before decompression (LZMA, LZMA2, BZip2, etc.)\n\nimport { Transform } from 'extract-base-iterator';\n\ntype TransformCallback = (error?: Error | null, data?: Buffer) => void;\n\nexport type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;\n\n/**\n * Create a Transform stream that buffers all input, then decodes in flush\n * This is the common pattern for codecs that can't stream (need full input)\n */\nexport default function createBufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): InstanceType<typeof Transform> {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: TransformCallback) => {\n chunks.push(chunk);\n callback();\n },\n flush: function (callback: TransformCallback) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeFn(input, properties, unpackSize);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n });\n}\n"],"names":["Transform","createBufferingDecoder","decodeFn","properties","unpackSize","chunks","transform","chunk","_encoding","callback","push","flush","input","Buffer","concat","output","err"],"mappings":"AAAA,6EAA6E;AAC7E,0FAA0F;AAE1F,SAASA,SAAS,QAAQ,wBAAwB;AAMlD;;;CAGC,GACD,eAAe,SAASC,uBAAuBC,QAAkB,EAAEC,UAAmB,EAAEC,UAAmB;IACzG,MAAMC,SAAmB,EAAE;IAE3B,OAAO,IAAIL,UAAU;QACnBM,WAAW,CAACC,OAAeC,WAAmBC;YAC5CJ,OAAOK,IAAI,CAACH;YACZE;QACF;QACAE,OAAO,SAAUF,QAA2B;YAC1C,IAAI;gBACF,MAAMG,QAAQC,OAAOC,MAAM,CAACT;gBAC5B,MAAMU,SAASb,SAASU,OAAOT,YAAYC;gBAC3C,IAAI,CAACM,IAAI,CAACK;gBACVN;YACF,EAAE,OAAOO,KAAK;gBACZP,SAASO;YACX;QACF;IACF;AACF"}
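The new source map above embeds the full createBufferingDecoder source. For orientation, here is a hedged, self-contained sketch of that buffer-then-decode pattern; it substitutes node:stream's Transform for the Transform re-exported by 'extract-base-iterator' and uses an identity DecodeFn as a stand-in codec, so it is illustrative rather than the shipped implementation.

import { Transform } from 'stream';

// Shape taken from the createBufferingDecoder.d.ts diff above.
type DecodeFn = (input: Buffer, properties?: Buffer, unpackSize?: number) => Buffer;

// Stand-in codec: the Copy case, where decoding is the identity.
const copyDecode: DecodeFn = (input) => input;

// Buffer every chunk, run the decode function once in flush, emit the result.
function bufferingDecoder(decodeFn: DecodeFn, properties?: Buffer, unpackSize?: number): Transform {
  const chunks: Buffer[] = [];
  return new Transform({
    transform(chunk: Buffer, _encoding, callback) {
      chunks.push(chunk);
      callback();
    },
    flush(callback) {
      try {
        this.push(decodeFn(Buffer.concat(chunks), properties, unpackSize));
        callback();
      } catch (err) {
        callback(err as Error);
      }
    },
  });
}

// Usage: compressedStream.pipe(bufferingDecoder(copyDecode)).pipe(destination);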
package/dist/esm/types.d.ts
CHANGED
@@ -1,4 +1,6 @@
 export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';
+export { default as FileEntry } from './FileEntry.js';
+export type { SevenZipEntry } from './sevenz/SevenZipParser.js';
 import type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';
 import type FileEntry from './FileEntry.js';
 export type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;
@@ -10,21 +12,5 @@ export interface ExtractOptions extends BaseExtractOptions {
      * Password for encrypted archives
      */
     password?: string;
-    /**
-     * Memory threshold in bytes for stream input.
-     * Archives smaller than this are buffered in memory for faster processing.
-     * Archives larger than this are written to a temp file.
-     * Default: 100 MB (100 * 1024 * 1024)
-     */
-    memoryThreshold?: number;
-}
-export { default as FileEntry } from './FileEntry.js';
-import type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.js';
-export interface SevenZipFile {
-    getStream: () => NodeJS.ReadableStream;
-}
-export interface SevenZipFileIterator {
-    next: () => SevenZipEntry | null;
-    getParser: () => SevenZipParser;
 }
 export type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;
package/dist/esm/types.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/types.ts"],"sourcesContent":["export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';\n\nimport type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';\nimport type FileEntry from './FileEntry.ts';\n\n// 7z-specific Entry union type with 7z-specific FileEntry\nexport type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;\n\n/**\n * Options for SevenZipIterator\n */\nexport interface ExtractOptions extends BaseExtractOptions {\n /**\n * Password for encrypted archives\n */\n password?: string;\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/types.ts"],"sourcesContent":["export { DirectoryEntry, LinkEntry, Lock, SymbolicLinkEntry } from 'extract-base-iterator';\nexport { default as FileEntry } from './FileEntry.ts';\nexport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\n\nimport type { ExtractOptions as BaseExtractOptions, DirectoryEntry, LinkEntry, SymbolicLinkEntry } from 'extract-base-iterator';\nimport type FileEntry from './FileEntry.ts';\n\n// 7z-specific Entry union type with 7z-specific FileEntry\nexport type Entry = DirectoryEntry | FileEntry | LinkEntry | SymbolicLinkEntry;\n\n/**\n * Options for SevenZipIterator\n */\nexport interface ExtractOptions extends BaseExtractOptions {\n /**\n * Password for encrypted archives\n */\n password?: string;\n}\n\nexport type EntryCallback = (error?: Error, result?: IteratorResult<Entry>) => void;\n"],"names":["DirectoryEntry","LinkEntry","Lock","SymbolicLinkEntry","default","FileEntry"],"mappings":"AAAA,SAASA,cAAc,EAAEC,SAAS,EAAEC,IAAI,EAAEC,iBAAiB,QAAQ,wBAAwB;AAC3F,SAASC,WAAWC,SAAS,QAAQ,iBAAiB"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "7z-iterator",
-  "version": "1.1.2",
+  "version": "1.3.0",
   "description": "Extract contents from 7z archives using an iterator API. Pure JavaScript, works on Node.js 0.8+",
   "keywords": [
     "extract",
@@ -35,10 +35,10 @@
   "source": "src/index.ts",
   "types": "dist/cjs/index.d.ts",
   "files": [
-    "dist",
-    "assets"
+    "dist"
   ],
   "scripts": {
+    "benchmark:streaming": "node benchmark/streaming.cjs",
     "build": "tsds build",
     "format": "tsds format",
     "prepublishOnly": "tsds validate",
package/assets/lzma-purejs/LICENSE
REMOVED
@@ -1,11 +0,0 @@
-BSD License
-
-lzma-purejs - Pure JavaScript LZMA de/compression
-https://github.com/cscott/lzma-purejs
-
-Copyright (c) Gary Linscott, Juan Mellado, C. Scott Ananian
-
-This vendored copy includes modifications to support LZMA2 solid mode
-(setSolid method for state preservation across chunks).
-
-Original package: https://www.npmjs.com/package/lzma-purejs
package/assets/lzma-purejs/index.js
REMOVED
@@ -1,19 +0,0 @@
-'use strict';
-var LZ = require('./lib/LZ');
-var LZMA = require('./lib/LZMA');
-var RangeCoder = require('./lib/RangeCoder');
-var Stream = require('./lib/Stream');
-var Util = require('./lib/Util');
-
-module.exports = {
-  version: "0.9.0",
-  LZ: LZ,
-  LZMA: LZMA,
-  RangeCoder: RangeCoder,
-  Stream: Stream,
-  Util: Util,
-  compress: Util.compress,
-  compressFile: Util.compressFile,
-  decompress: Util.decompress,
-  decompressFile: Util.decompressFile
-};
package/assets/lzma-purejs/lib/LZ/OutWindow.js
REMOVED
@@ -1,78 +0,0 @@
-'use strict';
-var makeBuffer = require('../makeBuffer');
-
-var OutWindow = function(){
-  this._windowSize = 0;
-};
-
-OutWindow.prototype.create = function(windowSize){
-  if ((!this._buffer) || (this._windowSize !== windowSize)){
-    this._buffer = makeBuffer(windowSize);
-  }
-  this._windowSize = windowSize;
-  this._pos = 0;
-  this._streamPos = 0;
-};
-
-OutWindow.prototype.flush = function(){
-  var size = this._pos - this._streamPos;
-  if (size !== 0){
-    while(size--){
-      this._stream.writeByte(this._buffer[this._streamPos++]);
-    }
-    if (this._pos >= this._windowSize){
-      this._pos = 0;
-    }
-    this._streamPos = this._pos;
-  }
-};
-
-OutWindow.prototype.releaseStream = function(){
-  this.flush();
-  this._stream = null;
-};
-
-OutWindow.prototype.setStream = function(stream){
-  this.releaseStream();
-  this._stream = stream;
-};
-
-OutWindow.prototype.init = function(solid){
-  if (!solid){
-    this._streamPos = 0;
-    this._pos = 0;
-  }
-};
-
-OutWindow.prototype.copyBlock = function(distance, len){
-  var pos = this._pos - distance - 1;
-  if (pos < 0){
-    pos += this._windowSize;
-  }
-  while(len--){
-    if (pos >= this._windowSize){
-      pos = 0;
-    }
-    this._buffer[this._pos++] = this._buffer[pos++];
-    if (this._pos >= this._windowSize){
-      this.flush();
-    }
-  }
-};
-
-OutWindow.prototype.putByte = function(b){
-  this._buffer[this._pos++] = b;
-  if (this._pos >= this._windowSize){
-    this.flush();
-  }
-};
-
-OutWindow.prototype.getByte = function(distance){
-  var pos = this._pos - distance - 1;
-  if (pos < 0){
-    pos += this._windowSize;
-  }
-  return this._buffer[pos];
-};
-
-module.exports = OutWindow;
package/assets/lzma-purejs/lib/LZMA/Base.js
REMOVED
@@ -1,48 +0,0 @@
-'use strict';
-
-var Base = {};
-Base.kNumRepDistances = 4;
-Base.kNumStates = 12;
-
-Base.stateInit = function() { return 0; };
-Base.stateUpdateChar = function(index) {
-  if (index < 4) return 0;
-  if (index < 10) return index - 3;
-  return index - 6;
-};
-Base.stateUpdateMatch = function(index) { return (index < 7 ? 7 : 10); };
-Base.stateUpdateRep = function(index) { return (index < 7 ? 8 : 11); };
-Base.stateUpdateShortRep = function(index) { return (index < 7 ? 9 : 11); };
-Base.stateIsCharState = function(index) { return index < 7; };
-
-Base.kNumPosSlotBits = 6;
-Base.kDicLogSizeMin = 0;
-Base.kNumLenToPosStatesBits = 2;
-Base.kNumLenToPosStates = 1 << Base.kNumLenToPosStatesBits;
-Base.kMatchMinLen = 2;
-Base.getLenToPosState = function(len) {
-  len -= Base.kMatchMinLen;
-  return len < Base.kNumLenToPosStates ? len : (Base.kNumLenToPosStates - 1);
-};
-Base.kNumAlignBits = 4;
-Base.kAlignTableSize = 1 << Base.kNumAlignBits;
-Base.kAlignMask = (Base.kAlignTableSize - 1);
-Base.kStartPosModelIndex = 4;
-Base.kEndPosModelIndex = 14;
-Base.kNumPosModels = Base.kEndPosModelIndex - Base.kStartPosModelIndex;
-Base.kNumFullDistances = 1 << (Base.kEndPosModelIndex / 2);
-Base.kNumLitPosStatesBitsEncodingMax = 4;
-Base.kNumLitContextBitsMax = 8;
-Base.kNumPosStatesBitsMax = 4;
-Base.kNumPosStatesMax = (1 << Base.kNumPosStatesBitsMax);
-Base.kNumPosStatesBitsEncodingMax = 4;
-Base.kNumPosStatesEncodingMax = (1 << Base.kNumPosStatesBitsEncodingMax);
-Base.kNumLowLenBits = 3;
-Base.kNumMidLenBits = 3;
-Base.kNumHighLenBits = 8;
-Base.kNumLowLenSymbols = 1 << Base.kNumLowLenBits;
-Base.kNumMidLenSymbols = 1 << Base.kNumMidLenBits;
-Base.kNumLenSymbols = Base.kNumLowLenSymbols + Base.kNumMidLenSymbols + (1 << Base.kNumHighLenBits);
-Base.kMatchMaxLen = Base.kMatchMinLen + Base.kNumLenSymbols - 1;
-
-module.exports = Base;