7z-iterator 1.4.0 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.d.cts +3 -3
- package/dist/cjs/index.d.ts +3 -3
- package/dist/cjs/index.js +7 -38
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/nextEntry.js +1 -2
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/ArchiveSource.d.cts +1 -0
- package/dist/cjs/sevenz/ArchiveSource.d.ts +1 -0
- package/dist/cjs/sevenz/ArchiveSource.js +23 -0
- package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.js +22 -3
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/index.js +52 -30
- package/dist/cjs/sevenz/codecs/index.js.map +1 -1
- package/dist/cjs/sevenz/constants.d.cts +1 -0
- package/dist/cjs/sevenz/constants.d.ts +1 -0
- package/dist/cjs/sevenz/constants.js +1 -0
- package/dist/cjs/sevenz/constants.js.map +1 -1
- package/dist/esm/index.d.ts +3 -3
- package/dist/esm/index.js +4 -3
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/nextEntry.js +1 -2
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/ArchiveSource.d.ts +1 -0
- package/dist/esm/sevenz/ArchiveSource.js +23 -0
- package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.js +22 -3
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/index.js +32 -10
- package/dist/esm/sevenz/codecs/index.js.map +1 -1
- package/dist/esm/sevenz/constants.d.ts +1 -0
- package/dist/esm/sevenz/constants.js +1 -0
- package/dist/esm/sevenz/constants.js.map +1 -1
- package/package.json +3 -3
- package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +0 -73
- package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +0 -73
- package/dist/cjs/lzma/Lzma2ChunkParser.js +0 -148
- package/dist/cjs/lzma/Lzma2ChunkParser.js.map +0 -1
- package/dist/cjs/lzma/index.d.cts +0 -31
- package/dist/cjs/lzma/index.d.ts +0 -31
- package/dist/cjs/lzma/index.js +0 -83
- package/dist/cjs/lzma/index.js.map +0 -1
- package/dist/cjs/lzma/stream/transforms.d.cts +0 -46
- package/dist/cjs/lzma/stream/transforms.d.ts +0 -46
- package/dist/cjs/lzma/stream/transforms.js +0 -209
- package/dist/cjs/lzma/stream/transforms.js.map +0 -1
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +0 -63
- package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +0 -63
- package/dist/cjs/lzma/sync/Lzma2Decoder.js +0 -231
- package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +0 -1
- package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +0 -97
- package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +0 -97
- package/dist/cjs/lzma/sync/LzmaDecoder.js +0 -580
- package/dist/cjs/lzma/sync/LzmaDecoder.js.map +0 -1
- package/dist/cjs/lzma/sync/RangeDecoder.d.cts +0 -69
- package/dist/cjs/lzma/sync/RangeDecoder.d.ts +0 -69
- package/dist/cjs/lzma/sync/RangeDecoder.js +0 -162
- package/dist/cjs/lzma/sync/RangeDecoder.js.map +0 -1
- package/dist/cjs/lzma/types.d.cts +0 -117
- package/dist/cjs/lzma/types.d.ts +0 -117
- package/dist/cjs/lzma/types.js +0 -264
- package/dist/cjs/lzma/types.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Bcj.d.cts +0 -16
- package/dist/cjs/sevenz/codecs/Bcj.d.ts +0 -16
- package/dist/cjs/sevenz/codecs/Bcj.js +0 -183
- package/dist/cjs/sevenz/codecs/Bcj.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArm.d.cts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm.d.ts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm.js +0 -104
- package/dist/cjs/sevenz/codecs/BcjArm.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArm64.d.cts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm64.d.ts +0 -21
- package/dist/cjs/sevenz/codecs/BcjArm64.js +0 -65
- package/dist/cjs/sevenz/codecs/BcjArm64.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjArmt.d.cts +0 -19
- package/dist/cjs/sevenz/codecs/BcjArmt.d.ts +0 -19
- package/dist/cjs/sevenz/codecs/BcjArmt.js +0 -76
- package/dist/cjs/sevenz/codecs/BcjArmt.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjIa64.d.cts +0 -15
- package/dist/cjs/sevenz/codecs/BcjIa64.d.ts +0 -15
- package/dist/cjs/sevenz/codecs/BcjIa64.js +0 -141
- package/dist/cjs/sevenz/codecs/BcjIa64.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjPpc.d.cts +0 -20
- package/dist/cjs/sevenz/codecs/BcjPpc.d.ts +0 -20
- package/dist/cjs/sevenz/codecs/BcjPpc.js +0 -64
- package/dist/cjs/sevenz/codecs/BcjPpc.js.map +0 -1
- package/dist/cjs/sevenz/codecs/BcjSparc.d.cts +0 -19
- package/dist/cjs/sevenz/codecs/BcjSparc.d.ts +0 -19
- package/dist/cjs/sevenz/codecs/BcjSparc.js +0 -69
- package/dist/cjs/sevenz/codecs/BcjSparc.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Delta.d.cts +0 -16
- package/dist/cjs/sevenz/codecs/Delta.d.ts +0 -16
- package/dist/cjs/sevenz/codecs/Delta.js +0 -74
- package/dist/cjs/sevenz/codecs/Delta.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +0 -17
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +0 -17
- package/dist/cjs/sevenz/codecs/Lzma.js +0 -40
- package/dist/cjs/sevenz/codecs/Lzma.js.map +0 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +0 -20
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +0 -20
- package/dist/cjs/sevenz/codecs/Lzma2.js +0 -42
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +0 -1
- package/dist/cjs/xz/Decoder.d.cts +0 -25
- package/dist/cjs/xz/Decoder.d.ts +0 -25
- package/dist/cjs/xz/Decoder.js +0 -194
- package/dist/cjs/xz/Decoder.js.map +0 -1
- package/dist/esm/lzma/Lzma2ChunkParser.d.ts +0 -73
- package/dist/esm/lzma/Lzma2ChunkParser.js +0 -137
- package/dist/esm/lzma/Lzma2ChunkParser.js.map +0 -1
- package/dist/esm/lzma/index.d.ts +0 -31
- package/dist/esm/lzma/index.js +0 -44
- package/dist/esm/lzma/index.js.map +0 -1
- package/dist/esm/lzma/stream/transforms.d.ts +0 -46
- package/dist/esm/lzma/stream/transforms.js +0 -189
- package/dist/esm/lzma/stream/transforms.js.map +0 -1
- package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +0 -63
- package/dist/esm/lzma/sync/Lzma2Decoder.js +0 -211
- package/dist/esm/lzma/sync/Lzma2Decoder.js.map +0 -1
- package/dist/esm/lzma/sync/LzmaDecoder.d.ts +0 -97
- package/dist/esm/lzma/sync/LzmaDecoder.js +0 -543
- package/dist/esm/lzma/sync/LzmaDecoder.js.map +0 -1
- package/dist/esm/lzma/sync/RangeDecoder.d.ts +0 -69
- package/dist/esm/lzma/sync/RangeDecoder.js +0 -132
- package/dist/esm/lzma/sync/RangeDecoder.js.map +0 -1
- package/dist/esm/lzma/types.d.ts +0 -117
- package/dist/esm/lzma/types.js +0 -154
- package/dist/esm/lzma/types.js.map +0 -1
- package/dist/esm/sevenz/codecs/Bcj.d.ts +0 -16
- package/dist/esm/sevenz/codecs/Bcj.js +0 -175
- package/dist/esm/sevenz/codecs/Bcj.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArm.d.ts +0 -21
- package/dist/esm/sevenz/codecs/BcjArm.js +0 -101
- package/dist/esm/sevenz/codecs/BcjArm.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArm64.d.ts +0 -21
- package/dist/esm/sevenz/codecs/BcjArm64.js +0 -57
- package/dist/esm/sevenz/codecs/BcjArm64.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjArmt.d.ts +0 -19
- package/dist/esm/sevenz/codecs/BcjArmt.js +0 -66
- package/dist/esm/sevenz/codecs/BcjArmt.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjIa64.d.ts +0 -15
- package/dist/esm/sevenz/codecs/BcjIa64.js +0 -127
- package/dist/esm/sevenz/codecs/BcjIa64.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjPpc.d.ts +0 -20
- package/dist/esm/sevenz/codecs/BcjPpc.js +0 -55
- package/dist/esm/sevenz/codecs/BcjPpc.js.map +0 -1
- package/dist/esm/sevenz/codecs/BcjSparc.d.ts +0 -19
- package/dist/esm/sevenz/codecs/BcjSparc.js +0 -59
- package/dist/esm/sevenz/codecs/BcjSparc.js.map +0 -1
- package/dist/esm/sevenz/codecs/Delta.d.ts +0 -16
- package/dist/esm/sevenz/codecs/Delta.js +0 -66
- package/dist/esm/sevenz/codecs/Delta.js.map +0 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +0 -17
- package/dist/esm/sevenz/codecs/Lzma.js +0 -33
- package/dist/esm/sevenz/codecs/Lzma.js.map +0 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +0 -20
- package/dist/esm/sevenz/codecs/Lzma2.js +0 -38
- package/dist/esm/sevenz/codecs/Lzma2.js.map +0 -1
- package/dist/esm/xz/Decoder.d.ts +0 -25
- package/dist/esm/xz/Decoder.js +0 -185
- package/dist/esm/xz/Decoder.js.map +0 -1

package/dist/cjs/sevenz/codecs/BcjPpc.js
DELETED
@@ -1,64 +0,0 @@
-// BCJ (PowerPC) filter codec - converts PowerPC branch instruction addresses
-// This filter makes PowerPC executables more compressible by LZMA
-//
-// PowerPC is big-endian. Branch instructions use 26-bit signed offsets.
-//
-// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createBcjPpcDecoder () {
-        return createBcjPpcDecoder;
-    },
-    get decodeBcjPpc () {
-        return decodeBcjPpc;
-    }
-});
-var _extractbaseiterator = require("extract-base-iterator");
-var _createBufferingDecoderts = /*#__PURE__*/ _interop_require_default(require("./createBufferingDecoder.js"));
-function _interop_require_default(obj) {
-    return obj && obj.__esModule ? obj : {
-        default: obj
-    };
-}
-function decodeBcjPpc(input, _properties, _unpackSize) {
-    var output = (0, _extractbaseiterator.bufferFrom)(input); // Copy since we modify in place
-    var pos = 0;
-    // Process 4-byte aligned positions
-    while(pos + 4 <= output.length){
-        // Read 32-bit value (big-endian)
-        var instr = output[pos] << 24 | output[pos + 1] << 16 | output[pos + 2] << 8 | output[pos + 3];
-        // Check for B/BL instruction: (instr & 0xFC000003) === 0x48000001
-        if ((instr & 0xfc000003) === 0x48000001) {
-            // Extract 26-bit offset (bits 2-27, the LI field)
-            var addr = instr & 0x03fffffc;
-            // Sign-extend 26-bit to 32-bit
-            if (addr & 0x02000000) {
-                addr |= 0xfc000000;
-            }
-            // Convert absolute to relative: subtract current position
-            var relAddr = addr - pos;
-            // Clear old offset and write new one
-            instr = instr & 0xfc000003 | relAddr & 0x03fffffc;
-            // Write back (big-endian)
-            output[pos] = instr >>> 24 & 0xff;
-            output[pos + 1] = instr >>> 16 & 0xff;
-            output[pos + 2] = instr >>> 8 & 0xff;
-            output[pos + 3] = instr & 0xff;
-        }
-        pos += 4;
-    }
-    return output;
-}
-function createBcjPpcDecoder(properties, unpackSize) {
-    return (0, _createBufferingDecoderts.default)(decodeBcjPpc, properties, unpackSize);
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/sevenz/codecs/BcjPpc.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/BcjPpc.ts"],"sourcesContent":["// BCJ (PowerPC) filter codec - converts PowerPC branch instruction addresses\n// This filter makes PowerPC executables more compressible by LZMA\n//\n// PowerPC is big-endian. Branch instructions use 26-bit signed offsets.\n//\n// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c\n\nimport { bufferFrom } from 'extract-base-iterator';\nimport type { Transform } from 'stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\n\n/**\n * Decode PowerPC BCJ filtered data\n * Reverses the BCJ transformation by converting absolute addresses back to relative\n *\n * PowerPC B/BL instruction format (big-endian):\n * - 4 bytes aligned\n * - Opcode 0x48 in high byte with AA=0, LK=1 (0x48000001 mask 0xFC000003)\n * - Bits 6-29 are 24-bit signed offset (in words)\n *\n * @param input - PowerPC BCJ filtered data\n * @param _properties - Unused for PowerPC BCJ\n * @param _unpackSize - Unused for PowerPC BCJ\n * @returns Unfiltered data\n */\nexport function decodeBcjPpc(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n const output = bufferFrom(input); // Copy since we modify in place\n let pos = 0;\n\n // Process 4-byte aligned positions\n while (pos + 4 <= output.length) {\n // Read 32-bit value (big-endian)\n let instr = (output[pos] << 24) | (output[pos + 1] << 16) | (output[pos + 2] << 8) | output[pos + 3];\n\n // Check for B/BL instruction: (instr & 0xFC000003) === 0x48000001\n if ((instr & 0xfc000003) === 0x48000001) {\n // Extract 26-bit offset (bits 2-27, the LI field)\n let addr = instr & 0x03fffffc;\n\n // Sign-extend 26-bit to 32-bit\n if (addr & 0x02000000) {\n addr |= 0xfc000000;\n }\n\n // Convert absolute to relative: subtract current position\n const relAddr = addr - pos;\n\n // Clear old offset and write new one\n instr = (instr & 0xfc000003) | (relAddr & 0x03fffffc);\n\n // Write back (big-endian)\n output[pos] = (instr >>> 24) & 0xff;\n output[pos + 1] = (instr >>> 16) & 0xff;\n output[pos + 2] = (instr >>> 8) & 0xff;\n output[pos + 3] = instr & 0xff;\n }\n pos += 4;\n }\n\n return output;\n}\n\n/**\n * Create a PowerPC BCJ decoder Transform stream\n */\nexport function createBcjPpcDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeBcjPpc, properties, 
unpackSize);\n}\n"],"names":["createBcjPpcDecoder","decodeBcjPpc","input","_properties","_unpackSize","output","bufferFrom","pos","length","instr","addr","relAddr","properties","unpackSize","createBufferingDecoder"],"mappings":"AAAA,6EAA6E;AAC7E,kEAAkE;AAClE,EAAE;AACF,wEAAwE;AACxE,EAAE;AACF,+DAA+D;;;;;;;;;;;;QA4D/CA;eAAAA;;QAxCAC;eAAAA;;;mCAlBW;+EAEQ;;;;;;AAgB5B,SAASA,aAAaC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACpF,IAAMC,SAASC,IAAAA,+BAAU,EAACJ,QAAQ,gCAAgC;IAClE,IAAIK,MAAM;IAEV,mCAAmC;IACnC,MAAOA,MAAM,KAAKF,OAAOG,MAAM,CAAE;QAC/B,iCAAiC;QACjC,IAAIC,QAAQ,AAACJ,MAAM,CAACE,IAAI,IAAI,KAAOF,MAAM,CAACE,MAAM,EAAE,IAAI,KAAOF,MAAM,CAACE,MAAM,EAAE,IAAI,IAAKF,MAAM,CAACE,MAAM,EAAE;QAEpG,kEAAkE;QAClE,IAAI,AAACE,CAAAA,QAAQ,UAAS,MAAO,YAAY;YACvC,kDAAkD;YAClD,IAAIC,OAAOD,QAAQ;YAEnB,+BAA+B;YAC/B,IAAIC,OAAO,YAAY;gBACrBA,QAAQ;YACV;YAEA,0DAA0D;YAC1D,IAAMC,UAAUD,OAAOH;YAEvB,qCAAqC;YACrCE,QAAQ,AAACA,QAAQ,aAAeE,UAAU;YAE1C,0BAA0B;YAC1BN,MAAM,CAACE,IAAI,GAAG,AAACE,UAAU,KAAM;YAC/BJ,MAAM,CAACE,MAAM,EAAE,GAAG,AAACE,UAAU,KAAM;YACnCJ,MAAM,CAACE,MAAM,EAAE,GAAG,AAACE,UAAU,IAAK;YAClCJ,MAAM,CAACE,MAAM,EAAE,GAAGE,QAAQ;QAC5B;QACAF,OAAO;IACT;IAEA,OAAOF;AACT;AAKO,SAASL,oBAAoBY,UAAmB,EAAEC,UAAmB;IAC1E,OAAOC,IAAAA,iCAAsB,EAACb,cAAcW,YAAYC;AAC1D"}

package/dist/cjs/sevenz/codecs/BcjSparc.d.cts
DELETED
@@ -1,19 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode SPARC BCJ filtered data
- * Reverses the BCJ transformation by converting absolute addresses back to relative
- *
- * SPARC CALL instruction matching (big-endian):
- * - First byte 0x40 and (second byte & 0xC0) == 0x00, OR
- * - First byte 0x7F and (second byte & 0xC0) == 0xC0
- *
- * @param input - SPARC BCJ filtered data
- * @param _properties - Unused for SPARC BCJ
- * @param _unpackSize - Unused for SPARC BCJ
- * @returns Unfiltered data
- */
-export declare function decodeBcjSparc(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer;
-/**
- * Create a SPARC BCJ decoder Transform stream
- */
-export declare function createBcjSparcDecoder(properties?: Buffer, unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/BcjSparc.d.ts
DELETED
@@ -1,19 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode SPARC BCJ filtered data
- * Reverses the BCJ transformation by converting absolute addresses back to relative
- *
- * SPARC CALL instruction matching (big-endian):
- * - First byte 0x40 and (second byte & 0xC0) == 0x00, OR
- * - First byte 0x7F and (second byte & 0xC0) == 0xC0
- *
- * @param input - SPARC BCJ filtered data
- * @param _properties - Unused for SPARC BCJ
- * @param _unpackSize - Unused for SPARC BCJ
- * @returns Unfiltered data
- */
-export declare function decodeBcjSparc(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer;
-/**
- * Create a SPARC BCJ decoder Transform stream
- */
-export declare function createBcjSparcDecoder(properties?: Buffer, unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/BcjSparc.js
DELETED
@@ -1,69 +0,0 @@
-// BCJ (SPARC) filter codec - converts SPARC branch instruction addresses
-// This filter makes SPARC executables more compressible by LZMA
-//
-// SPARC is big-endian. CALL instructions use 30-bit signed offsets.
-// The filter only transforms CALL instructions with specific byte patterns.
-//
-// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createBcjSparcDecoder () {
-        return createBcjSparcDecoder;
-    },
-    get decodeBcjSparc () {
-        return decodeBcjSparc;
-    }
-});
-var _extractbaseiterator = require("extract-base-iterator");
-var _createBufferingDecoderts = /*#__PURE__*/ _interop_require_default(require("./createBufferingDecoder.js"));
-function _interop_require_default(obj) {
-    return obj && obj.__esModule ? obj : {
-        default: obj
-    };
-}
-function decodeBcjSparc(input, _properties, _unpackSize) {
-    var output = (0, _extractbaseiterator.bufferFrom)(input); // Copy since we modify in place
-    var pos = 0;
-    // Process 4-byte aligned positions
-    while(pos + 4 <= output.length){
-        var b0 = output[pos];
-        var b1 = output[pos + 1];
-        // Check for CALL instruction with specific byte patterns:
-        // (b0 == 0x40 && (b1 & 0xC0) == 0x00) || (b0 == 0x7F && (b1 & 0xC0) == 0xC0)
-        if (b0 === 0x40 && (b1 & 0xc0) === 0x00 || b0 === 0x7f && (b1 & 0xc0) === 0xc0) {
-            // Read 32-bit value (big-endian)
-            var src = b0 << 24 | b1 << 16 | output[pos + 2] << 8 | output[pos + 3];
-            // Shift left by 2 (multiply by 4 for word addressing)
-            src <<= 2;
-            // Decoding: subtract position
-            var dest = src - pos;
-            // Shift right by 2
-            dest >>>= 2;
-            // Reconstruct with sign extension and opcode
-            // (((0 - ((dest >> 22) & 1)) << 22) & 0x3FFFFFFF) | (dest & 0x3FFFFF) | 0x40000000
-            var signBit = dest >>> 22 & 1;
-            var signExtend = signBit ? 0x3fc00000 : 0;
-            dest = signExtend | dest & 0x3fffff | 0x40000000;
-            // Write back (big-endian)
-            output[pos] = dest >>> 24 & 0xff;
-            output[pos + 1] = dest >>> 16 & 0xff;
-            output[pos + 2] = dest >>> 8 & 0xff;
-            output[pos + 3] = dest & 0xff;
-        }
-        pos += 4;
-    }
-    return output;
-}
-function createBcjSparcDecoder(properties, unpackSize) {
-    return (0, _createBufferingDecoderts.default)(decodeBcjSparc, properties, unpackSize);
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/sevenz/codecs/BcjSparc.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/BcjSparc.ts"],"sourcesContent":["// BCJ (SPARC) filter codec - converts SPARC branch instruction addresses\n// This filter makes SPARC executables more compressible by LZMA\n//\n// SPARC is big-endian. CALL instructions use 30-bit signed offsets.\n// The filter only transforms CALL instructions with specific byte patterns.\n//\n// Reference: https://github.com/kornelski/7z/blob/main/C/Bra.c\n\nimport { bufferFrom } from 'extract-base-iterator';\nimport type { Transform } from 'stream';\nimport createBufferingDecoder from './createBufferingDecoder.ts';\n\n/**\n * Decode SPARC BCJ filtered data\n * Reverses the BCJ transformation by converting absolute addresses back to relative\n *\n * SPARC CALL instruction matching (big-endian):\n * - First byte 0x40 and (second byte & 0xC0) == 0x00, OR\n * - First byte 0x7F and (second byte & 0xC0) == 0xC0\n *\n * @param input - SPARC BCJ filtered data\n * @param _properties - Unused for SPARC BCJ\n * @param _unpackSize - Unused for SPARC BCJ\n * @returns Unfiltered data\n */\nexport function decodeBcjSparc(input: Buffer, _properties?: Buffer, _unpackSize?: number): Buffer {\n const output = bufferFrom(input); // Copy since we modify in place\n let pos = 0;\n\n // Process 4-byte aligned positions\n while (pos + 4 <= output.length) {\n const b0 = output[pos];\n const b1 = output[pos + 1];\n\n // Check for CALL instruction with specific byte patterns:\n // (b0 == 0x40 && (b1 & 0xC0) == 0x00) || (b0 == 0x7F && (b1 & 0xC0) == 0xC0)\n if ((b0 === 0x40 && (b1 & 0xc0) === 0x00) || (b0 === 0x7f && (b1 & 0xc0) === 0xc0)) {\n // Read 32-bit value (big-endian)\n let src = (b0 << 24) | (b1 << 16) | (output[pos + 2] << 8) | output[pos + 3];\n\n // Shift left by 2 (multiply by 4 for word addressing)\n src <<= 2;\n\n // Decoding: subtract position\n let dest = src - pos;\n\n // Shift right by 2\n dest >>>= 2;\n\n // Reconstruct with sign extension and opcode\n // (((0 - ((dest >> 22) & 1)) << 22) & 0x3FFFFFFF) | (dest & 0x3FFFFF) | 0x40000000\n const signBit = (dest >>> 22) & 1;\n const signExtend = signBit ? 
0x3fc00000 : 0;\n dest = signExtend | (dest & 0x3fffff) | 0x40000000;\n\n // Write back (big-endian)\n output[pos] = (dest >>> 24) & 0xff;\n output[pos + 1] = (dest >>> 16) & 0xff;\n output[pos + 2] = (dest >>> 8) & 0xff;\n output[pos + 3] = dest & 0xff;\n }\n\n pos += 4;\n }\n\n return output;\n}\n\n/**\n * Create a SPARC BCJ decoder Transform stream\n */\nexport function createBcjSparcDecoder(properties?: Buffer, unpackSize?: number): Transform {\n return createBufferingDecoder(decodeBcjSparc, properties, unpackSize);\n}\n"],"names":["createBcjSparcDecoder","decodeBcjSparc","input","_properties","_unpackSize","output","bufferFrom","pos","length","b0","b1","src","dest","signBit","signExtend","properties","unpackSize","createBufferingDecoder"],"mappings":"AAAA,yEAAyE;AACzE,gEAAgE;AAChE,EAAE;AACF,oEAAoE;AACpE,4EAA4E;AAC5E,EAAE;AACF,+DAA+D;;;;;;;;;;;;QAiE/CA;eAAAA;;QA9CAC;eAAAA;;;mCAjBW;+EAEQ;;;;;;AAe5B,SAASA,eAAeC,KAAa,EAAEC,WAAoB,EAAEC,WAAoB;IACtF,IAAMC,SAASC,IAAAA,+BAAU,EAACJ,QAAQ,gCAAgC;IAClE,IAAIK,MAAM;IAEV,mCAAmC;IACnC,MAAOA,MAAM,KAAKF,OAAOG,MAAM,CAAE;QAC/B,IAAMC,KAAKJ,MAAM,CAACE,IAAI;QACtB,IAAMG,KAAKL,MAAM,CAACE,MAAM,EAAE;QAE1B,0DAA0D;QAC1D,6EAA6E;QAC7E,IAAI,AAACE,OAAO,QAAQ,AAACC,CAAAA,KAAK,IAAG,MAAO,QAAUD,OAAO,QAAQ,AAACC,CAAAA,KAAK,IAAG,MAAO,MAAO;YAClF,iCAAiC;YACjC,IAAIC,MAAM,AAACF,MAAM,KAAOC,MAAM,KAAOL,MAAM,CAACE,MAAM,EAAE,IAAI,IAAKF,MAAM,CAACE,MAAM,EAAE;YAE5E,sDAAsD;YACtDI,QAAQ;YAER,8BAA8B;YAC9B,IAAIC,OAAOD,MAAMJ;YAEjB,mBAAmB;YACnBK,UAAU;YAEV,6CAA6C;YAC7C,mFAAmF;YACnF,IAAMC,UAAU,AAACD,SAAS,KAAM;YAChC,IAAME,aAAaD,UAAU,aAAa;YAC1CD,OAAOE,aAAcF,OAAO,WAAY;YAExC,0BAA0B;YAC1BP,MAAM,CAACE,IAAI,GAAG,AAACK,SAAS,KAAM;YAC9BP,MAAM,CAACE,MAAM,EAAE,GAAG,AAACK,SAAS,KAAM;YAClCP,MAAM,CAACE,MAAM,EAAE,GAAG,AAACK,SAAS,IAAK;YACjCP,MAAM,CAACE,MAAM,EAAE,GAAGK,OAAO;QAC3B;QAEAL,OAAO;IACT;IAEA,OAAOF;AACT;AAKO,SAASL,sBAAsBe,UAAmB,EAAEC,UAAmB;IAC5E,OAAOC,IAAAA,iCAAsB,EAAChB,gBAAgBc,YAAYC;AAC5D"}

package/dist/cjs/sevenz/codecs/Delta.d.cts
DELETED
@@ -1,16 +0,0 @@
-import { Transform } from 'extract-base-iterator';
-/**
- * Decode Delta filtered data (synchronous, for buffered use)
- * Reverses the delta transformation by adding previous values
- *
- * @param input - Delta filtered data
- * @param properties - Optional 1-byte properties (distance - 1)
- * @param _unpackSize - Unused for Delta
- * @returns Unfiltered data
- */
-export declare function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer;
-/**
- * Create a streaming Delta decoder Transform.
- * Processes data chunk by chunk, maintaining state between chunks.
- */
-export declare function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform>;

package/dist/cjs/sevenz/codecs/Delta.d.ts
DELETED
@@ -1,16 +0,0 @@
-import { Transform } from 'extract-base-iterator';
-/**
- * Decode Delta filtered data (synchronous, for buffered use)
- * Reverses the delta transformation by adding previous values
- *
- * @param input - Delta filtered data
- * @param properties - Optional 1-byte properties (distance - 1)
- * @param _unpackSize - Unused for Delta
- * @returns Unfiltered data
- */
-export declare function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer;
-/**
- * Create a streaming Delta decoder Transform.
- * Processes data chunk by chunk, maintaining state between chunks.
- */
-export declare function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform>;

package/dist/cjs/sevenz/codecs/Delta.js
DELETED
@@ -1,74 +0,0 @@
-// Delta filter codec - stores differences between consecutive bytes
-// Useful for data with gradual changes (images, audio, sensor data)
-//
-// The Delta filter stores the difference between each byte and the byte
-// N positions before it, where N is the "distance" parameter (default 1).
-// This makes data with regular patterns more compressible.
-//
-// This implementation uses true streaming - processes data chunk by chunk
-// while maintaining state between chunks.
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createDeltaDecoder () {
-        return createDeltaDecoder;
-    },
-    get decodeDelta () {
-        return decodeDelta;
-    }
-});
-var _extractbaseiterator = require("extract-base-iterator");
-function decodeDelta(input, properties, _unpackSize) {
-    // Distance parameter: default is 1
-    var distance = 1;
-    if (properties && properties.length >= 1) {
-        // Properties byte contains (distance - 1)
-        distance = properties[0] + 1;
-    }
-    var output = (0, _extractbaseiterator.bufferFrom)(input); // Copy since we modify in place
-    // State buffer for multi-byte distance
-    var state = [];
-    for(var i = 0; i < distance; i++){
-        state.push(0);
-    }
-    for(var j = 0; j < output.length; j++){
-        var idx = j % distance;
-        state[idx] = state[idx] + output[j] & 0xff;
-        output[j] = state[idx];
-    }
-    return output;
-}
-function createDeltaDecoder(properties, _unpackSize) {
-    // Distance parameter: default is 1
-    var distance = 1;
-    if (properties && properties.length >= 1) {
-        distance = properties[0] + 1;
-    }
-    // State buffer for multi-byte distance
-    var state = [];
-    for(var i = 0; i < distance; i++){
-        state.push(0);
-    }
-    var byteIndex = 0;
-    return new _extractbaseiterator.Transform({
-        transform: function(chunk, _encoding, callback) {
-            var output = (0, _extractbaseiterator.allocBuffer)(chunk.length);
-            for(var j = 0; j < chunk.length; j++){
-                var idx = byteIndex % distance;
-                state[idx] = state[idx] + chunk[j] & 0xff;
-                output[j] = state[idx];
-                byteIndex++;
-            }
-            callback(null, output);
-        }
-    });
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/sevenz/codecs/Delta.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Delta.ts"],"sourcesContent":["// Delta filter codec - stores differences between consecutive bytes\n// Useful for data with gradual changes (images, audio, sensor data)\n//\n// The Delta filter stores the difference between each byte and the byte\n// N positions before it, where N is the \"distance\" parameter (default 1).\n// This makes data with regular patterns more compressible.\n//\n// This implementation uses true streaming - processes data chunk by chunk\n// while maintaining state between chunks.\n\nimport { allocBuffer, bufferFrom, Transform } from 'extract-base-iterator';\n\n/**\n * Decode Delta filtered data (synchronous, for buffered use)\n * Reverses the delta transformation by adding previous values\n *\n * @param input - Delta filtered data\n * @param properties - Optional 1-byte properties (distance - 1)\n * @param _unpackSize - Unused for Delta\n * @returns Unfiltered data\n */\nexport function decodeDelta(input: Buffer, properties?: Buffer, _unpackSize?: number): Buffer {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n // Properties byte contains (distance - 1)\n distance = properties[0] + 1;\n }\n\n const output = bufferFrom(input); // Copy since we modify in place\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n for (let j = 0; j < output.length; j++) {\n const idx = j % distance;\n state[idx] = (state[idx] + output[j]) & 0xff;\n output[j] = state[idx];\n }\n\n return output;\n}\n\n/**\n * Create a streaming Delta decoder Transform.\n * Processes data chunk by chunk, maintaining state between chunks.\n */\nexport function createDeltaDecoder(properties?: Buffer, _unpackSize?: number): InstanceType<typeof Transform> {\n // Distance parameter: default is 1\n let distance = 1;\n if (properties && properties.length >= 1) {\n distance = properties[0] + 1;\n }\n\n // State buffer for multi-byte distance\n const state: number[] = [];\n for (let i = 0; i < distance; i++) {\n state.push(0);\n }\n\n let byteIndex = 0;\n\n return new Transform({\n transform: (chunk: Buffer, _encoding: string, callback: (err?: Error | null, data?: Buffer) => void) => {\n const output = allocBuffer(chunk.length);\n\n for (let j = 0; j < chunk.length; j++) {\n const idx = byteIndex % distance;\n state[idx] = (state[idx] + chunk[j]) & 0xff;\n output[j] = state[idx];\n byteIndex++;\n }\n\n callback(null, output);\n },\n 
});\n}\n"],"names":["createDeltaDecoder","decodeDelta","input","properties","_unpackSize","distance","length","output","bufferFrom","state","i","push","j","idx","byteIndex","Transform","transform","chunk","_encoding","callback","allocBuffer"],"mappings":"AAAA,oEAAoE;AACpE,oEAAoE;AACpE,EAAE;AACF,wEAAwE;AACxE,0EAA0E;AAC1E,2DAA2D;AAC3D,EAAE;AACF,0EAA0E;AAC1E,0CAA0C;;;;;;;;;;;;QA0C1BA;eAAAA;;QA7BAC;eAAAA;;;mCAXmC;AAW5C,SAASA,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,WAAoB;IAClF,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxC,0CAA0C;QAC1CD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,IAAMI,SAASC,IAAAA,+BAAU,EAACN,QAAQ,gCAAgC;IAElE,uCAAuC;IACvC,IAAMO,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIL,UAAUK,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAK,IAAIC,IAAI,GAAGA,IAAIL,OAAOD,MAAM,EAAEM,IAAK;QACtC,IAAMC,MAAMD,IAAIP;QAChBI,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGN,MAAM,CAACK,EAAE,GAAI;QACxCL,MAAM,CAACK,EAAE,GAAGH,KAAK,CAACI,IAAI;IACxB;IAEA,OAAON;AACT;AAMO,SAASP,mBAAmBG,UAAmB,EAAEC,WAAoB;IAC1E,mCAAmC;IACnC,IAAIC,WAAW;IACf,IAAIF,cAAcA,WAAWG,MAAM,IAAI,GAAG;QACxCD,WAAWF,UAAU,CAAC,EAAE,GAAG;IAC7B;IAEA,uCAAuC;IACvC,IAAMM,QAAkB,EAAE;IAC1B,IAAK,IAAIC,IAAI,GAAGA,IAAIL,UAAUK,IAAK;QACjCD,MAAME,IAAI,CAAC;IACb;IAEA,IAAIG,YAAY;IAEhB,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAW,SAACC,OAAeC,WAAmBC;YAC5C,IAAMZ,SAASa,IAAAA,gCAAW,EAACH,MAAMX,MAAM;YAEvC,IAAK,IAAIM,IAAI,GAAGA,IAAIK,MAAMX,MAAM,EAAEM,IAAK;gBACrC,IAAMC,MAAMC,YAAYT;gBACxBI,KAAK,CAACI,IAAI,GAAG,AAACJ,KAAK,CAACI,IAAI,GAAGI,KAAK,CAACL,EAAE,GAAI;gBACvCL,MAAM,CAACK,EAAE,GAAGH,KAAK,CAACI,IAAI;gBACtBC;YACF;YAEAK,SAAS,MAAMZ;QACjB;IACF;AACF"}

package/dist/cjs/sevenz/codecs/Lzma.d.cts
DELETED
@@ -1,17 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode LZMA compressed data to buffer
- *
- * @param input - LZMA compressed data
- * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)
- * @param unpackSize - Expected output size
- * @returns Decompressed data
- */
-export declare function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
-/**
- * Create an LZMA decoder Transform stream
- *
- * Note: LZMA1 has no chunk boundaries, so this buffers all input
- * and decompresses when the stream ends.
- */
-export declare function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/Lzma.d.ts
DELETED
@@ -1,17 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode LZMA compressed data to buffer
- *
- * @param input - LZMA compressed data
- * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)
- * @param unpackSize - Expected output size
- * @returns Decompressed data
- */
-export declare function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
-/**
- * Create an LZMA decoder Transform stream
- *
- * Note: LZMA1 has no chunk boundaries, so this buffers all input
- * and decompresses when the stream ends.
- */
-export declare function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/Lzma.js
DELETED
@@ -1,40 +0,0 @@
-// LZMA codec using TypeScript LZMA decoder
-// LZMA properties in 7z are 5 bytes: 1 byte lc/lp/pb + 4 bytes dictionary size (little-endian)
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createLzmaDecoder () {
-        return createLzmaDecoder;
-    },
-    get decodeLzma () {
-        return decodeLzma;
-    }
-});
-var _indexts = require("../../lzma/index.js");
-function decodeLzma(input, properties, unpackSize) {
-    if (!properties || properties.length < 5) {
-        throw new Error('LZMA requires 5-byte properties');
-    }
-    if (typeof unpackSize !== 'number' || unpackSize < 0) {
-        throw new Error('LZMA requires known unpack size');
-    }
-    return (0, _indexts.decodeLzma)(input, properties, unpackSize);
-}
-function createLzmaDecoder(properties, unpackSize) {
-    if (!properties || properties.length < 5) {
-        throw new Error('LZMA requires 5-byte properties');
-    }
-    if (typeof unpackSize !== 'number' || unpackSize < 0) {
-        throw new Error('LZMA requires known unpack size');
-    }
-    return (0, _indexts.createLzmaDecoder)(properties, unpackSize);
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/sevenz/codecs/Lzma.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma.ts"],"sourcesContent":["// LZMA codec using TypeScript LZMA decoder\n// LZMA properties in 7z are 5 bytes: 1 byte lc/lp/pb + 4 bytes dictionary size (little-endian)\n\nimport type { Transform } from 'stream';\nimport { createLzmaDecoder as createLzmaTransform, decodeLzma as lzmaDecode } from '../../lzma/index.ts';\n\n/**\n * Decode LZMA compressed data to buffer\n *\n * @param input - LZMA compressed data\n * @param properties - Properties buffer (5 bytes: lc/lp/pb + dict size)\n * @param unpackSize - Expected output size\n * @returns Decompressed data\n */\nexport function decodeLzma(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 5) {\n throw new Error('LZMA requires 5-byte properties');\n }\n\n if (typeof unpackSize !== 'number' || unpackSize < 0) {\n throw new Error('LZMA requires known unpack size');\n }\n\n return lzmaDecode(input, properties, unpackSize) as Buffer;\n}\n\n/**\n * Create an LZMA decoder Transform stream\n *\n * Note: LZMA1 has no chunk boundaries, so this buffers all input\n * and decompresses when the stream ends.\n */\nexport function createLzmaDecoder(properties?: Buffer, unpackSize?: number): Transform {\n if (!properties || properties.length < 5) {\n throw new Error('LZMA requires 5-byte properties');\n }\n\n if (typeof unpackSize !== 'number' || unpackSize < 0) {\n throw new Error('LZMA requires known unpack size');\n }\n\n return createLzmaTransform(properties, unpackSize) as Transform;\n}\n"],"names":["createLzmaDecoder","decodeLzma","input","properties","unpackSize","length","Error","lzmaDecode","createLzmaTransform"],"mappings":"AAAA,2CAA2C;AAC3C,+FAA+F;;;;;;;;;;;;QA+B/EA;eAAAA;;QAlBAC;eAAAA;;;uBAVmE;AAU5E,SAASA,WAAWC,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IAChF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAI,OAAOF,eAAe,YAAYA,aAAa,GAAG;QACpD,MAAM,IAAIE,MAAM;IAClB;IAEA,OAAOC,IAAAA,mBAAU,EAACL,OAAOC,YAAYC;AACvC;AAQO,SAASJ,kBAAkBG,UAAmB,EAAEC,UAAmB;IACxE,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,IAAI,OAAOF,eAAe,YAAYA,aAAa,GAAG;QACpD,MAAM,IAAIE,MAAM;IAClB;IAEA,OAAOE,IAAAA,0BAAmB,EAACL,YAAYC;AACzC"}

package/dist/cjs/sevenz/codecs/Lzma2.d.cts
DELETED
@@ -1,20 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode LZMA2 compressed data to buffer
- *
- * @param input - LZMA2 compressed data
- * @param properties - Properties buffer (1 byte: dictionary size)
- * @param unpackSize - Expected output size (optional, for pre-allocation)
- * @returns Decompressed data
- */
-export declare function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
-/**
- * Create an LZMA2 decoder Transform stream
- *
- * This is a true streaming decoder that processes LZMA2 chunks incrementally.
- * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
- *
- * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of
- * total archive size.
- */
-export declare function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/Lzma2.d.ts
DELETED
@@ -1,20 +0,0 @@
-import type { Transform } from 'stream';
-/**
- * Decode LZMA2 compressed data to buffer
- *
- * @param input - LZMA2 compressed data
- * @param properties - Properties buffer (1 byte: dictionary size)
- * @param unpackSize - Expected output size (optional, for pre-allocation)
- * @returns Decompressed data
- */
-export declare function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer;
-/**
- * Create an LZMA2 decoder Transform stream
- *
- * This is a true streaming decoder that processes LZMA2 chunks incrementally.
- * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).
- *
- * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of
- * total archive size.
- */
-export declare function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform;

package/dist/cjs/sevenz/codecs/Lzma2.js
DELETED
@@ -1,42 +0,0 @@
-// LZMA2 codec using TypeScript LZMA decoder
-//
-// LZMA2 format specification:
-// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md
-//
-// Control byte values:
-// 0x00 = End of stream
-// 0x01 = Uncompressed chunk, dictionary reset
-// 0x02 = Uncompressed chunk, no dictionary reset
-// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-function _export(target, all) {
-    for(var name in all)Object.defineProperty(target, name, {
-        enumerable: true,
-        get: Object.getOwnPropertyDescriptor(all, name).get
-    });
-}
-_export(exports, {
-    get createLzma2Decoder () {
-        return createLzma2Decoder;
-    },
-    get decodeLzma2 () {
-        return decodeLzma2;
-    }
-});
-var _indexts = require("../../lzma/index.js");
-function decodeLzma2(input, properties, unpackSize) {
-    if (!properties || properties.length < 1) {
-        throw new Error('LZMA2 requires properties byte');
-    }
-    return (0, _indexts.decodeLzma2)(input, properties, unpackSize);
-}
-function createLzma2Decoder(properties, _unpackSize) {
-    if (!properties || properties.length < 1) {
-        throw new Error('LZMA2 requires properties byte');
-    }
-    return (0, _indexts.createLzma2Decoder)(properties);
-}
-/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }

package/dist/cjs/sevenz/codecs/Lzma2.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/Lzma2.ts"],"sourcesContent":["// LZMA2 codec using TypeScript LZMA decoder\n//\n// LZMA2 format specification:\n// https://github.com/ulikunitz/xz/blob/master/doc/LZMA2.md\n//\n// Control byte values:\n// 0x00 = End of stream\n// 0x01 = Uncompressed chunk, dictionary reset\n// 0x02 = Uncompressed chunk, no dictionary reset\n// 0x80-0xFF = LZMA compressed chunk (bits encode reset flags and size)\n\nimport type { Transform } from 'stream';\nimport { createLzma2Decoder as createLzma2Transform, decodeLzma2 as lzma2Decode } from '../../lzma/index.ts';\n\n/**\n * Decode LZMA2 compressed data to buffer\n *\n * @param input - LZMA2 compressed data\n * @param properties - Properties buffer (1 byte: dictionary size)\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\nexport function decodeLzma2(input: Buffer, properties?: Buffer, unpackSize?: number): Buffer {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return lzma2Decode(input, properties, unpackSize) as Buffer;\n}\n\n/**\n * Create an LZMA2 decoder Transform stream\n *\n * This is a true streaming decoder that processes LZMA2 chunks incrementally.\n * Memory usage is O(dictionary_size + max_chunk_size) instead of O(folder_size).\n *\n * LZMA2 chunks are up to ~2MB uncompressed, so memory is bounded regardless of\n * total archive size.\n */\nexport function createLzma2Decoder(properties?: Buffer, _unpackSize?: number): Transform {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n return createLzma2Transform(properties) as Transform;\n}\n"],"names":["createLzma2Decoder","decodeLzma2","input","properties","unpackSize","length","Error","lzma2Decode","_unpackSize","createLzma2Transform"],"mappings":"AAAA,4CAA4C;AAC5C,EAAE;AACF,8BAA8B;AAC9B,2DAA2D;AAC3D,EAAE;AACF,uBAAuB;AACvB,+BAA+B;AAC/B,sDAAsD;AACtD,yDAAyD;AACzD,0EAA0E;;;;;;;;;;;;QA8B1DA;eAAAA;;QAjBAC;eAAAA;;;uBAVuE;AAUhF,SAASA,YAAYC,KAAa,EAAEC,UAAmB,EAAEC,UAAmB;IACjF,IAAI,CAACD,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOC,IAAAA,oBAAW,EAACL,OAAOC,YAAYC;AACxC;AAWO,SAASJ,mBAAmBG,UAAmB,EAAEK,WAAoB;IAC1E,IAAI,CAACL,cAAcA,WAAWE,MAAM,GAAG,GAAG;QACxC,MAAM,IAAIC,MAAM;IAClB;IAEA,OAAOG,IAAAA,2BAAoB,EAACN;AAC9B"}

package/dist/cjs/xz/Decoder.d.cts
DELETED
@@ -1,25 +0,0 @@
-/**
- * XZ Decompression Module
- *
- * XZ is a container format that wraps LZMA2 compressed data.
- * This module provides both synchronous and streaming XZ decoders.
- *
- * Pure JavaScript implementation, works on Node.js 0.8+
- */
-import type { Transform as TransformType } from 'stream';
-/**
- * Decompress XZ data synchronously
- * @param input - XZ compressed data
- * @returns Decompressed data
- */
-export declare function decodeXZ(input: Buffer): Buffer;
-/**
- * Create an XZ decompression Transform stream
- *
- * Note: XZ buffers all input before decompressing, as it's a single-frame format.
- * For true streaming with better performance on large files, consider using XZ directly
- * with the sync decodeXZ() function.
- *
- * @returns Transform stream that decompresses XZ data
- */
-export declare function createXZDecoder(): TransformType;

package/dist/cjs/xz/Decoder.d.ts
DELETED
@@ -1,25 +0,0 @@
-/**
- * XZ Decompression Module
- *
- * XZ is a container format that wraps LZMA2 compressed data.
- * This module provides both synchronous and streaming XZ decoders.
- *
- * Pure JavaScript implementation, works on Node.js 0.8+
- */
-import type { Transform as TransformType } from 'stream';
-/**
- * Decompress XZ data synchronously
- * @param input - XZ compressed data
- * @returns Decompressed data
- */
-export declare function decodeXZ(input: Buffer): Buffer;
-/**
- * Create an XZ decompression Transform stream
- *
- * Note: XZ buffers all input before decompressing, as it's a single-frame format.
- * For true streaming with better performance on large files, consider using XZ directly
- * with the sync decodeXZ() function.
- *
- * @returns Transform stream that decompresses XZ data
- */
-export declare function createXZDecoder(): TransformType;