xz-compat 0.1.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/README.md +55 -7
  2. package/dist/cjs/index.d.cts +13 -3
  3. package/dist/cjs/index.d.ts +13 -3
  4. package/dist/cjs/index.js +24 -12
  5. package/dist/cjs/index.js.map +1 -1
  6. package/dist/cjs/lzma/index.d.cts +2 -18
  7. package/dist/cjs/lzma/index.d.ts +2 -18
  8. package/dist/cjs/lzma/index.js +0 -43
  9. package/dist/cjs/lzma/index.js.map +1 -1
  10. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +4 -2
  11. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +4 -2
  12. package/dist/cjs/lzma/sync/Lzma2Decoder.js +7 -6
  13. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +1 -1
  14. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +20 -2
  15. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +20 -2
  16. package/dist/cjs/lzma/sync/LzmaDecoder.js +22 -7
  17. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +1 -1
  18. package/dist/cjs/native.d.cts +31 -0
  19. package/dist/cjs/native.d.ts +31 -0
  20. package/dist/cjs/native.js +65 -0
  21. package/dist/cjs/native.js.map +1 -0
  22. package/dist/cjs/sevenz.d.cts +47 -0
  23. package/dist/cjs/sevenz.d.ts +47 -0
  24. package/dist/cjs/sevenz.js +86 -0
  25. package/dist/cjs/sevenz.js.map +1 -0
  26. package/dist/cjs/xz/Decoder.d.cts +13 -0
  27. package/dist/cjs/xz/Decoder.d.ts +13 -0
  28. package/dist/cjs/xz/Decoder.js +42 -32
  29. package/dist/cjs/xz/Decoder.js.map +1 -1
  30. package/dist/esm/index.d.ts +13 -3
  31. package/dist/esm/index.js +26 -4
  32. package/dist/esm/index.js.map +1 -1
  33. package/dist/esm/lzma/index.d.ts +2 -18
  34. package/dist/esm/lzma/index.js +2 -30
  35. package/dist/esm/lzma/index.js.map +1 -1
  36. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +4 -2
  37. package/dist/esm/lzma/sync/Lzma2Decoder.js +8 -7
  38. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +1 -1
  39. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +20 -2
  40. package/dist/esm/lzma/sync/LzmaDecoder.js +26 -7
  41. package/dist/esm/lzma/sync/LzmaDecoder.js.map +1 -1
  42. package/dist/esm/native.d.ts +31 -0
  43. package/dist/esm/native.js +51 -0
  44. package/dist/esm/native.js.map +1 -0
  45. package/dist/esm/sevenz.d.ts +47 -0
  46. package/dist/esm/sevenz.js +81 -0
  47. package/dist/esm/sevenz.js.map +1 -0
  48. package/dist/esm/xz/Decoder.d.ts +13 -0
  49. package/dist/esm/xz/Decoder.js +21 -9
  50. package/dist/esm/xz/Decoder.js.map +1 -1
  51. package/package.json +4 -1
package/dist/cjs/xz/Decoder.js CHANGED
@@ -5,6 +5,18 @@
  * This module provides both synchronous and streaming XZ decoders.
  *
  * Pure JavaScript implementation, works on Node.js 0.8+
+ *
+ * IMPORTANT: Buffer Management Pattern
+ *
+ * When calling decodeLzma2(), use the direct return pattern:
+ *
+ * ✅ CORRECT - Fast path:
+ * const output = decodeLzma2(data, props, size) as Buffer;
+ *
+ * ❌ WRONG - Slow path (do NOT buffer):
+ * const chunks: Buffer[] = [];
+ * decodeLzma2(data, props, size, { write: c => chunks.push(c) });
+ * return Buffer.concat(chunks); // ← Unnecessary copies!
  */ "use strict";
  Object.defineProperty(exports, "__esModule", {
  value: true
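The new header comment spells out the preferred calling convention. A minimal TypeScript sketch of both shapes, assuming `decodeLzma2` is imported from the package root as the updated index exports suggest; `data`, `props`, and `size` are placeholders for a block's compressed bytes, its 1-byte LZMA2 properties, and the expected output size:

```ts
import { decodeLzma2 } from 'xz-compat';

// Fast path from the comment: no sink, so the decoder returns the output directly.
export function decompressBlock(data: Buffer, props: Buffer, size: number): Buffer {
  return decodeLzma2(data, props, size) as Buffer;
}

// The discouraged slow path: routing chunks through a sink and re-concatenating
// them copies every byte again before returning.
export function decompressBlockSlow(data: Buffer, props: Buffer, size: number): Buffer {
  const chunks: Buffer[] = [];
  decodeLzma2(data, props, size, { write: (c: Buffer) => { chunks.push(c); } });
  return Buffer.concat(chunks);
}
```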
@@ -33,6 +45,7 @@ var _BcjPpcts = require("../filters/bcj/BcjPpc.js");
  var _BcjSparcts = require("../filters/bcj/BcjSparc.js");
  var _Deltats = require("../filters/delta/Delta.js");
  var _indexts = require("../lzma/index.js");
+ var _nativets = require("../native.js");
  // XZ magic bytes
  var XZ_MAGIC = [
  0xfd,
@@ -232,38 +245,12 @@ var FILTER_LZMA2 = 0x21;
  return records;
  }
  function decodeXZ(input) {
- var _loop = function(i) {
- var record = blockRecords[i];
- var recordStart = record.compressedPos;
- // Parse block header
- var blockInfo = parseBlockHeader(input, recordStart, checkSize);
- // Extract compressed data for this block
- var dataStart = recordStart + blockInfo.headerSize;
- // compressedDataSize is calculated from the Index's Unpadded Size minus header and check
- var dataEnd = dataStart + record.compressedDataSize;
- // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,
- // but the compressedSize from index is exact - no need to strip padding.
- // LZMA2 data includes a 0x00 end marker which must NOT be stripped.
- var compressedData = input.slice(dataStart, dataEnd);
- // Decompress this block with LZMA2
- var blockChunks = [];
- (0, _indexts.decodeLzma2)(compressedData, blockInfo.lzma2Props, record.uncompressedSize, {
- write: function(chunk) {
- blockChunks.push(chunk);
- }
- });
- // Concatenate LZMA2 output
- var blockOutput = Buffer.concat(blockChunks);
- // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)
- // Filters are stored in order they were applied during compression,
- // so we need to reverse for decompression
- for(var j = blockInfo.filters.length - 1; j >= 0; j--){
- blockOutput = applyFilter(blockOutput, blockInfo.filters[j]);
- }
- outputChunks.push(blockOutput);
- _totalOutputSize += blockOutput.length;
- };
  var _checkSizes_checkType;
+ // Try native acceleration first (Node 14+ with @napi-rs/lzma installed)
+ var native = (0, _nativets.tryLoadNative)();
+ if (native) {
+ return native.xz.decompressSync(input);
+ }
  // Verify XZ magic
  if (input.length < 12 || !bufferEquals(input, 0, XZ_MAGIC)) {
  throw new Error('Invalid XZ magic bytes');
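`tryLoadNative()` itself is not shown in this diff (it lives in the new `native.js`), so the following is only a sketch of what such an optional-dependency probe commonly looks like; the `xz.decompressSync` shape is taken from the call site above, while the caching and `require` details are assumptions:

```ts
// Hypothetical shape of the native module, mirroring the call site above.
interface NativeLzma {
  xz: { decompressSync(input: Buffer): Buffer };
}

let cached: NativeLzma | null | undefined;

function tryLoadNative(): NativeLzma | null {
  if (cached !== undefined) return cached;
  try {
    // Optional dependency: only resolves when @napi-rs/lzma is installed
    // and the runtime can load its N-API binding (Node 14+).
    cached = require('@napi-rs/lzma') as NativeLzma;
  } catch (_err) {
    cached = null; // fall back to the pure-JS decoder
  }
  return cached;
}
```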
@@ -300,7 +287,30 @@ function decodeXZ(input) {
  // Decompress each block
  var outputChunks = [];
  var _totalOutputSize = 0;
- for(var i = 0; i < blockRecords.length; i++)_loop(i);
+ for(var i = 0; i < blockRecords.length; i++){
+ var record = blockRecords[i];
+ var recordStart = record.compressedPos;
+ // Parse block header
+ var blockInfo = parseBlockHeader(input, recordStart, checkSize);
+ // Extract compressed data for this block
+ var dataStart = recordStart + blockInfo.headerSize;
+ // compressedDataSize is calculated from the Index's Unpadded Size minus header and check
+ var dataEnd = dataStart + record.compressedDataSize;
+ // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,
+ // but the compressedSize from index is exact - no need to strip padding.
+ // LZMA2 data includes a 0x00 end marker which must NOT be stripped.
+ var compressedData = input.slice(dataStart, dataEnd);
+ // Decompress this block with LZMA2 (fast path, no buffering)
+ var blockOutput = (0, _indexts.decodeLzma2)(compressedData, blockInfo.lzma2Props, record.uncompressedSize);
+ // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)
+ // Filters are stored in order they were applied during compression,
+ // so we need to reverse for decompression
+ for(var j = blockInfo.filters.length - 1; j >= 0; j--){
+ blockOutput = applyFilter(blockOutput, blockInfo.filters[j]);
+ }
+ outputChunks.push(blockOutput);
+ _totalOutputSize += blockOutput.length;
+ }
  return Buffer.concat(outputChunks);
  }
  function createXZDecoder() {
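For reference, the streaming entry point in this file is used like any other Transform. A small usage sketch on a modern Node runtime; the file names are placeholders, and note that the current decoder buffers all input and emits the result on flush:

```ts
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream';
import { createXZDecoder } from 'xz-compat';

pipeline(
  createReadStream('archive.tar.xz'),
  createXZDecoder(), // Transform: collects chunks, decompresses on flush
  createWriteStream('archive.tar'),
  (err) => {
    if (err) console.error('XZ decompression failed:', err);
  }
);
```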
package/dist/cjs/xz/Decoder.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/xz/Decoder.ts"],"sourcesContent":["/**\n * XZ Decompression Module\n *\n * XZ is a container format that wraps LZMA2 compressed data.\n * This module provides both synchronous and streaming XZ decoders.\n *\n * Pure JavaScript implementation, works on Node.js 0.8+\n */\n\nimport { Transform } from 'extract-base-iterator';\nimport type { Transform as TransformType } from 'stream';\nimport { decodeBcj } from '../filters/bcj/Bcj.ts';\nimport { decodeBcjArm } from '../filters/bcj/BcjArm.ts';\nimport { decodeBcjArm64 } from '../filters/bcj/BcjArm64.ts';\nimport { decodeBcjArmt } from '../filters/bcj/BcjArmt.ts';\nimport { decodeBcjIa64 } from '../filters/bcj/BcjIa64.ts';\nimport { decodeBcjPpc } from '../filters/bcj/BcjPpc.ts';\nimport { decodeBcjSparc } from '../filters/bcj/BcjSparc.ts';\nimport { decodeDelta } from '../filters/delta/Delta.ts';\nimport { decodeLzma2 } from '../lzma/index.ts';\n\n// XZ magic bytes\nconst XZ_MAGIC = [0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00];\nconst XZ_FOOTER_MAGIC = [0x59, 0x5a]; // \"YZ\"\n\n// Filter IDs (from XZ specification)\nconst FILTER_DELTA = 0x03;\nconst FILTER_BCJ_X86 = 0x04;\nconst FILTER_BCJ_PPC = 0x05;\nconst FILTER_BCJ_IA64 = 0x06;\nconst FILTER_BCJ_ARM = 0x07;\nconst FILTER_BCJ_ARMT = 0x08;\nconst FILTER_BCJ_SPARC = 0x09;\nconst FILTER_BCJ_ARM64 = 0x0a;\nconst FILTER_LZMA2 = 0x21;\n\n// Filter info for parsing\ninterface FilterInfo {\n id: number;\n props: Buffer;\n}\n\n/**\n * Simple buffer comparison\n */\nfunction bufferEquals(buf: Buffer, offset: number, expected: number[]): boolean {\n if (offset + expected.length > buf.length) {\n return false;\n }\n for (let i = 0; i < expected.length; i++) {\n if (buf[offset + i] !== expected[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Decode variable-length integer (XZ multibyte encoding)\n * Returns number, but limits to 32-bit to work on Node 0.8+\n */\nfunction decodeMultibyte(buf: Buffer, offset: number): { value: number; bytesRead: number } {\n let value = 0;\n let i = 0;\n let byte: number;\n do {\n if (offset + i >= buf.length) {\n throw new Error('Truncated multibyte integer');\n }\n byte = buf[offset + i];\n value |= (byte & 0x7f) << (i * 7);\n i++;\n if (i > 4) {\n // Reduced to prevent overflow on Node 0.8\n throw new Error('Multibyte integer too large');\n }\n } while (byte & 0x80);\n return { value, bytesRead: i };\n}\n\n/**\n * Apply a preprocessing filter (BCJ/Delta) to decompressed data\n */\nfunction applyFilter(data: Buffer, filter: FilterInfo): Buffer {\n switch (filter.id) {\n case FILTER_BCJ_X86:\n return decodeBcj(data, filter.props);\n case FILTER_BCJ_ARM:\n return decodeBcjArm(data, filter.props);\n case FILTER_BCJ_ARM64:\n return decodeBcjArm64(data, filter.props);\n case FILTER_BCJ_ARMT:\n return decodeBcjArmt(data, filter.props);\n case FILTER_BCJ_PPC:\n return decodeBcjPpc(data, filter.props);\n case FILTER_BCJ_SPARC:\n return decodeBcjSparc(data, filter.props);\n case FILTER_BCJ_IA64:\n return decodeBcjIa64(data, filter.props);\n case FILTER_DELTA:\n return decodeDelta(data, filter.props);\n default:\n throw new Error(`Unsupported filter: 0x${filter.id.toString(16)}`);\n }\n}\n\n/**\n * Parse XZ Block Header to extract filters and LZMA2 properties\n */\nfunction parseBlockHeader(\n input: Buffer,\n offset: number,\n _checkSize: number\n): {\n filters: FilterInfo[];\n lzma2Props: Buffer;\n headerSize: number;\n dataStart: number;\n dataEnd: number;\n nextOffset: number;\n} {\n // Block 
header size\n const blockHeaderSizeRaw = input[offset];\n if (blockHeaderSizeRaw === 0) {\n throw new Error('Invalid block header size (index indicator found instead of block)');\n }\n const blockHeaderSize = (blockHeaderSizeRaw + 1) * 4;\n\n // Parse block header\n const blockHeaderStart = offset;\n offset++; // skip size byte\n\n const blockFlags = input[offset++];\n const numFilters = (blockFlags & 0x03) + 1;\n const hasCompressedSize = (blockFlags & 0x40) !== 0;\n const hasUncompressedSize = (blockFlags & 0x80) !== 0;\n\n // Skip optional sizes\n if (hasCompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n if (hasUncompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n // Parse all filters\n const filters: FilterInfo[] = [];\n let lzma2Props: Buffer | null = null;\n\n for (let i = 0; i < numFilters; i++) {\n const filterIdResult = decodeMultibyte(input, offset);\n const filterId = filterIdResult.value;\n offset += filterIdResult.bytesRead;\n\n const propsSizeResult = decodeMultibyte(input, offset);\n offset += propsSizeResult.bytesRead;\n\n const filterProps = input.slice(offset, offset + propsSizeResult.value);\n offset += propsSizeResult.value;\n\n if (filterId === FILTER_LZMA2) {\n // LZMA2 must be the last filter\n lzma2Props = filterProps;\n } else if (filterId === FILTER_DELTA || (filterId >= FILTER_BCJ_X86 && filterId <= FILTER_BCJ_ARM64)) {\n // Preprocessing filter - store for later application\n filters.push({ id: filterId, props: filterProps });\n } else {\n throw new Error(`Unsupported filter: 0x${filterId.toString(16)}`);\n }\n }\n\n if (!lzma2Props) {\n throw new Error('No LZMA2 filter found in XZ block');\n }\n\n // Skip to end of block header (must be aligned to 4 bytes)\n const blockDataStart = blockHeaderStart + blockHeaderSize;\n\n return {\n filters,\n lzma2Props,\n headerSize: blockHeaderSize,\n dataStart: blockDataStart,\n dataEnd: input.length,\n nextOffset: blockDataStart,\n };\n}\n\n/**\n * Parse XZ Index to get block positions\n *\n * XZ Index stores \"Unpadded Size\" for each block which equals:\n * Block Header Size + Compressed Data Size + Check Size\n * (does NOT include padding to 4-byte boundary)\n */\nfunction parseIndex(\n input: Buffer,\n indexStart: number,\n checkSize: number\n): Array<{\n compressedPos: number;\n compressedDataSize: number;\n uncompressedSize: number;\n}> {\n let offset = indexStart;\n\n // Index indicator (0x00)\n if (input[offset] !== 0x00) {\n throw new Error('Invalid index indicator');\n }\n offset++;\n\n // Number of records\n const countResult = decodeMultibyte(input, offset);\n const recordCount = countResult.value;\n offset += countResult.bytesRead;\n\n const records: Array<{\n compressedPos: number;\n unpaddedSize: number;\n compressedDataSize: number;\n uncompressedSize: number;\n }> = [];\n\n // Parse each record\n for (let i = 0; i < recordCount; i++) {\n // Unpadded Size (header + compressed data + check)\n const unpaddedResult = decodeMultibyte(input, offset);\n offset += unpaddedResult.bytesRead;\n\n // Uncompressed size\n const uncompressedResult = decodeMultibyte(input, offset);\n offset += uncompressedResult.bytesRead;\n\n records.push({\n compressedPos: 0, // will be calculated\n unpaddedSize: unpaddedResult.value,\n compressedDataSize: 0, // will be calculated\n uncompressedSize: uncompressedResult.value,\n });\n }\n\n // Calculate actual positions by walking through blocks\n let currentPos = 12; // After stream 
header\n for (let i = 0; i < records.length; i++) {\n const record = records[i];\n // Record where this block's header starts\n record.compressedPos = currentPos;\n\n // Get block header size from the actual data\n const headerSizeRaw = input[currentPos];\n const headerSize = (headerSizeRaw + 1) * 4;\n\n // Calculate compressed data size from unpadded size\n // unpaddedSize = headerSize + compressedDataSize + checkSize\n record.compressedDataSize = record.unpaddedSize - headerSize - checkSize;\n\n // Move to next block: unpaddedSize + padding to 4-byte boundary\n const paddedSize = Math.ceil(record.unpaddedSize / 4) * 4;\n currentPos += paddedSize;\n }\n\n return records;\n}\n\n/**\n * Decompress XZ data synchronously\n * Properly handles multi-block XZ files and stream padding\n * @param input - XZ compressed data\n * @returns Decompressed data\n */\nexport function decodeXZ(input: Buffer): Buffer {\n // Verify XZ magic\n if (input.length < 12 || !bufferEquals(input, 0, XZ_MAGIC)) {\n throw new Error('Invalid XZ magic bytes');\n }\n\n // Stream flags at offset 6-7\n const checkType = input[7] & 0x0f;\n\n // Check sizes based on check type\n const checkSizes: { [key: number]: number } = {\n 0: 0, // None\n 1: 4, // CRC32\n 4: 8, // CRC64\n 10: 32, // SHA-256\n };\n const checkSize = checkSizes[checkType] ?? 0;\n\n // Find footer by skipping stream padding (null bytes at end before footer)\n // Stream padding must be multiple of 4 bytes\n let footerEnd = input.length;\n while (footerEnd > 12 && input[footerEnd - 1] === 0x00) {\n footerEnd--;\n }\n // Align to 4-byte boundary (stream padding rules)\n while (footerEnd % 4 !== 0 && footerEnd > 12) {\n footerEnd++;\n }\n\n // Verify footer magic (at footerEnd - 2)\n if (!bufferEquals(input, footerEnd - 2, XZ_FOOTER_MAGIC)) {\n throw new Error('Invalid XZ footer magic');\n }\n\n // Get backward size (tells us where index starts) - at footerEnd - 8\n const backwardSize = (input.readUInt32LE(footerEnd - 8) + 1) * 4;\n const indexStart = footerEnd - 12 - backwardSize;\n\n // Parse Index to get block information\n const blockRecords = parseIndex(input, indexStart, checkSize);\n\n // Decompress each block\n const outputChunks: Buffer[] = [];\n let _totalOutputSize = 0;\n\n for (let i = 0; i < blockRecords.length; i++) {\n const record = blockRecords[i];\n const recordStart = record.compressedPos;\n\n // Parse block header\n const blockInfo = parseBlockHeader(input, recordStart, checkSize);\n\n // Extract compressed data for this block\n const dataStart = recordStart + blockInfo.headerSize;\n // compressedDataSize is calculated from the Index's Unpadded Size minus header and check\n const dataEnd = dataStart + record.compressedDataSize;\n\n // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,\n // but the compressedSize from index is exact - no need to strip padding.\n // LZMA2 data includes a 0x00 end marker which must NOT be stripped.\n const compressedData = input.slice(dataStart, dataEnd);\n\n // Decompress this block with LZMA2\n const blockChunks: Buffer[] = [];\n decodeLzma2(compressedData, blockInfo.lzma2Props, record.uncompressedSize, {\n write: (chunk: Buffer) => {\n blockChunks.push(chunk);\n },\n });\n\n // Concatenate LZMA2 output\n let blockOutput = Buffer.concat(blockChunks) as Buffer;\n\n // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)\n // Filters are stored in order they were applied during compression,\n // so we need to reverse for decompression\n for (let j = 
blockInfo.filters.length - 1; j >= 0; j--) {\n blockOutput = applyFilter(blockOutput, blockInfo.filters[j]) as Buffer;\n }\n\n outputChunks.push(blockOutput);\n _totalOutputSize += blockOutput.length;\n }\n\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an XZ decompression Transform stream\n * @returns Transform stream that decompresses XZ data\n */\nexport function createXZDecoder(): TransformType {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform(chunk: Buffer, _encoding: string, callback: (error?: Error | null) => void) {\n chunks.push(chunk);\n callback();\n },\n\n flush(callback: (error?: Error | null) => void) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeXZ(input);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n });\n}\n"],"names":["createXZDecoder","decodeXZ","XZ_MAGIC","XZ_FOOTER_MAGIC","FILTER_DELTA","FILTER_BCJ_X86","FILTER_BCJ_PPC","FILTER_BCJ_IA64","FILTER_BCJ_ARM","FILTER_BCJ_ARMT","FILTER_BCJ_SPARC","FILTER_BCJ_ARM64","FILTER_LZMA2","bufferEquals","buf","offset","expected","length","i","decodeMultibyte","value","byte","Error","bytesRead","applyFilter","data","filter","id","decodeBcj","props","decodeBcjArm","decodeBcjArm64","decodeBcjArmt","decodeBcjPpc","decodeBcjSparc","decodeBcjIa64","decodeDelta","toString","parseBlockHeader","input","_checkSize","blockHeaderSizeRaw","blockHeaderSize","blockHeaderStart","blockFlags","numFilters","hasCompressedSize","hasUncompressedSize","result","filters","lzma2Props","filterIdResult","filterId","propsSizeResult","filterProps","slice","push","blockDataStart","headerSize","dataStart","dataEnd","nextOffset","parseIndex","indexStart","checkSize","countResult","recordCount","records","unpaddedResult","uncompressedResult","compressedPos","unpaddedSize","compressedDataSize","uncompressedSize","currentPos","record","headerSizeRaw","paddedSize","Math","ceil","blockRecords","recordStart","blockInfo","compressedData","blockChunks","decodeLzma2","write","chunk","blockOutput","Buffer","concat","j","outputChunks","_totalOutputSize","checkSizes","checkType","footerEnd","backwardSize","readUInt32LE","chunks","Transform","transform","_encoding","callback","flush","output","err"],"mappings":"AAAA;;;;;;;CAOC;;;;;;;;;;;QAsWeA;eAAAA;;QA3FAC;eAAAA;;;mCAzQU;qBAEA;wBACG;0BACE;yBACD;yBACA;wBACD;0BACE;uBACH;uBACA;AAE5B,iBAAiB;AACjB,IAAMC,WAAW;IAAC;IAAM;IAAM;IAAM;IAAM;IAAM;CAAK;AACrD,IAAMC,kBAAkB;IAAC;IAAM;CAAK,EAAE,OAAO;AAE7C,qCAAqC;AACrC,IAAMC,eAAe;AACrB,IAAMC,iBAAiB;AACvB,IAAMC,iBAAiB;AACvB,IAAMC,kBAAkB;AACxB,IAAMC,iBAAiB;AACvB,IAAMC,kBAAkB;AACxB,IAAMC,mBAAmB;AACzB,IAAMC,mBAAmB;AACzB,IAAMC,eAAe;AAQrB;;CAEC,GACD,SAASC,aAAaC,GAAW,EAAEC,MAAc,EAAEC,QAAkB;IACnE,IAAID,SAASC,SAASC,MAAM,GAAGH,IAAIG,MAAM,EAAE;QACzC,OAAO;IACT;IACA,IAAK,IAAIC,IAAI,GAAGA,IAAIF,SAASC,MAAM,EAAEC,IAAK;QACxC,IAAIJ,GAAG,CAACC,SAASG,EAAE,KAAKF,QAAQ,CAACE,EAAE,EAAE;YACnC,OAAO;QACT;IACF;IACA,OAAO;AACT;AAEA;;;CAGC,GACD,SAASC,gBAAgBL,GAAW,EAAEC,MAAc;IAClD,IAAIK,QAAQ;IACZ,IAAIF,IAAI;IACR,IAAIG;IACJ,GAAG;QACD,IAAIN,SAASG,KAAKJ,IAAIG,MAAM,EAAE;YAC5B,MAAM,IAAIK,MAAM;QAClB;QACAD,OAAOP,GAAG,CAACC,SAASG,EAAE;QACtBE,SAAS,AAACC,CAAAA,OAAO,IAAG,KAAOH,IAAI;QAC/BA;QACA,IAAIA,IAAI,GAAG;YACT,0CAA0C;YAC1C,MAAM,IAAII,MAAM;QAClB;IACF,QAASD,OAAO,MAAM;IACtB,OAAO;QAAED,OAAAA;QAAOG,WAAWL;IAAE;AAC/B;AAEA;;CAEC,GACD,SAASM,YAAYC,IAAY,EAAEC,MAAkB;IACnD,OAAQA,OAAOC,EAAE;QACf,KAAKtB;YACH,OAAOuB,IAAAA,gBAAS,EAACH,MAAMC,OAAOG,KAAK;QACrC,KAAKrB;YACH,OAAOsB,IAAAA,sBAAY,EAACL,MAAMC,OAAOG,KAAK;QACxC,KAAKlB;YACH,OAAOoB,IAAAA,0BAAc,EAA
CN,MAAMC,OAAOG,KAAK;QAC1C,KAAKpB;YACH,OAAOuB,IAAAA,wBAAa,EAACP,MAAMC,OAAOG,KAAK;QACzC,KAAKvB;YACH,OAAO2B,IAAAA,sBAAY,EAACR,MAAMC,OAAOG,KAAK;QACxC,KAAKnB;YACH,OAAOwB,IAAAA,0BAAc,EAACT,MAAMC,OAAOG,KAAK;QAC1C,KAAKtB;YACH,OAAO4B,IAAAA,wBAAa,EAACV,MAAMC,OAAOG,KAAK;QACzC,KAAKzB;YACH,OAAOgC,IAAAA,oBAAW,EAACX,MAAMC,OAAOG,KAAK;QACvC;YACE,MAAM,IAAIP,MAAM,AAAC,yBAA+C,OAAvBI,OAAOC,EAAE,CAACU,QAAQ,CAAC;IAChE;AACF;AAEA;;CAEC,GACD,SAASC,iBACPC,KAAa,EACbxB,MAAc,EACdyB,UAAkB;IASlB,oBAAoB;IACpB,IAAMC,qBAAqBF,KAAK,CAACxB,OAAO;IACxC,IAAI0B,uBAAuB,GAAG;QAC5B,MAAM,IAAInB,MAAM;IAClB;IACA,IAAMoB,kBAAkB,AAACD,CAAAA,qBAAqB,CAAA,IAAK;IAEnD,qBAAqB;IACrB,IAAME,mBAAmB5B;IACzBA,UAAU,iBAAiB;IAE3B,IAAM6B,aAAaL,KAAK,CAACxB,SAAS;IAClC,IAAM8B,aAAa,AAACD,CAAAA,aAAa,IAAG,IAAK;IACzC,IAAME,oBAAoB,AAACF,CAAAA,aAAa,IAAG,MAAO;IAClD,IAAMG,sBAAsB,AAACH,CAAAA,aAAa,IAAG,MAAO;IAEpD,sBAAsB;IACtB,IAAIE,mBAAmB;QACrB,IAAME,SAAS7B,gBAAgBoB,OAAOxB;QACtCA,UAAUiC,OAAOzB,SAAS;IAC5B;IAEA,IAAIwB,qBAAqB;QACvB,IAAMC,UAAS7B,gBAAgBoB,OAAOxB;QACtCA,UAAUiC,QAAOzB,SAAS;IAC5B;IAEA,oBAAoB;IACpB,IAAM0B,UAAwB,EAAE;IAChC,IAAIC,aAA4B;IAEhC,IAAK,IAAIhC,IAAI,GAAGA,IAAI2B,YAAY3B,IAAK;QACnC,IAAMiC,iBAAiBhC,gBAAgBoB,OAAOxB;QAC9C,IAAMqC,WAAWD,eAAe/B,KAAK;QACrCL,UAAUoC,eAAe5B,SAAS;QAElC,IAAM8B,kBAAkBlC,gBAAgBoB,OAAOxB;QAC/CA,UAAUsC,gBAAgB9B,SAAS;QAEnC,IAAM+B,cAAcf,MAAMgB,KAAK,CAACxC,QAAQA,SAASsC,gBAAgBjC,KAAK;QACtEL,UAAUsC,gBAAgBjC,KAAK;QAE/B,IAAIgC,aAAaxC,cAAc;YAC7B,gCAAgC;YAChCsC,aAAaI;QACf,OAAO,IAAIF,aAAahD,gBAAiBgD,YAAY/C,kBAAkB+C,YAAYzC,kBAAmB;YACpG,qDAAqD;YACrDsC,QAAQO,IAAI,CAAC;gBAAE7B,IAAIyB;gBAAUvB,OAAOyB;YAAY;QAClD,OAAO;YACL,MAAM,IAAIhC,MAAM,AAAC,yBAA8C,OAAtB8B,SAASf,QAAQ,CAAC;QAC7D;IACF;IAEA,IAAI,CAACa,YAAY;QACf,MAAM,IAAI5B,MAAM;IAClB;IAEA,2DAA2D;IAC3D,IAAMmC,iBAAiBd,mBAAmBD;IAE1C,OAAO;QACLO,SAAAA;QACAC,YAAAA;QACAQ,YAAYhB;QACZiB,WAAWF;QACXG,SAASrB,MAAMtB,MAAM;QACrB4C,YAAYJ;IACd;AACF;AAEA;;;;;;CAMC,GACD,SAASK,WACPvB,KAAa,EACbwB,UAAkB,EAClBC,SAAiB;IAMjB,IAAIjD,SAASgD;IAEb,yBAAyB;IACzB,IAAIxB,KAAK,CAACxB,OAAO,KAAK,MAAM;QAC1B,MAAM,IAAIO,MAAM;IAClB;IACAP;IAEA,oBAAoB;IACpB,IAAMkD,cAAc9C,gBAAgBoB,OAAOxB;IAC3C,IAAMmD,cAAcD,YAAY7C,KAAK;IACrCL,UAAUkD,YAAY1C,SAAS;IAE/B,IAAM4C,UAKD,EAAE;IAEP,oBAAoB;IACpB,IAAK,IAAIjD,IAAI,GAAGA,IAAIgD,aAAahD,IAAK;QACpC,mDAAmD;QACnD,IAAMkD,iBAAiBjD,gBAAgBoB,OAAOxB;QAC9CA,UAAUqD,eAAe7C,SAAS;QAElC,oBAAoB;QACpB,IAAM8C,qBAAqBlD,gBAAgBoB,OAAOxB;QAClDA,UAAUsD,mBAAmB9C,SAAS;QAEtC4C,QAAQX,IAAI,CAAC;YACXc,eAAe;YACfC,cAAcH,eAAehD,KAAK;YAClCoD,oBAAoB;YACpBC,kBAAkBJ,mBAAmBjD,KAAK;QAC5C;IACF;IAEA,uDAAuD;IACvD,IAAIsD,aAAa,IAAI,sBAAsB;IAC3C,IAAK,IAAIxD,KAAI,GAAGA,KAAIiD,QAAQlD,MAAM,EAAEC,KAAK;QACvC,IAAMyD,SAASR,OAAO,CAACjD,GAAE;QACzB,0CAA0C;QAC1CyD,OAAOL,aAAa,GAAGI;QAEvB,6CAA6C;QAC7C,IAAME,gBAAgBrC,KAAK,CAACmC,WAAW;QACvC,IAAMhB,aAAa,AAACkB,CAAAA,gBAAgB,CAAA,IAAK;QAEzC,oDAAoD;QACpD,6DAA6D;QAC7DD,OAAOH,kBAAkB,GAAGG,OAAOJ,YAAY,GAAGb,aAAaM;QAE/D,gEAAgE;QAChE,IAAMa,aAAaC,KAAKC,IAAI,CAACJ,OAAOJ,YAAY,GAAG,KAAK;QACxDG,cAAcG;IAChB;IAEA,OAAOV;AACT;AAQO,SAASlE,SAASsC,KAAa;;QA8ClC,IAAMoC,SAASK,YAAY,CAAC9D,EAAE;QAC9B,IAAM+D,cAAcN,OAAOL,aAAa;QAExC,qBAAqB;QACrB,IAAMY,YAAY5C,iBAAiBC,OAAO0C,aAAajB;QAEvD,yCAAyC;QACzC,IAAML,YAAYsB,cAAcC,UAAUxB,UAAU;QACpD,yFAAyF;QACzF,IAAME,UAAUD,YAAYgB,OAAOH,kBAAkB;QAErD,0EAA0E;QAC1E,yEAAyE;QACzE,oEAAoE;QACpE,IAAMW,iBAAiB5C,MAAMgB,KAAK,CAACI,WAAWC;QAE9C,mCAAmC;QACnC,IAAMwB,cAAwB,EAAE;QAChCC,IAAAA,oBAAW,EAACF,gBAAgBD,UAAUhC,UAAU,EAAEyB,OAAOF,gBAAgB,EAAE;YACzEa,OAAO,SAACC;gBACNH,YAAY5B,IAAI,CAAC+B;YACnB;QACF;QAEA,2BAA2B;QAC3B,IAAIC,cAAcC,OAAOC,MAAM,CAACN;QAEhC,+EAA+E;QAC/E,oEAAoE;QACpE,0CAA0C;QAC1C,IAAK,IAAIO,IAAIT,UAAUjC,OAAO,CAAChC,MAAM,
GAAG,GAAG0E,KAAK,GAAGA,IAAK;YACtDH,cAAchE,YAAYgE,aAAaN,UAAUjC,OAAO,CAAC0C,EAAE;QAC7D;QAEAC,aAAapC,IAAI,CAACgC;QAClBK,oBAAoBL,YAAYvE,MAAM;IACxC;QAlEkB6E;IAflB,kBAAkB;IAClB,IAAIvD,MAAMtB,MAAM,GAAG,MAAM,CAACJ,aAAa0B,OAAO,GAAGrC,WAAW;QAC1D,MAAM,IAAIoB,MAAM;IAClB;IAEA,6BAA6B;IAC7B,IAAMyE,YAAYxD,KAAK,CAAC,EAAE,GAAG;IAE7B,kCAAkC;IAClC,IAAMuD,aAAwC;QAC5C,GAAG;QACH,GAAG;QACH,GAAG;QACH,IAAI;IACN;IACA,IAAM9B,aAAY8B,wBAAAA,UAAU,CAACC,UAAU,cAArBD,mCAAAA,wBAAyB;IAE3C,2EAA2E;IAC3E,6CAA6C;IAC7C,IAAIE,YAAYzD,MAAMtB,MAAM;IAC5B,MAAO+E,YAAY,MAAMzD,KAAK,CAACyD,YAAY,EAAE,KAAK,KAAM;QACtDA;IACF;IACA,kDAAkD;IAClD,MAAOA,YAAY,MAAM,KAAKA,YAAY,GAAI;QAC5CA;IACF;IAEA,yCAAyC;IACzC,IAAI,CAACnF,aAAa0B,OAAOyD,YAAY,GAAG7F,kBAAkB;QACxD,MAAM,IAAImB,MAAM;IAClB;IAEA,qEAAqE;IACrE,IAAM2E,eAAe,AAAC1D,CAAAA,MAAM2D,YAAY,CAACF,YAAY,KAAK,CAAA,IAAK;IAC/D,IAAMjC,aAAaiC,YAAY,KAAKC;IAEpC,uCAAuC;IACvC,IAAMjB,eAAelB,WAAWvB,OAAOwB,YAAYC;IAEnD,wBAAwB;IACxB,IAAM4B,eAAyB,EAAE;IACjC,IAAIC,mBAAmB;IAEvB,IAAK,IAAI3E,IAAI,GAAGA,IAAI8D,aAAa/D,MAAM,EAAEC;IAuCzC,OAAOuE,OAAOC,MAAM,CAACE;AACvB;AAMO,SAAS5F;IACd,IAAMmG,SAAmB,EAAE;IAE3B,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAAA,SAAAA,UAAUd,KAAa,EAAEe,SAAiB,EAAEC,QAAwC;YAClFJ,OAAO3C,IAAI,CAAC+B;YACZgB;QACF;QAEAC,OAAAA,SAAAA,MAAMD,QAAwC;YAC5C,IAAI;gBACF,IAAMhE,QAAQkD,OAAOC,MAAM,CAACS;gBAC5B,IAAMM,SAASxG,SAASsC;gBACxB,IAAI,CAACiB,IAAI,CAACiD;gBACVF;YACF,EAAE,OAAOG,KAAK;gBACZH,SAASG;YACX;QACF;IACF;AACF"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/xz/Decoder.ts"],"sourcesContent":["/**\n * XZ Decompression Module\n *\n * XZ is a container format that wraps LZMA2 compressed data.\n * This module provides both synchronous and streaming XZ decoders.\n *\n * Pure JavaScript implementation, works on Node.js 0.8+\n *\n * IMPORTANT: Buffer Management Pattern\n *\n * When calling decodeLzma2(), use the direct return pattern:\n *\n * ✅ CORRECT - Fast path:\n * const output = decodeLzma2(data, props, size) as Buffer;\n *\n * ❌ WRONG - Slow path (do NOT buffer):\n * const chunks: Buffer[] = [];\n * decodeLzma2(data, props, size, { write: c => chunks.push(c) });\n * return Buffer.concat(chunks); // ← Unnecessary copies!\n */\n\nimport { Transform } from 'extract-base-iterator';\nimport type { Transform as TransformType } from 'stream';\nimport { decodeBcj } from '../filters/bcj/Bcj.ts';\nimport { decodeBcjArm } from '../filters/bcj/BcjArm.ts';\nimport { decodeBcjArm64 } from '../filters/bcj/BcjArm64.ts';\nimport { decodeBcjArmt } from '../filters/bcj/BcjArmt.ts';\nimport { decodeBcjIa64 } from '../filters/bcj/BcjIa64.ts';\nimport { decodeBcjPpc } from '../filters/bcj/BcjPpc.ts';\nimport { decodeBcjSparc } from '../filters/bcj/BcjSparc.ts';\nimport { decodeDelta } from '../filters/delta/Delta.ts';\nimport { decodeLzma2 } from '../lzma/index.ts';\nimport { tryLoadNative } from '../native.ts';\n\n// XZ magic bytes\nconst XZ_MAGIC = [0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00];\nconst XZ_FOOTER_MAGIC = [0x59, 0x5a]; // \"YZ\"\n\n// Filter IDs (from XZ specification)\nconst FILTER_DELTA = 0x03;\nconst FILTER_BCJ_X86 = 0x04;\nconst FILTER_BCJ_PPC = 0x05;\nconst FILTER_BCJ_IA64 = 0x06;\nconst FILTER_BCJ_ARM = 0x07;\nconst FILTER_BCJ_ARMT = 0x08;\nconst FILTER_BCJ_SPARC = 0x09;\nconst FILTER_BCJ_ARM64 = 0x0a;\nconst FILTER_LZMA2 = 0x21;\n\n// Filter info for parsing\ninterface FilterInfo {\n id: number;\n props: Buffer;\n}\n\n/**\n * Simple buffer comparison\n */\nfunction bufferEquals(buf: Buffer, offset: number, expected: number[]): boolean {\n if (offset + expected.length > buf.length) {\n return false;\n }\n for (let i = 0; i < expected.length; i++) {\n if (buf[offset + i] !== expected[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Decode variable-length integer (XZ multibyte encoding)\n * Returns number, but limits to 32-bit to work on Node 0.8+\n */\nfunction decodeMultibyte(buf: Buffer, offset: number): { value: number; bytesRead: number } {\n let value = 0;\n let i = 0;\n let byte: number;\n do {\n if (offset + i >= buf.length) {\n throw new Error('Truncated multibyte integer');\n }\n byte = buf[offset + i];\n value |= (byte & 0x7f) << (i * 7);\n i++;\n if (i > 4) {\n // Reduced to prevent overflow on Node 0.8\n throw new Error('Multibyte integer too large');\n }\n } while (byte & 0x80);\n return { value, bytesRead: i };\n}\n\n/**\n * Apply a preprocessing filter (BCJ/Delta) to decompressed data\n */\nfunction applyFilter(data: Buffer, filter: FilterInfo): Buffer {\n switch (filter.id) {\n case FILTER_BCJ_X86:\n return decodeBcj(data, filter.props);\n case FILTER_BCJ_ARM:\n return decodeBcjArm(data, filter.props);\n case FILTER_BCJ_ARM64:\n return decodeBcjArm64(data, filter.props);\n case FILTER_BCJ_ARMT:\n return decodeBcjArmt(data, filter.props);\n case FILTER_BCJ_PPC:\n return decodeBcjPpc(data, filter.props);\n case FILTER_BCJ_SPARC:\n return decodeBcjSparc(data, filter.props);\n case FILTER_BCJ_IA64:\n return decodeBcjIa64(data, filter.props);\n case 
FILTER_DELTA:\n return decodeDelta(data, filter.props);\n default:\n throw new Error(`Unsupported filter: 0x${filter.id.toString(16)}`);\n }\n}\n\n/**\n * Parse XZ Block Header to extract filters and LZMA2 properties\n */\nfunction parseBlockHeader(\n input: Buffer,\n offset: number,\n _checkSize: number\n): {\n filters: FilterInfo[];\n lzma2Props: Buffer;\n headerSize: number;\n dataStart: number;\n dataEnd: number;\n nextOffset: number;\n} {\n // Block header size\n const blockHeaderSizeRaw = input[offset];\n if (blockHeaderSizeRaw === 0) {\n throw new Error('Invalid block header size (index indicator found instead of block)');\n }\n const blockHeaderSize = (blockHeaderSizeRaw + 1) * 4;\n\n // Parse block header\n const blockHeaderStart = offset;\n offset++; // skip size byte\n\n const blockFlags = input[offset++];\n const numFilters = (blockFlags & 0x03) + 1;\n const hasCompressedSize = (blockFlags & 0x40) !== 0;\n const hasUncompressedSize = (blockFlags & 0x80) !== 0;\n\n // Skip optional sizes\n if (hasCompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n if (hasUncompressedSize) {\n const result = decodeMultibyte(input, offset);\n offset += result.bytesRead;\n }\n\n // Parse all filters\n const filters: FilterInfo[] = [];\n let lzma2Props: Buffer | null = null;\n\n for (let i = 0; i < numFilters; i++) {\n const filterIdResult = decodeMultibyte(input, offset);\n const filterId = filterIdResult.value;\n offset += filterIdResult.bytesRead;\n\n const propsSizeResult = decodeMultibyte(input, offset);\n offset += propsSizeResult.bytesRead;\n\n const filterProps = input.slice(offset, offset + propsSizeResult.value);\n offset += propsSizeResult.value;\n\n if (filterId === FILTER_LZMA2) {\n // LZMA2 must be the last filter\n lzma2Props = filterProps;\n } else if (filterId === FILTER_DELTA || (filterId >= FILTER_BCJ_X86 && filterId <= FILTER_BCJ_ARM64)) {\n // Preprocessing filter - store for later application\n filters.push({ id: filterId, props: filterProps });\n } else {\n throw new Error(`Unsupported filter: 0x${filterId.toString(16)}`);\n }\n }\n\n if (!lzma2Props) {\n throw new Error('No LZMA2 filter found in XZ block');\n }\n\n // Skip to end of block header (must be aligned to 4 bytes)\n const blockDataStart = blockHeaderStart + blockHeaderSize;\n\n return {\n filters,\n lzma2Props,\n headerSize: blockHeaderSize,\n dataStart: blockDataStart,\n dataEnd: input.length,\n nextOffset: blockDataStart,\n };\n}\n\n/**\n * Parse XZ Index to get block positions\n *\n * XZ Index stores \"Unpadded Size\" for each block which equals:\n * Block Header Size + Compressed Data Size + Check Size\n * (does NOT include padding to 4-byte boundary)\n */\nfunction parseIndex(\n input: Buffer,\n indexStart: number,\n checkSize: number\n): Array<{\n compressedPos: number;\n compressedDataSize: number;\n uncompressedSize: number;\n}> {\n let offset = indexStart;\n\n // Index indicator (0x00)\n if (input[offset] !== 0x00) {\n throw new Error('Invalid index indicator');\n }\n offset++;\n\n // Number of records\n const countResult = decodeMultibyte(input, offset);\n const recordCount = countResult.value;\n offset += countResult.bytesRead;\n\n const records: Array<{\n compressedPos: number;\n unpaddedSize: number;\n compressedDataSize: number;\n uncompressedSize: number;\n }> = [];\n\n // Parse each record\n for (let i = 0; i < recordCount; i++) {\n // Unpadded Size (header + compressed data + check)\n const unpaddedResult = decodeMultibyte(input, offset);\n offset 
+= unpaddedResult.bytesRead;\n\n // Uncompressed size\n const uncompressedResult = decodeMultibyte(input, offset);\n offset += uncompressedResult.bytesRead;\n\n records.push({\n compressedPos: 0, // will be calculated\n unpaddedSize: unpaddedResult.value,\n compressedDataSize: 0, // will be calculated\n uncompressedSize: uncompressedResult.value,\n });\n }\n\n // Calculate actual positions by walking through blocks\n let currentPos = 12; // After stream header\n for (let i = 0; i < records.length; i++) {\n const record = records[i];\n // Record where this block's header starts\n record.compressedPos = currentPos;\n\n // Get block header size from the actual data\n const headerSizeRaw = input[currentPos];\n const headerSize = (headerSizeRaw + 1) * 4;\n\n // Calculate compressed data size from unpadded size\n // unpaddedSize = headerSize + compressedDataSize + checkSize\n record.compressedDataSize = record.unpaddedSize - headerSize - checkSize;\n\n // Move to next block: unpaddedSize + padding to 4-byte boundary\n const paddedSize = Math.ceil(record.unpaddedSize / 4) * 4;\n currentPos += paddedSize;\n }\n\n return records;\n}\n\n/**\n * Decompress XZ data synchronously\n * Uses @napi-rs/lzma if available on Node 14+, falls back to pure JS\n * Properly handles multi-block XZ files and stream padding\n * @param input - XZ compressed data\n * @returns Decompressed data\n */\nexport function decodeXZ(input: Buffer): Buffer {\n // Try native acceleration first (Node 14+ with @napi-rs/lzma installed)\n const native = tryLoadNative();\n if (native) {\n return native.xz.decompressSync(input);\n }\n\n // Verify XZ magic\n if (input.length < 12 || !bufferEquals(input, 0, XZ_MAGIC)) {\n throw new Error('Invalid XZ magic bytes');\n }\n\n // Stream flags at offset 6-7\n const checkType = input[7] & 0x0f;\n\n // Check sizes based on check type\n const checkSizes: { [key: number]: number } = {\n 0: 0, // None\n 1: 4, // CRC32\n 4: 8, // CRC64\n 10: 32, // SHA-256\n };\n const checkSize = checkSizes[checkType] ?? 
0;\n\n // Find footer by skipping stream padding (null bytes at end before footer)\n // Stream padding must be multiple of 4 bytes\n let footerEnd = input.length;\n while (footerEnd > 12 && input[footerEnd - 1] === 0x00) {\n footerEnd--;\n }\n // Align to 4-byte boundary (stream padding rules)\n while (footerEnd % 4 !== 0 && footerEnd > 12) {\n footerEnd++;\n }\n\n // Verify footer magic (at footerEnd - 2)\n if (!bufferEquals(input, footerEnd - 2, XZ_FOOTER_MAGIC)) {\n throw new Error('Invalid XZ footer magic');\n }\n\n // Get backward size (tells us where index starts) - at footerEnd - 8\n const backwardSize = (input.readUInt32LE(footerEnd - 8) + 1) * 4;\n const indexStart = footerEnd - 12 - backwardSize;\n\n // Parse Index to get block information\n const blockRecords = parseIndex(input, indexStart, checkSize);\n\n // Decompress each block\n const outputChunks: Buffer[] = [];\n let _totalOutputSize = 0;\n\n for (let i = 0; i < blockRecords.length; i++) {\n const record = blockRecords[i];\n const recordStart = record.compressedPos;\n\n // Parse block header\n const blockInfo = parseBlockHeader(input, recordStart, checkSize);\n\n // Extract compressed data for this block\n const dataStart = recordStart + blockInfo.headerSize;\n // compressedDataSize is calculated from the Index's Unpadded Size minus header and check\n const dataEnd = dataStart + record.compressedDataSize;\n\n // Note: XZ blocks have padding AFTER the check field to align to 4 bytes,\n // but the compressedSize from index is exact - no need to strip padding.\n // LZMA2 data includes a 0x00 end marker which must NOT be stripped.\n const compressedData = input.slice(dataStart, dataEnd);\n\n // Decompress this block with LZMA2 (fast path, no buffering)\n let blockOutput = decodeLzma2(compressedData, blockInfo.lzma2Props, record.uncompressedSize) as Buffer;\n\n // Apply preprocessing filters in reverse order (BCJ/Delta applied after LZMA2)\n // Filters are stored in order they were applied during compression,\n // so we need to reverse for decompression\n for (let j = blockInfo.filters.length - 1; j >= 0; j--) {\n blockOutput = applyFilter(blockOutput, blockInfo.filters[j]) as Buffer;\n }\n\n outputChunks.push(blockOutput);\n _totalOutputSize += blockOutput.length;\n }\n\n return Buffer.concat(outputChunks);\n}\n\n/**\n * Create an XZ decompression Transform stream\n * @returns Transform stream that decompresses XZ data\n */\nexport function createXZDecoder(): TransformType {\n const chunks: Buffer[] = [];\n\n return new Transform({\n transform(chunk: Buffer, _encoding: string, callback: (error?: Error | null) => void) {\n chunks.push(chunk);\n callback();\n },\n\n flush(callback: (error?: Error | null) => void) {\n try {\n const input = Buffer.concat(chunks);\n const output = decodeXZ(input);\n this.push(output);\n callback();\n } catch (err) {\n callback(err as Error);\n }\n },\n 
});\n}\n"],"names":["createXZDecoder","decodeXZ","XZ_MAGIC","XZ_FOOTER_MAGIC","FILTER_DELTA","FILTER_BCJ_X86","FILTER_BCJ_PPC","FILTER_BCJ_IA64","FILTER_BCJ_ARM","FILTER_BCJ_ARMT","FILTER_BCJ_SPARC","FILTER_BCJ_ARM64","FILTER_LZMA2","bufferEquals","buf","offset","expected","length","i","decodeMultibyte","value","byte","Error","bytesRead","applyFilter","data","filter","id","decodeBcj","props","decodeBcjArm","decodeBcjArm64","decodeBcjArmt","decodeBcjPpc","decodeBcjSparc","decodeBcjIa64","decodeDelta","toString","parseBlockHeader","input","_checkSize","blockHeaderSizeRaw","blockHeaderSize","blockHeaderStart","blockFlags","numFilters","hasCompressedSize","hasUncompressedSize","result","filters","lzma2Props","filterIdResult","filterId","propsSizeResult","filterProps","slice","push","blockDataStart","headerSize","dataStart","dataEnd","nextOffset","parseIndex","indexStart","checkSize","countResult","recordCount","records","unpaddedResult","uncompressedResult","compressedPos","unpaddedSize","compressedDataSize","uncompressedSize","currentPos","record","headerSizeRaw","paddedSize","Math","ceil","checkSizes","native","tryLoadNative","xz","decompressSync","checkType","footerEnd","backwardSize","readUInt32LE","blockRecords","outputChunks","_totalOutputSize","recordStart","blockInfo","compressedData","blockOutput","decodeLzma2","j","Buffer","concat","chunks","Transform","transform","chunk","_encoding","callback","flush","output","err"],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;CAmBC;;;;;;;;;;;QAsWeA;eAAAA;;QAzFAC;eAAAA;;;mCA3QU;qBAEA;wBACG;0BACE;yBACD;yBACA;wBACD;0BACE;uBACH;uBACA;wBACE;AAE9B,iBAAiB;AACjB,IAAMC,WAAW;IAAC;IAAM;IAAM;IAAM;IAAM;IAAM;CAAK;AACrD,IAAMC,kBAAkB;IAAC;IAAM;CAAK,EAAE,OAAO;AAE7C,qCAAqC;AACrC,IAAMC,eAAe;AACrB,IAAMC,iBAAiB;AACvB,IAAMC,iBAAiB;AACvB,IAAMC,kBAAkB;AACxB,IAAMC,iBAAiB;AACvB,IAAMC,kBAAkB;AACxB,IAAMC,mBAAmB;AACzB,IAAMC,mBAAmB;AACzB,IAAMC,eAAe;AAQrB;;CAEC,GACD,SAASC,aAAaC,GAAW,EAAEC,MAAc,EAAEC,QAAkB;IACnE,IAAID,SAASC,SAASC,MAAM,GAAGH,IAAIG,MAAM,EAAE;QACzC,OAAO;IACT;IACA,IAAK,IAAIC,IAAI,GAAGA,IAAIF,SAASC,MAAM,EAAEC,IAAK;QACxC,IAAIJ,GAAG,CAACC,SAASG,EAAE,KAAKF,QAAQ,CAACE,EAAE,EAAE;YACnC,OAAO;QACT;IACF;IACA,OAAO;AACT;AAEA;;;CAGC,GACD,SAASC,gBAAgBL,GAAW,EAAEC,MAAc;IAClD,IAAIK,QAAQ;IACZ,IAAIF,IAAI;IACR,IAAIG;IACJ,GAAG;QACD,IAAIN,SAASG,KAAKJ,IAAIG,MAAM,EAAE;YAC5B,MAAM,IAAIK,MAAM;QAClB;QACAD,OAAOP,GAAG,CAACC,SAASG,EAAE;QACtBE,SAAS,AAACC,CAAAA,OAAO,IAAG,KAAOH,IAAI;QAC/BA;QACA,IAAIA,IAAI,GAAG;YACT,0CAA0C;YAC1C,MAAM,IAAII,MAAM;QAClB;IACF,QAASD,OAAO,MAAM;IACtB,OAAO;QAAED,OAAAA;QAAOG,WAAWL;IAAE;AAC/B;AAEA;;CAEC,GACD,SAASM,YAAYC,IAAY,EAAEC,MAAkB;IACnD,OAAQA,OAAOC,EAAE;QACf,KAAKtB;YACH,OAAOuB,IAAAA,gBAAS,EAACH,MAAMC,OAAOG,KAAK;QACrC,KAAKrB;YACH,OAAOsB,IAAAA,sBAAY,EAACL,MAAMC,OAAOG,KAAK;QACxC,KAAKlB;YACH,OAAOoB,IAAAA,0BAAc,EAACN,MAAMC,OAAOG,KAAK;QAC1C,KAAKpB;YACH,OAAOuB,IAAAA,wBAAa,EAACP,MAAMC,OAAOG,KAAK;QACzC,KAAKvB;YACH,OAAO2B,IAAAA,sBAAY,EAACR,MAAMC,OAAOG,KAAK;QACxC,KAAKnB;YACH,OAAOwB,IAAAA,0BAAc,EAACT,MAAMC,OAAOG,KAAK;QAC1C,KAAKtB;YACH,OAAO4B,IAAAA,wBAAa,EAACV,MAAMC,OAAOG,KAAK;QACzC,KAAKzB;YACH,OAAOgC,IAAAA,oBAAW,EAACX,MAAMC,OAAOG,KAAK;QACvC;YACE,MAAM,IAAIP,MAAM,AAAC,yBAA+C,OAAvBI,OAAOC,EAAE,CAACU,QAAQ,CAAC;IAChE;AACF;AAEA;;CAEC,GACD,SAASC,iBACPC,KAAa,EACbxB,MAAc,EACdyB,UAAkB;IASlB,oBAAoB;IACpB,IAAMC,qBAAqBF,KAAK,CAACxB,OAAO;IACxC,IAAI0B,uBAAuB,GAAG;QAC5B,MAAM,IAAInB,MAAM;IAClB;IACA,IAAMoB,kBAAkB,AAACD,CAAAA,qBAAqB,CAAA,IAAK;IAEnD,qBAAqB;IACrB,IAAME,mBAAmB5B;IACzBA,UAAU,iBAAiB;IAE3B,IAAM6B,aAAaL,KAAK,CAACxB,SAAS;IAClC,IAAM8B,aAAa,AAACD,CAAAA,aAAa,IAAG,IAAK;IACzC,IAAME,oBAAoB,AAACF,CAAAA,aAAa,I
AAG,MAAO;IAClD,IAAMG,sBAAsB,AAACH,CAAAA,aAAa,IAAG,MAAO;IAEpD,sBAAsB;IACtB,IAAIE,mBAAmB;QACrB,IAAME,SAAS7B,gBAAgBoB,OAAOxB;QACtCA,UAAUiC,OAAOzB,SAAS;IAC5B;IAEA,IAAIwB,qBAAqB;QACvB,IAAMC,UAAS7B,gBAAgBoB,OAAOxB;QACtCA,UAAUiC,QAAOzB,SAAS;IAC5B;IAEA,oBAAoB;IACpB,IAAM0B,UAAwB,EAAE;IAChC,IAAIC,aAA4B;IAEhC,IAAK,IAAIhC,IAAI,GAAGA,IAAI2B,YAAY3B,IAAK;QACnC,IAAMiC,iBAAiBhC,gBAAgBoB,OAAOxB;QAC9C,IAAMqC,WAAWD,eAAe/B,KAAK;QACrCL,UAAUoC,eAAe5B,SAAS;QAElC,IAAM8B,kBAAkBlC,gBAAgBoB,OAAOxB;QAC/CA,UAAUsC,gBAAgB9B,SAAS;QAEnC,IAAM+B,cAAcf,MAAMgB,KAAK,CAACxC,QAAQA,SAASsC,gBAAgBjC,KAAK;QACtEL,UAAUsC,gBAAgBjC,KAAK;QAE/B,IAAIgC,aAAaxC,cAAc;YAC7B,gCAAgC;YAChCsC,aAAaI;QACf,OAAO,IAAIF,aAAahD,gBAAiBgD,YAAY/C,kBAAkB+C,YAAYzC,kBAAmB;YACpG,qDAAqD;YACrDsC,QAAQO,IAAI,CAAC;gBAAE7B,IAAIyB;gBAAUvB,OAAOyB;YAAY;QAClD,OAAO;YACL,MAAM,IAAIhC,MAAM,AAAC,yBAA8C,OAAtB8B,SAASf,QAAQ,CAAC;QAC7D;IACF;IAEA,IAAI,CAACa,YAAY;QACf,MAAM,IAAI5B,MAAM;IAClB;IAEA,2DAA2D;IAC3D,IAAMmC,iBAAiBd,mBAAmBD;IAE1C,OAAO;QACLO,SAAAA;QACAC,YAAAA;QACAQ,YAAYhB;QACZiB,WAAWF;QACXG,SAASrB,MAAMtB,MAAM;QACrB4C,YAAYJ;IACd;AACF;AAEA;;;;;;CAMC,GACD,SAASK,WACPvB,KAAa,EACbwB,UAAkB,EAClBC,SAAiB;IAMjB,IAAIjD,SAASgD;IAEb,yBAAyB;IACzB,IAAIxB,KAAK,CAACxB,OAAO,KAAK,MAAM;QAC1B,MAAM,IAAIO,MAAM;IAClB;IACAP;IAEA,oBAAoB;IACpB,IAAMkD,cAAc9C,gBAAgBoB,OAAOxB;IAC3C,IAAMmD,cAAcD,YAAY7C,KAAK;IACrCL,UAAUkD,YAAY1C,SAAS;IAE/B,IAAM4C,UAKD,EAAE;IAEP,oBAAoB;IACpB,IAAK,IAAIjD,IAAI,GAAGA,IAAIgD,aAAahD,IAAK;QACpC,mDAAmD;QACnD,IAAMkD,iBAAiBjD,gBAAgBoB,OAAOxB;QAC9CA,UAAUqD,eAAe7C,SAAS;QAElC,oBAAoB;QACpB,IAAM8C,qBAAqBlD,gBAAgBoB,OAAOxB;QAClDA,UAAUsD,mBAAmB9C,SAAS;QAEtC4C,QAAQX,IAAI,CAAC;YACXc,eAAe;YACfC,cAAcH,eAAehD,KAAK;YAClCoD,oBAAoB;YACpBC,kBAAkBJ,mBAAmBjD,KAAK;QAC5C;IACF;IAEA,uDAAuD;IACvD,IAAIsD,aAAa,IAAI,sBAAsB;IAC3C,IAAK,IAAIxD,KAAI,GAAGA,KAAIiD,QAAQlD,MAAM,EAAEC,KAAK;QACvC,IAAMyD,SAASR,OAAO,CAACjD,GAAE;QACzB,0CAA0C;QAC1CyD,OAAOL,aAAa,GAAGI;QAEvB,6CAA6C;QAC7C,IAAME,gBAAgBrC,KAAK,CAACmC,WAAW;QACvC,IAAMhB,aAAa,AAACkB,CAAAA,gBAAgB,CAAA,IAAK;QAEzC,oDAAoD;QACpD,6DAA6D;QAC7DD,OAAOH,kBAAkB,GAAGG,OAAOJ,YAAY,GAAGb,aAAaM;QAE/D,gEAAgE;QAChE,IAAMa,aAAaC,KAAKC,IAAI,CAACJ,OAAOJ,YAAY,GAAG,KAAK;QACxDG,cAAcG;IAChB;IAEA,OAAOV;AACT;AASO,SAASlE,SAASsC,KAAa;QAsBlByC;IArBlB,wEAAwE;IACxE,IAAMC,SAASC,IAAAA,uBAAa;IAC5B,IAAID,QAAQ;QACV,OAAOA,OAAOE,EAAE,CAACC,cAAc,CAAC7C;IAClC;IAEA,kBAAkB;IAClB,IAAIA,MAAMtB,MAAM,GAAG,MAAM,CAACJ,aAAa0B,OAAO,GAAGrC,WAAW;QAC1D,MAAM,IAAIoB,MAAM;IAClB;IAEA,6BAA6B;IAC7B,IAAM+D,YAAY9C,KAAK,CAAC,EAAE,GAAG;IAE7B,kCAAkC;IAClC,IAAMyC,aAAwC;QAC5C,GAAG;QACH,GAAG;QACH,GAAG;QACH,IAAI;IACN;IACA,IAAMhB,aAAYgB,wBAAAA,UAAU,CAACK,UAAU,cAArBL,mCAAAA,wBAAyB;IAE3C,2EAA2E;IAC3E,6CAA6C;IAC7C,IAAIM,YAAY/C,MAAMtB,MAAM;IAC5B,MAAOqE,YAAY,MAAM/C,KAAK,CAAC+C,YAAY,EAAE,KAAK,KAAM;QACtDA;IACF;IACA,kDAAkD;IAClD,MAAOA,YAAY,MAAM,KAAKA,YAAY,GAAI;QAC5CA;IACF;IAEA,yCAAyC;IACzC,IAAI,CAACzE,aAAa0B,OAAO+C,YAAY,GAAGnF,kBAAkB;QACxD,MAAM,IAAImB,MAAM;IAClB;IAEA,qEAAqE;IACrE,IAAMiE,eAAe,AAAChD,CAAAA,MAAMiD,YAAY,CAACF,YAAY,KAAK,CAAA,IAAK;IAC/D,IAAMvB,aAAauB,YAAY,KAAKC;IAEpC,uCAAuC;IACvC,IAAME,eAAe3B,WAAWvB,OAAOwB,YAAYC;IAEnD,wBAAwB;IACxB,IAAM0B,eAAyB,EAAE;IACjC,IAAIC,mBAAmB;IAEvB,IAAK,IAAIzE,IAAI,GAAGA,IAAIuE,aAAaxE,MAAM,EAAEC,IAAK;QAC5C,IAAMyD,SAASc,YAAY,CAACvE,EAAE;QAC9B,IAAM0E,cAAcjB,OAAOL,aAAa;QAExC,qBAAqB;QACrB,IAAMuB,YAAYvD,iBAAiBC,OAAOqD,aAAa5B;QAEvD,yCAAyC;QACzC,IAAML,YAAYiC,cAAcC,UAAUnC,UAAU;QACpD,yFAAyF;QACzF,IAAME,UAAUD,YAAYgB,OAAOH,kBAAkB;QAErD,0EAA0E;QAC1E,yEAAyE;QACzE,oEAAoE;QACpE,IAAMsB,iBAAiBvD,MAAMgB,KAAK,CAACI,WAAWC;QAE9C,6DAA6D;QAC7D,IAAImC,cAAcC,IAAAA,oBAAW,EAACF,gBAAgBD,UAAU3C,UAAU,EAAEyB,OAAOF,gBAAgB;QAE3F,+EAA+E;QAC/
E,oEAAoE;QACpE,0CAA0C;QAC1C,IAAK,IAAIwB,IAAIJ,UAAU5C,OAAO,CAAChC,MAAM,GAAG,GAAGgF,KAAK,GAAGA,IAAK;YACtDF,cAAcvE,YAAYuE,aAAaF,UAAU5C,OAAO,CAACgD,EAAE;QAC7D;QAEAP,aAAalC,IAAI,CAACuC;QAClBJ,oBAAoBI,YAAY9E,MAAM;IACxC;IAEA,OAAOiF,OAAOC,MAAM,CAACT;AACvB;AAMO,SAAS1F;IACd,IAAMoG,SAAmB,EAAE;IAE3B,OAAO,IAAIC,8BAAS,CAAC;QACnBC,WAAAA,SAAAA,UAAUC,KAAa,EAAEC,SAAiB,EAAEC,QAAwC;YAClFL,OAAO5C,IAAI,CAAC+C;YACZE;QACF;QAEAC,OAAAA,SAAAA,MAAMD,QAAwC;YAC5C,IAAI;gBACF,IAAMlE,QAAQ2D,OAAOC,MAAM,CAACC;gBAC5B,IAAMO,SAAS1G,SAASsC;gBACxB,IAAI,CAACiB,IAAI,CAACmD;gBACVF;YACF,EAAE,OAAOG,KAAK;gBACZH,SAASG;YACX;QACF;IACF;AACF"}
package/dist/esm/index.d.ts CHANGED
@@ -1,4 +1,14 @@
- export * from './filters/index.js';
- export type { OutputSink } from './lzma/index.js';
- export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectLzmaFormat, Lzma2Decoder, LzmaDecoder, } from './lzma/index.js';
+ /**
+ * XZ-Compat: XZ/LZMA Decompression Library
+ *
+ * Pure JavaScript implementation with optional native acceleration
+ * via @napi-rs/lzma on Node.js 14+.
+ *
+ * Works on Node.js 0.8+ with automatic performance optimization
+ * when native bindings are available.
+ */
+ export { decode7zLzma, decode7zLzma2 } from './sevenz.js';
  export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
+ export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2 } from './lzma/index.js';
+ export * from './filters/index.js';
+ export { isNativeAvailable } from './native.js';
package/dist/esm/index.js CHANGED
@@ -1,5 +1,27 @@
- // XZ and LZMA decoders for external use
- // Re-export filters for convenience
- export * from './filters/index.js';
- export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectLzmaFormat, Lzma2Decoder, LzmaDecoder } from './lzma/index.js';
+ /**
+ * XZ-Compat: XZ/LZMA Decompression Library
+ *
+ * Pure JavaScript implementation with optional native acceleration
+ * via @napi-rs/lzma on Node.js 14+.
+ *
+ * Works on Node.js 0.8+ with automatic performance optimization
+ * when native bindings are available.
+ */ // ============================================================================
+ // High-Level APIs (Recommended)
+ // ============================================================================
+ // 7z-specific decoders - accept properties separately, try native automatically
+ export { decode7zLzma, decode7zLzma2 } from './sevenz.js';
+ // XZ container format - self-describing, works great with native acceleration
  export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
+ // ============================================================================
+ // Low-Level APIs (Backward Compatibility)
+ // ============================================================================
+ // Raw LZMA decoders (for specialized use cases)
+ export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2 } from './lzma/index.js';
+ // ============================================================================
+ // Supporting APIs
+ // ============================================================================
+ // Preprocessing filters (BCJ/Delta - used by 7z-iterator)
+ export * from './filters/index.js';
+ // Native acceleration utilities
+ export { isNativeAvailable } from './native.js';
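Putting the reorganized entry point together, a hedged usage sketch of the high-level surface; the exact signature of `isNativeAvailable` is not shown in this diff and is assumed to return a boolean, and `decode7zLzma`/`decode7zLzma2` are omitted because their parameters are not visible here:

```ts
import { readFileSync } from 'fs';
import { decodeXZ, isNativeAvailable } from 'xz-compat';

// Informational only: decodeXZ chooses the native or pure-JS backend itself.
console.log('native acceleration available:', isNativeAvailable());

const compressed = readFileSync('data.xz');
const output = decodeXZ(compressed); // returns a Buffer with the decompressed bytes
console.log('decompressed', output.length, 'bytes');
```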
package/dist/esm/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/index.ts"],"sourcesContent":["// XZ and LZMA decoders for external use\n\n// Re-export filters for convenience\nexport * from './filters/index.ts';\nexport type { OutputSink } from './lzma/index.ts';\nexport {\n createLzma2Decoder,\n createLzmaDecoder,\n decodeLzma,\n decodeLzma2,\n detectLzmaFormat,\n Lzma2Decoder,\n LzmaDecoder,\n} from './lzma/index.ts';\nexport { createXZDecoder, decodeXZ } from './xz/Decoder.ts';\n"],"names":["createLzma2Decoder","createLzmaDecoder","decodeLzma","decodeLzma2","detectLzmaFormat","Lzma2Decoder","LzmaDecoder","createXZDecoder","decodeXZ"],"mappings":"AAAA,wCAAwC;AAExC,oCAAoC;AACpC,cAAc,qBAAqB;AAEnC,SACEA,kBAAkB,EAClBC,iBAAiB,EACjBC,UAAU,EACVC,WAAW,EACXC,gBAAgB,EAChBC,YAAY,EACZC,WAAW,QACN,kBAAkB;AACzB,SAASC,eAAe,EAAEC,QAAQ,QAAQ,kBAAkB"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/index.ts"],"sourcesContent":["/**\n * XZ-Compat: XZ/LZMA Decompression Library\n *\n * Pure JavaScript implementation with optional native acceleration\n * via @napi-rs/lzma on Node.js 14+.\n *\n * Works on Node.js 0.8+ with automatic performance optimization\n * when native bindings are available.\n */\n\n// ============================================================================\n// High-Level APIs (Recommended)\n// ============================================================================\n\n// 7z-specific decoders - accept properties separately, try native automatically\nexport { decode7zLzma, decode7zLzma2 } from './sevenz.ts';\n// XZ container format - self-describing, works great with native acceleration\nexport { createXZDecoder, decodeXZ } from './xz/Decoder.ts';\n\n// ============================================================================\n// Low-Level APIs (Backward Compatibility)\n// ============================================================================\n\n// Raw LZMA decoders (for specialized use cases)\nexport { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2 } from './lzma/index.ts';\n\n// ============================================================================\n// Supporting APIs\n// ============================================================================\n\n// Preprocessing filters (BCJ/Delta - used by 7z-iterator)\nexport * from './filters/index.ts';\n\n// Native acceleration utilities\nexport { isNativeAvailable } from './native.ts';\n"],"names":["decode7zLzma","decode7zLzma2","createXZDecoder","decodeXZ","createLzma2Decoder","createLzmaDecoder","decodeLzma","decodeLzma2","isNativeAvailable"],"mappings":"AAAA;;;;;;;;CAQC,GAED,+EAA+E;AAC/E,gCAAgC;AAChC,+EAA+E;AAE/E,gFAAgF;AAChF,SAASA,YAAY,EAAEC,aAAa,QAAQ,cAAc;AAC1D,8EAA8E;AAC9E,SAASC,eAAe,EAAEC,QAAQ,QAAQ,kBAAkB;AAE5D,+EAA+E;AAC/E,0CAA0C;AAC1C,+EAA+E;AAE/E,gDAAgD;AAChD,SAASC,kBAAkB,EAAEC,iBAAiB,EAAEC,UAAU,EAAEC,WAAW,QAAQ,kBAAkB;AAEjG,+EAA+E;AAC/E,kBAAkB;AAClB,+EAA+E;AAE/E,0DAA0D;AAC1D,cAAc,qBAAqB;AAEnC,gCAAgC;AAChC,SAASC,iBAAiB,QAAQ,cAAc"}
package/dist/esm/lzma/index.d.ts CHANGED
@@ -11,21 +11,5 @@
  * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
  */
  export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
- export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
- export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
- export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
- export * from './types.js';
- /**
- * Detect LZMA format from compressed data
- *
- * LZMA2 uses chunk-based framing with control bytes:
- * - 0x00: End of stream
- * - 0x01-0x02: Uncompressed chunks
- * - 0x80-0xFF: LZMA compressed chunks
- *
- * LZMA1 is raw LZMA-compressed data (no framing)
- *
- * @param data - Compressed data to analyze
- * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
- */
- export declare function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2';
+ export { decodeLzma2 } from './sync/Lzma2Decoder.js';
+ export { decodeLzma } from './sync/LzmaDecoder.js';
package/dist/esm/lzma/index.js CHANGED
@@ -11,34 +11,6 @@
  * - LZMA1 has no chunk boundaries and requires buffering all input for streaming
  */ // Streaming decoders (Transform streams)
  export { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.js';
- export { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.js';
+ export { decodeLzma2 } from './sync/Lzma2Decoder.js';
  // Synchronous decoders (for Buffer input)
- export { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.js';
- export { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.js';
- // Type exports
- export * from './types.js';
- /**
- * Detect LZMA format from compressed data
- *
- * LZMA2 uses chunk-based framing with control bytes:
- * - 0x00: End of stream
- * - 0x01-0x02: Uncompressed chunks
- * - 0x80-0xFF: LZMA compressed chunks
- *
- * LZMA1 is raw LZMA-compressed data (no framing)
- *
- * @param data - Compressed data to analyze
- * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2
- */ export function detectLzmaFormat(data) {
- if (data.length === 0) {
- // Default to LZMA2 for empty data (matches LZMA2 decoder behavior)
- return 'lzma2';
- }
- const firstByte = data[0];
- // LZMA2 control bytes: 0x00, 0x01, 0x02, or 0x80-0xFF
- if (firstByte === 0x00 || firstByte === 0x01 || firstByte === 0x02 || firstByte >= 0x80 && firstByte <= 0xff) {
- return 'lzma2';
- }
- // All other values indicate LZMA1 (raw LZMA data)
- return 'lzma1';
- }
+ export { decodeLzma } from './sync/LzmaDecoder.js';
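Because `detectLzmaFormat` (along with the decoder classes) is no longer re-exported, callers that depended on it can inline the deleted heuristic. This is a copy of the removed implementation with the original type annotations restored:

```ts
// Heuristic dropped from lzma/index in 0.2.x: LZMA2 streams start with a
// control byte of 0x00, 0x01, 0x02, or 0x80-0xFF; anything else is raw LZMA1.
function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2' {
  if (data.length === 0) {
    // Default to LZMA2 for empty data (matches LZMA2 decoder behavior)
    return 'lzma2';
  }
  const firstByte = data[0];
  if (firstByte === 0x00 || firstByte === 0x01 || firstByte === 0x02 || (firstByte >= 0x80 && firstByte <= 0xff)) {
    return 'lzma2';
  }
  // All other values indicate LZMA1 (raw LZMA data)
  return 'lzma1';
}
```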
package/dist/esm/lzma/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/index.ts"],"sourcesContent":["/**\n * LZMA Decoder Module\n *\n * Provides both synchronous and streaming LZMA1/LZMA2 decoders.\n *\n * Synchronous API: Use when input is a complete Buffer\n * Streaming API: Use with Transform streams for memory-efficient decompression\n *\n * LZMA1 vs LZMA2:\n * - LZMA2 is chunked and supports true streaming with bounded memory\n * - LZMA1 has no chunk boundaries and requires buffering all input for streaming\n */\n\n// Streaming decoders (Transform streams)\nexport { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.ts';\nexport { decodeLzma2, Lzma2Decoder } from './sync/Lzma2Decoder.ts';\n// Synchronous decoders (for Buffer input)\nexport { decodeLzma, LzmaDecoder } from './sync/LzmaDecoder.ts';\nexport { BitTreeDecoder, RangeDecoder } from './sync/RangeDecoder.ts';\n// Type exports\nexport * from './types.ts';\n\n/**\n * Detect LZMA format from compressed data\n *\n * LZMA2 uses chunk-based framing with control bytes:\n * - 0x00: End of stream\n * - 0x01-0x02: Uncompressed chunks\n * - 0x80-0xFF: LZMA compressed chunks\n *\n * LZMA1 is raw LZMA-compressed data (no framing)\n *\n * @param data - Compressed data to analyze\n * @returns 'lzma1' for LZMA1, 'lzma2' for LZMA2\n */\nexport function detectLzmaFormat(data: Buffer): 'lzma1' | 'lzma2' {\n if (data.length === 0) {\n // Default to LZMA2 for empty data (matches LZMA2 decoder behavior)\n return 'lzma2';\n }\n\n const firstByte = data[0];\n\n // LZMA2 control bytes: 0x00, 0x01, 0x02, or 0x80-0xFF\n if (firstByte === 0x00 || firstByte === 0x01 || firstByte === 0x02 || (firstByte >= 0x80 && firstByte <= 0xff)) {\n return 'lzma2';\n }\n\n // All other values indicate LZMA1 (raw LZMA data)\n return 'lzma1';\n}\n"],"names":["createLzma2Decoder","createLzmaDecoder","decodeLzma2","Lzma2Decoder","decodeLzma","LzmaDecoder","BitTreeDecoder","RangeDecoder","detectLzmaFormat","data","length","firstByte"],"mappings":"AAAA;;;;;;;;;;;CAWC,GAED,yCAAyC;AACzC,SAASA,kBAAkB,EAAEC,iBAAiB,QAAQ,yBAAyB;AAC/E,SAASC,WAAW,EAAEC,YAAY,QAAQ,yBAAyB;AACnE,0CAA0C;AAC1C,SAASC,UAAU,EAAEC,WAAW,QAAQ,wBAAwB;AAChE,SAASC,cAAc,EAAEC,YAAY,QAAQ,yBAAyB;AACtE,eAAe;AACf,cAAc,aAAa;AAE3B;;;;;;;;;;;;CAYC,GACD,OAAO,SAASC,iBAAiBC,IAAY;IAC3C,IAAIA,KAAKC,MAAM,KAAK,GAAG;QACrB,mEAAmE;QACnE,OAAO;IACT;IAEA,MAAMC,YAAYF,IAAI,CAAC,EAAE;IAEzB,sDAAsD;IACtD,IAAIE,cAAc,QAAQA,cAAc,QAAQA,cAAc,QAASA,aAAa,QAAQA,aAAa,MAAO;QAC9G,OAAO;IACT;IAEA,kDAAkD;IAClD,OAAO;AACT"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/index.ts"],"sourcesContent":["/**\n * LZMA Decoder Module\n *\n * Provides both synchronous and streaming LZMA1/LZMA2 decoders.\n *\n * Synchronous API: Use when input is a complete Buffer\n * Streaming API: Use with Transform streams for memory-efficient decompression\n *\n * LZMA1 vs LZMA2:\n * - LZMA2 is chunked and supports true streaming with bounded memory\n * - LZMA1 has no chunk boundaries and requires buffering all input for streaming\n */\n\n// Streaming decoders (Transform streams)\nexport { createLzma2Decoder, createLzmaDecoder } from './stream/transforms.ts';\nexport { decodeLzma2 } from './sync/Lzma2Decoder.ts';\n// Synchronous decoders (for Buffer input)\nexport { decodeLzma } from './sync/LzmaDecoder.ts';\n"],"names":["createLzma2Decoder","createLzmaDecoder","decodeLzma2","decodeLzma"],"mappings":"AAAA;;;;;;;;;;;CAWC,GAED,yCAAyC;AACzC,SAASA,kBAAkB,EAAEC,iBAAiB,QAAQ,yBAAyB;AAC/E,SAASC,WAAW,QAAQ,yBAAyB;AACrD,0CAA0C;AAC1C,SAASC,UAAU,QAAQ,wBAAwB"}
@@ -57,7 +57,9 @@ export declare class Lzma2Decoder {
57
57
  * @param input - LZMA2 compressed data
58
58
  * @param properties - 1-byte properties (dictionary size)
59
59
  * @param unpackSize - Expected output size (optional, autodetects if not provided)
60
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
60
+ * @param outputSink - Optional output sink with write callback for streaming (returns bytes written)
61
61
  * @returns Decompressed data (or bytes written if outputSink provided)
62
62
  */
63
- export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number;
63
+ export declare function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: {
64
+ write(buffer: Buffer): void;
65
+ }): Buffer | number;
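A hedged sketch of the two call shapes this declaration allows — direct Buffer return versus the inline write-callback sink. The root 'xz-compat' import and the sample inputs are assumptions, not confirmed exports of 0.2.1:

    import { readFileSync } from 'fs';
    import { decodeLzma2 } from 'xz-compat'; // assumed re-export; adjust to the actual entry point

    // Hypothetical inputs: a 1-byte LZMA2 properties value plus the chunked payload.
    const props = Buffer.from([0x18]);
    const data = readFileSync('payload.lzma2.bin'); // hypothetical file

    // Buffer mode: no sink, so the decoder returns the decompressed Buffer.
    const decoded = decodeLzma2(data, props) as Buffer;

    // Sink mode: each decoded chunk is handed to write(); the return value is total bytes written.
    const written = decodeLzma2(data, props, undefined, {
      write(buffer: Buffer): void {
        process.stdout.write(buffer); // stream the output instead of buffering it
      },
    }) as number;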
@@ -164,14 +164,15 @@ import { LzmaDecoder } from './LzmaDecoder.js';
164
164
  }
165
165
  // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)
166
166
  const useSolid = !chunk.stateReset || chunk.stateReset && !chunk.dictReset;
167
- // Decode LZMA chunk
168
- const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);
169
- const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);
170
- // Copy to output
167
+ // Decode LZMA chunk - use zero-copy when we have pre-allocated buffer
171
168
  if (outputBuffer) {
172
- decoded.copy(outputBuffer, outputPos);
173
- outputPos += decoded.length;
169
+ // Zero-copy: decode directly into caller's buffer
170
+ const bytesWritten = this.lzmaDecoder.decodeToBuffer(input, dataOffset, chunk.uncompSize, outputBuffer, outputPos, useSolid);
171
+ outputPos += bytesWritten;
174
172
  } else {
173
+ // No pre-allocation: decode to new buffer and collect chunks
174
+ const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);
175
+ const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);
175
176
  outputChunks.push(decoded);
176
177
  }
177
178
  offset = dataOffset + chunk.compSize;
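The practical upshot of this hunk: supplying unpackSize lets decode() pre-allocate the output once and route every compressed chunk through the new decodeToBuffer() path instead of slicing and copying. A sketch from the caller's side (import path assumed):

    import { decodeLzma2 } from 'xz-compat'; // assumed re-export

    // `uncompressedSize` would typically come from the surrounding container
    // (e.g. an XZ block header); passing it enables the pre-allocated zero-copy path.
    function decompressBlock(data: Buffer, props: Buffer, uncompressedSize: number): Buffer {
      return decodeLzma2(data, props, uncompressedSize) as Buffer;
    }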
@@ -198,7 +199,7 @@ import { LzmaDecoder } from './LzmaDecoder.js';
198
199
  * @param input - LZMA2 compressed data
199
200
  * @param properties - 1-byte properties (dictionary size)
200
201
  * @param unpackSize - Expected output size (optional, autodetects if not provided)
201
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
202
+ * @param outputSink - Optional output sink with write callback for streaming (returns bytes written)
202
203
  * @returns Decompressed data (or bytes written if outputSink provided)
203
204
  */ export function decodeLzma2(input, properties, unpackSize, outputSink) {
204
205
  const decoder = new Lzma2Decoder(properties, outputSink);
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/sync/Lzma2Decoder.ts"],"sourcesContent":["/**\n * Synchronous LZMA2 Decoder\n *\n * LZMA2 is a container format that wraps LZMA chunks with framing.\n * Decodes LZMA2 data from a buffer.\n */\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.ts';\nimport { type OutputSink, parseLzma2DictionarySize } from '../types.ts';\nimport { LzmaDecoder } from './LzmaDecoder.ts';\n\n/**\n * Synchronous LZMA2 decoder\n */\nexport class Lzma2Decoder {\n private lzmaDecoder: LzmaDecoder;\n private dictionarySize: number;\n private propsSet: boolean;\n\n constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink) {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n this.dictionarySize = parseLzma2DictionarySize(properties[0]);\n this.lzmaDecoder = new LzmaDecoder(outputSink);\n this.lzmaDecoder.setDictionarySize(this.dictionarySize);\n this.propsSet = false;\n }\n\n /**\n * Reset the dictionary (for stream boundaries)\n */\n resetDictionary(): void {\n this.lzmaDecoder.resetDictionary();\n }\n\n /**\n * Reset all probability models (for stream boundaries)\n */\n resetProbabilities(): void {\n this.lzmaDecoder.resetProbabilities();\n }\n\n /**\n * Set LZMA properties\n */\n setLcLpPb(lc: number, lp: number, pb: number): boolean {\n return this.lzmaDecoder.setLcLpPb(lc, lp, pb);\n }\n\n /**\n * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)\n */\n feedUncompressed(data: Buffer): void {\n this.lzmaDecoder.feedUncompressed(data);\n }\n\n /**\n * Decode raw LZMA data (used internally for LZMA2 chunks)\n * @param input - LZMA compressed data\n * @param offset - Input offset\n * @param outSize - Expected output size\n * @param solid - Use solid mode\n * @returns Decompressed data\n */\n decodeLzmaData(input: Buffer, offset: number, outSize: number, solid = false): Buffer {\n return this.lzmaDecoder.decode(input, offset, outSize, solid);\n }\n\n /**\n * Decode LZMA2 data with streaming output\n * @param input - LZMA2 compressed data\n * @returns Total number of bytes written to sink\n */\n decodeWithSink(input: Buffer): number {\n let totalBytes = 0;\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n totalBytes += uncompData.length;\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk directly to sink\n totalBytes += this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Flush any remaining data in the OutWindow\n this.lzmaDecoder.flushOutWindow();\n\n return totalBytes;\n }\n\n /**\n * Decode LZMA2 data\n * @param input - LZMA2 compressed data\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\n decode(input: Buffer, unpackSize?: number): Buffer {\n // Pre-allocate output buffer if size is known\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Copy to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks.push(uncompData);\n }\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk\n const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);\n const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);\n\n // Copy to output\n if (outputBuffer) {\n decoded.copy(outputBuffer, outputPos);\n outputPos += decoded.length;\n } else {\n outputChunks.push(decoded);\n }\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n }\n}\n\n/**\n * Decode LZMA2 data synchronously\n * @param input - LZMA2 compressed data\n * @param properties - 1-byte properties (dictionary size)\n * @param unpackSize - Expected output size (optional, autodetects if not provided)\n * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)\n * @returns Decompressed data (or bytes written if outputSink provided)\n */\nexport function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: OutputSink): Buffer | number {\n const decoder = new Lzma2Decoder(properties, outputSink);\n if (outputSink) {\n // Zero-copy mode: write to sink during decode\n return decoder.decodeWithSink(input);\n }\n // Buffering mode: returns Buffer (zero-copy)\n return decoder.decode(input, unpackSize);\n}\n"],"names":["allocBufferUnsafe","parseLzma2ChunkHeader","parseLzma2DictionarySize","LzmaDecoder","Lzma2Decoder","resetDictionary","lzmaDecoder","resetProbabilities","setLcLpPb","lc","lp","pb","feedUncompressed","data","decodeLzmaData","input","offset","outSize","solid","decode","decodeWithSink","totalBytes","length","result","success","Error","chunk","type","dataSize","uncompSize","compSize","headerSize","dictReset","dataOffset","uncompData","slice","newProps","propsSet","stateReset","useSolid","flushOutWindow","unpackSize","outputBuffer","outputPos","outputChunks","copy","push","chunkData","decoded","Buffer","concat","properties","outputSink","dictionarySize","setDictionarySize","decodeLzma2","decoder"],"mappings":"AAAA;;;;;CAKC,GAED,SAASA,iBAAiB,QAAQ,wBAAwB;AAC1D,SAASC,qBAAqB,QAAQ,yBAAyB;AAC/D,SAA0BC,wBAAwB,QAAQ,cAAc;AACxE,SAASC,WAAW,QAAQ,mBAAmB;AAE/C;;CAEC,GACD,OAAO,MAAMC;IAgBX;;GAEC,GACDC,kBAAwB;QACtB,IAAI,CAACC,WAAW,CAACD,eAAe;IAClC;IAEA;;GAEC,GACDE,qBAA2B;QACzB,IAAI,CAACD,WAAW,CAACC,kBAAkB;IACrC;IAEA;;GAEC,GACDC,UAAUC,EAAU,EAAEC,EAAU,EAAEC,EAAU,EAAW;QACrD,OAAO,IAAI,CAACL,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC;IAC5C;IAEA;;GAEC,GACDC,iBAAiBC,IAAY,EAAQ;QACnC,IAAI,CAACP,WAAW,CAACM,gBAAgB,CAACC;IACpC;IAEA;;;;;;;GAOC,GACDC,eAAeC,KAAa,EAAEC,MAAc,EAAEC,OAAe,EAAEC,QAAQ,KAAK,EAAU;QACpF,OAAO,IAAI,CAACZ,WAAW,CAACa,MAAM,CAACJ,OAAOC,QAAQC,SAASC;IACzD;IAEA;;;;GAIC,GACDE,eAAeL,KAAa,EAAU;QACpC,IAAIM,aAAa;QACjB,IAAIL,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,kFAAkF;gBAClF,IAAI,CAACvB,WAAW,CAACM,gBAAgB,CAACsB;gBAElCb,cAAca,WAAWZ,MAAM;gBAC/BN,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;
gBAEA,uBAAuB;gBACvB,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,qCAAqC;gBACrCX,cAAc,IAAI,CAACf,WAAW,CAACc,cAAc,CAACL,OAAOkB,YAAYP,MAAMG,UAAU,EAAEU;gBAEnFvB,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,4CAA4C;QAC5C,IAAI,CAACxB,WAAW,CAACkC,cAAc;QAE/B,OAAOnB;IACT;IAEA;;;;;GAKC,GACDF,OAAOJ,KAAa,EAAE0B,UAAmB,EAAU;QACjD,8CAA8C;QAC9C,IAAIC,eAA8B;QAClC,IAAIC,YAAY;QAChB,MAAMC,eAAyB,EAAE;QAEjC,IAAIH,cAAcA,aAAa,GAAG;YAChCC,eAAe1C,kBAAkByC;QACnC;QAEA,IAAIzB,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,iBAAiB;gBACjB,IAAIa,cAAc;oBAChBR,WAAWW,IAAI,CAACH,cAAcC;oBAC9BA,aAAaT,WAAWZ,MAAM;gBAChC,OAAO;oBACLsB,aAAaE,IAAI,CAACZ;gBACpB;gBAEA,kFAAkF;gBAClF,IAAI,CAAC5B,WAAW,CAACM,gBAAgB,CAACsB;gBAElClB,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;gBAEA,0GAA0G;gBAC1G,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,oBAAoB;gBACpB,MAAMe,YAAYhC,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMI,QAAQ;gBACrE,MAAMkB,UAAU,IAAI,CAAC1C,WAAW,CAACa,MAAM,CAAC4B,WAAW,GAAGrB,MAAMG,UAAU,EAAEU;gBAExE,iBAAiB;gBACjB,IAAIG,cAAc;oBAChBM,QAAQH,IAAI,CAACH,cAAcC;oBAC3BA,aAAaK,QAAQ1B,MAAM;gBAC7B,OAAO;oBACLsB,aAAaE,IAAI,CAACE;gBACpB;gBAEAhC,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,qDAAqD;QACrD,IAAIY,cAAc;YAChB,OAAOC,YAAYD,aAAapB,MAAM,GAAGoB,aAAaP,KAAK,CAAC,GAAGQ,aAAaD;QAC9E;QACA,OAAOO,OAAOC,MAAM,CAACN;IACvB;IA3OA,YAAYO,UAA+B,EAAEC,UAAuB,CAAE;QACpE,IAAI,CAACD,cAAcA,WAAW7B,MAAM,GAAG,GAAG;YACxC,MAAM,IAAIG,MAAM;QAClB;QAEA,IAAI,CAAC4B,cAAc,GAAGnD,yBAAyBiD,UAAU,CAAC,EAAE;QAC5D,IAAI,CAAC7C,WAAW,GAAG,IAAIH,YAAYiD;QACnC,IAAI,CAAC9C,WAAW,CAACgD,iBAAiB,CAAC,IAAI,CAACD,cAAc;QACtD,IAAI,CAAChB,QAAQ,GAAG;IAClB;AAmOF;AAEA;;;;;;;CAOC,GACD,OAAO,SAASkB,YAAYxC,KAAa,EAAEoC,UAA+B,EAAEV,UAAmB,EAAEW,UAAuB;IACtH,MAAMI,UAAU,IAAIpD,aAAa+C,YAAYC;IAC7C,IAAIA,YAAY;QACd,8CAA8C;QAC9C,OAAOI,QAAQpC,cAAc,CAACL;IAChC;IACA,6CAA6C;IAC7C,OAAOyC,QAAQrC,MAAM,CAACJ,OAAO0B;AAC/B"}
1
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/xz-compat/src/lzma/sync/Lzma2Decoder.ts"],"sourcesContent":["/**\n * Synchronous LZMA2 Decoder\n *\n * LZMA2 is a container format that wraps LZMA chunks with framing.\n * Decodes LZMA2 data from a buffer.\n */\n\nimport { allocBufferUnsafe } from 'extract-base-iterator';\nimport { parseLzma2ChunkHeader } from '../Lzma2ChunkParser.ts';\nimport { type OutputSink, parseLzma2DictionarySize } from '../types.ts';\nimport { LzmaDecoder } from './LzmaDecoder.ts';\n\n/**\n * Synchronous LZMA2 decoder\n */\nexport class Lzma2Decoder {\n private lzmaDecoder: LzmaDecoder;\n private dictionarySize: number;\n private propsSet: boolean;\n\n constructor(properties: Buffer | Uint8Array, outputSink?: OutputSink) {\n if (!properties || properties.length < 1) {\n throw new Error('LZMA2 requires properties byte');\n }\n\n this.dictionarySize = parseLzma2DictionarySize(properties[0]);\n this.lzmaDecoder = new LzmaDecoder(outputSink);\n this.lzmaDecoder.setDictionarySize(this.dictionarySize);\n this.propsSet = false;\n }\n\n /**\n * Reset the dictionary (for stream boundaries)\n */\n resetDictionary(): void {\n this.lzmaDecoder.resetDictionary();\n }\n\n /**\n * Reset all probability models (for stream boundaries)\n */\n resetProbabilities(): void {\n this.lzmaDecoder.resetProbabilities();\n }\n\n /**\n * Set LZMA properties\n */\n setLcLpPb(lc: number, lp: number, pb: number): boolean {\n return this.lzmaDecoder.setLcLpPb(lc, lp, pb);\n }\n\n /**\n * Feed uncompressed data to the dictionary (for subsequent LZMA chunks)\n */\n feedUncompressed(data: Buffer): void {\n this.lzmaDecoder.feedUncompressed(data);\n }\n\n /**\n * Decode raw LZMA data (used internally for LZMA2 chunks)\n * @param input - LZMA compressed data\n * @param offset - Input offset\n * @param outSize - Expected output size\n * @param solid - Use solid mode\n * @returns Decompressed data\n */\n decodeLzmaData(input: Buffer, offset: number, outSize: number, solid = false): Buffer {\n return this.lzmaDecoder.decode(input, offset, outSize, solid);\n }\n\n /**\n * Decode LZMA2 data with streaming output\n * @param input - LZMA2 compressed data\n * @returns Total number of bytes written to sink\n */\n decodeWithSink(input: Buffer): number {\n let totalBytes = 0;\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n totalBytes += uncompData.length;\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk directly to sink\n totalBytes += this.lzmaDecoder.decodeWithSink(input, dataOffset, chunk.uncompSize, useSolid);\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Flush any remaining data in the OutWindow\n this.lzmaDecoder.flushOutWindow();\n\n return totalBytes;\n }\n\n /**\n * Decode LZMA2 data\n * @param input - LZMA2 compressed data\n * @param unpackSize - Expected output size (optional, for pre-allocation)\n * @returns Decompressed data\n */\n decode(input: Buffer, unpackSize?: number): Buffer {\n // Pre-allocate output buffer if size is known\n let outputBuffer: Buffer | null = null;\n let outputPos = 0;\n const outputChunks: Buffer[] = [];\n\n if (unpackSize && unpackSize > 0) {\n outputBuffer = allocBufferUnsafe(unpackSize);\n }\n\n let offset = 0;\n\n while (offset < input.length) {\n const result = parseLzma2ChunkHeader(input, offset);\n\n if (!result.success) {\n throw new Error('Truncated LZMA2 chunk header');\n }\n\n const chunk = result.chunk;\n\n if (chunk.type === 'end') {\n break;\n }\n\n // Validate we have enough data for the chunk\n const dataSize = chunk.type === 'uncompressed' ? 
chunk.uncompSize : chunk.compSize;\n if (offset + chunk.headerSize + dataSize > input.length) {\n throw new Error(`Truncated LZMA2 ${chunk.type} data`);\n }\n\n // Handle dictionary reset\n if (chunk.dictReset) {\n this.lzmaDecoder.resetDictionary();\n }\n\n const dataOffset = offset + chunk.headerSize;\n\n if (chunk.type === 'uncompressed') {\n const uncompData = input.slice(dataOffset, dataOffset + chunk.uncompSize);\n\n // Copy to output\n if (outputBuffer) {\n uncompData.copy(outputBuffer, outputPos);\n outputPos += uncompData.length;\n } else {\n outputChunks.push(uncompData);\n }\n\n // Feed uncompressed data to dictionary so subsequent LZMA chunks can reference it\n this.lzmaDecoder.feedUncompressed(uncompData);\n\n offset = dataOffset + chunk.uncompSize;\n } else {\n // LZMA compressed chunk\n\n // Apply new properties if present\n if (chunk.newProps) {\n const { lc, lp, pb } = chunk.newProps;\n if (!this.lzmaDecoder.setLcLpPb(lc, lp, pb)) {\n throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);\n }\n this.propsSet = true;\n }\n\n if (!this.propsSet) {\n throw new Error('LZMA chunk without properties');\n }\n\n // Reset probabilities if state reset\n if (chunk.stateReset) {\n this.lzmaDecoder.resetProbabilities();\n }\n\n // Determine solid mode - preserve dictionary if not resetting state or if only resetting state (not dict)\n const useSolid = !chunk.stateReset || (chunk.stateReset && !chunk.dictReset);\n\n // Decode LZMA chunk - use zero-copy when we have pre-allocated buffer\n if (outputBuffer) {\n // Zero-copy: decode directly into caller's buffer\n const bytesWritten = this.lzmaDecoder.decodeToBuffer(input, dataOffset, chunk.uncompSize, outputBuffer, outputPos, useSolid);\n outputPos += bytesWritten;\n } else {\n // No pre-allocation: decode to new buffer and collect chunks\n const chunkData = input.slice(dataOffset, dataOffset + chunk.compSize);\n const decoded = this.lzmaDecoder.decode(chunkData, 0, chunk.uncompSize, useSolid);\n outputChunks.push(decoded);\n }\n\n offset = dataOffset + chunk.compSize;\n }\n }\n\n // Return pre-allocated buffer or concatenated chunks\n if (outputBuffer) {\n return outputPos < outputBuffer.length ? 
outputBuffer.slice(0, outputPos) : outputBuffer;\n }\n return Buffer.concat(outputChunks);\n }\n}\n\n/**\n * Decode LZMA2 data synchronously\n * @param input - LZMA2 compressed data\n * @param properties - 1-byte properties (dictionary size)\n * @param unpackSize - Expected output size (optional, autodetects if not provided)\n * @param outputSink - Optional output sink with write callback for streaming (returns bytes written)\n * @returns Decompressed data (or bytes written if outputSink provided)\n */\nexport function decodeLzma2(input: Buffer, properties: Buffer | Uint8Array, unpackSize?: number, outputSink?: { write(buffer: Buffer): void }): Buffer | number {\n const decoder = new Lzma2Decoder(properties, outputSink as OutputSink);\n if (outputSink) {\n // Zero-copy mode: write to sink during decode\n return decoder.decodeWithSink(input);\n }\n // Buffering mode: returns Buffer (zero-copy)\n return decoder.decode(input, unpackSize);\n}\n"],"names":["allocBufferUnsafe","parseLzma2ChunkHeader","parseLzma2DictionarySize","LzmaDecoder","Lzma2Decoder","resetDictionary","lzmaDecoder","resetProbabilities","setLcLpPb","lc","lp","pb","feedUncompressed","data","decodeLzmaData","input","offset","outSize","solid","decode","decodeWithSink","totalBytes","length","result","success","Error","chunk","type","dataSize","uncompSize","compSize","headerSize","dictReset","dataOffset","uncompData","slice","newProps","propsSet","stateReset","useSolid","flushOutWindow","unpackSize","outputBuffer","outputPos","outputChunks","copy","push","bytesWritten","decodeToBuffer","chunkData","decoded","Buffer","concat","properties","outputSink","dictionarySize","setDictionarySize","decodeLzma2","decoder"],"mappings":"AAAA;;;;;CAKC,GAED,SAASA,iBAAiB,QAAQ,wBAAwB;AAC1D,SAASC,qBAAqB,QAAQ,yBAAyB;AAC/D,SAA0BC,wBAAwB,QAAQ,cAAc;AACxE,SAASC,WAAW,QAAQ,mBAAmB;AAE/C;;CAEC,GACD,OAAO,MAAMC;IAgBX;;GAEC,GACDC,kBAAwB;QACtB,IAAI,CAACC,WAAW,CAACD,eAAe;IAClC;IAEA;;GAEC,GACDE,qBAA2B;QACzB,IAAI,CAACD,WAAW,CAACC,kBAAkB;IACrC;IAEA;;GAEC,GACDC,UAAUC,EAAU,EAAEC,EAAU,EAAEC,EAAU,EAAW;QACrD,OAAO,IAAI,CAACL,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC;IAC5C;IAEA;;GAEC,GACDC,iBAAiBC,IAAY,EAAQ;QACnC,IAAI,CAACP,WAAW,CAACM,gBAAgB,CAACC;IACpC;IAEA;;;;;;;GAOC,GACDC,eAAeC,KAAa,EAAEC,MAAc,EAAEC,OAAe,EAAEC,QAAQ,KAAK,EAAU;QACpF,OAAO,IAAI,CAACZ,WAAW,CAACa,MAAM,CAACJ,OAAOC,QAAQC,SAASC;IACzD;IAEA;;;;GAIC,GACDE,eAAeL,KAAa,EAAU;QACpC,IAAIM,aAAa;QACjB,IAAIL,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,kFAAkF;gBAClF,IAAI,CAACvB,WAAW,CAACM,gBAAgB,CAACsB;gBAElCb,cAAca,WAAWZ,MAAM;gBAC/BN,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qC
AAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;gBAEA,uBAAuB;gBACvB,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,qCAAqC;gBACrCX,cAAc,IAAI,CAACf,WAAW,CAACc,cAAc,CAACL,OAAOkB,YAAYP,MAAMG,UAAU,EAAEU;gBAEnFvB,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,4CAA4C;QAC5C,IAAI,CAACxB,WAAW,CAACkC,cAAc;QAE/B,OAAOnB;IACT;IAEA;;;;;GAKC,GACDF,OAAOJ,KAAa,EAAE0B,UAAmB,EAAU;QACjD,8CAA8C;QAC9C,IAAIC,eAA8B;QAClC,IAAIC,YAAY;QAChB,MAAMC,eAAyB,EAAE;QAEjC,IAAIH,cAAcA,aAAa,GAAG;YAChCC,eAAe1C,kBAAkByC;QACnC;QAEA,IAAIzB,SAAS;QAEb,MAAOA,SAASD,MAAMO,MAAM,CAAE;YAC5B,MAAMC,SAAStB,sBAAsBc,OAAOC;YAE5C,IAAI,CAACO,OAAOC,OAAO,EAAE;gBACnB,MAAM,IAAIC,MAAM;YAClB;YAEA,MAAMC,QAAQH,OAAOG,KAAK;YAE1B,IAAIA,MAAMC,IAAI,KAAK,OAAO;gBACxB;YACF;YAEA,6CAA6C;YAC7C,MAAMC,WAAWF,MAAMC,IAAI,KAAK,iBAAiBD,MAAMG,UAAU,GAAGH,MAAMI,QAAQ;YAClF,IAAId,SAASU,MAAMK,UAAU,GAAGH,WAAWb,MAAMO,MAAM,EAAE;gBACvD,MAAM,IAAIG,MAAM,CAAC,gBAAgB,EAAEC,MAAMC,IAAI,CAAC,KAAK,CAAC;YACtD;YAEA,0BAA0B;YAC1B,IAAID,MAAMM,SAAS,EAAE;gBACnB,IAAI,CAAC1B,WAAW,CAACD,eAAe;YAClC;YAEA,MAAM4B,aAAajB,SAASU,MAAMK,UAAU;YAE5C,IAAIL,MAAMC,IAAI,KAAK,gBAAgB;gBACjC,MAAMO,aAAanB,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMG,UAAU;gBAExE,iBAAiB;gBACjB,IAAIa,cAAc;oBAChBR,WAAWW,IAAI,CAACH,cAAcC;oBAC9BA,aAAaT,WAAWZ,MAAM;gBAChC,OAAO;oBACLsB,aAAaE,IAAI,CAACZ;gBACpB;gBAEA,kFAAkF;gBAClF,IAAI,CAAC5B,WAAW,CAACM,gBAAgB,CAACsB;gBAElClB,SAASiB,aAAaP,MAAMG,UAAU;YACxC,OAAO;gBACL,wBAAwB;gBAExB,kCAAkC;gBAClC,IAAIH,MAAMU,QAAQ,EAAE;oBAClB,MAAM,EAAE3B,EAAE,EAAEC,EAAE,EAAEC,EAAE,EAAE,GAAGe,MAAMU,QAAQ;oBACrC,IAAI,CAAC,IAAI,CAAC9B,WAAW,CAACE,SAAS,CAACC,IAAIC,IAAIC,KAAK;wBAC3C,MAAM,IAAIc,MAAM,CAAC,4BAA4B,EAAEhB,GAAG,IAAI,EAAEC,GAAG,IAAI,EAAEC,IAAI;oBACvE;oBACA,IAAI,CAAC0B,QAAQ,GAAG;gBAClB;gBAEA,IAAI,CAAC,IAAI,CAACA,QAAQ,EAAE;oBAClB,MAAM,IAAIZ,MAAM;gBAClB;gBAEA,qCAAqC;gBACrC,IAAIC,MAAMY,UAAU,EAAE;oBACpB,IAAI,CAAChC,WAAW,CAACC,kBAAkB;gBACrC;gBAEA,0GAA0G;gBAC1G,MAAMgC,WAAW,CAACb,MAAMY,UAAU,IAAKZ,MAAMY,UAAU,IAAI,CAACZ,MAAMM,SAAS;gBAE3E,sEAAsE;gBACtE,IAAIU,cAAc;oBAChB,kDAAkD;oBAClD,MAAMK,eAAe,IAAI,CAACzC,WAAW,CAAC0C,cAAc,CAACjC,OAAOkB,YAAYP,MAAMG,UAAU,EAAEa,cAAcC,WAAWJ;oBACnHI,aAAaI;gBACf,OAAO;oBACL,6DAA6D;oBAC7D,MAAME,YAAYlC,MAAMoB,KAAK,CAACF,YAAYA,aAAaP,MAAMI,QAAQ;oBACrE,MAAMoB,UAAU,IAAI,CAAC5C,WAAW,CAACa,MAAM,CAAC8B,WAAW,GAAGvB,MAAMG,UAAU,EAAEU;oBACxEK,aAAaE,IAAI,CAACI;gBACpB;gBAEAlC,SAASiB,aAAaP,MAAMI,QAAQ;YACtC;QACF;QAEA,qDAAqD;QACrD,IAAIY,cAAc;YAChB,OAAOC,YAAYD,aAAapB,MAAM,GAAGoB,aAAaP,KAAK,CAAC,GAAGQ,aAAaD;QAC9E;QACA,OAAOS,OAAOC,MAAM,CAACR;IACvB;IA3OA,YAAYS,UAA+B,EAAEC,UAAuB,CAAE;QACpE,IAAI,CAACD,cAAcA,WAAW/B,MAAM,GAAG,GAAG;YACxC,MAAM,IAAIG,MAAM;QAClB;QAEA,IAAI,CAAC8B,cAAc,GAAGrD,yBAAyBmD,UAAU,CAAC,EAAE;QAC5D,IAAI,CAAC/C,WAAW,GAAG,IAAIH,YAAYmD;QACnC,IAAI,CAAChD,WAAW,CAACkD,iBAAiB,CAAC,IAAI,CAACD,cAAc;QACtD,IAAI,CAAClB,QAAQ,GAAG;IAClB;AAmOF;AAEA;;;;;;;CAOC,GACD,OAAO,SAASoB,YAAY1C,KAAa,EAAEsC,UAA+B,EAAEZ,UAAmB,EAAEa,UAA4C;IAC3I,MAAMI,UAAU,IAAItD,aAAaiD,YAAYC;IAC7C,IAAIA,YAAY;QACd,8CAA8C;QAC9C,OAAOI,QAAQtC,cAAc,CAACL;IAChC;IACA,6CAA6C;IAC7C,OAAO2C,QAAQvC,MAAM,CAACJ,OAAO0B;AAC/B"}
@@ -76,6 +76,17 @@ export declare class LzmaDecoder {
76
76
  * @returns Number of bytes written to sink
77
77
  */
78
78
  decodeWithSink(input: Buffer, inputOffset: number, outSize: number, solid?: boolean): number;
79
+ /**
80
+ * Decode LZMA data directly into caller's buffer (zero-copy)
81
+ * @param input - Compressed input buffer
82
+ * @param inputOffset - Offset into input buffer
83
+ * @param outSize - Expected output size
84
+ * @param output - Pre-allocated output buffer to write to
85
+ * @param outputOffset - Offset in output buffer to start writing
86
+ * @param solid - If true, preserve state from previous decode
87
+ * @returns Number of bytes written
88
+ */
89
+ decodeToBuffer(input: Buffer, inputOffset: number, outSize: number, output: Buffer, outputOffset: number, solid?: boolean): number;
79
90
  /**
80
91
  * Decode LZMA data
81
92
  * @param input - Compressed input buffer
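A sketch of driving decodeToBuffer() directly with a pre-allocated output. The deep import path is an assumption (LzmaDecoder is no longer re-exported from the lzma index in 0.2.1, so it may only be reachable internally), and the lc/lp/pb/dictSize values are presumed to come from the surrounding container:

    import { allocBufferUnsafe } from 'extract-base-iterator';
    // Assumed path; verify where (or whether) the class is exposed before relying on this.
    import { LzmaDecoder } from 'xz-compat/dist/esm/lzma/sync/LzmaDecoder.js';

    function decodeRawLzmaInto(input: Buffer, lc: number, lp: number, pb: number, dictSize: number, outSize: number): Buffer {
      const decoder = new LzmaDecoder();
      decoder.setDictionarySize(dictSize);
      if (!decoder.setLcLpPb(lc, lp, pb)) {
        throw new Error(`Invalid LZMA properties: lc=${lc} lp=${lp} pb=${pb}`);
      }
      const output = allocBufferUnsafe(outSize);
      // Zero-copy: decode straight into `output` starting at offset 0.
      const written = decoder.decodeToBuffer(input, 0, outSize, output, 0);
      return written < outSize ? output.slice(0, written) : output;
    }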
@@ -88,10 +99,17 @@ export declare class LzmaDecoder {
88
99
  }
89
100
  /**
90
101
  * Decode LZMA1 data synchronously
102
+ *
103
+ * Note: LZMA1 is a low-level format. @napi-rs/lzma expects self-describing
104
+ * data (like XZ), but here we accept raw LZMA with properties specified separately.
105
+ * Pure JS implementation is used for LZMA1.
106
+ *
91
107
  * @param input - Compressed data (without 5-byte properties header)
92
108
  * @param properties - 5-byte LZMA properties
93
109
  * @param outSize - Expected output size
94
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
110
+ * @param outputSink - Optional output sink with write callback for streaming (returns bytes written)
95
111
  * @returns Decompressed data (or bytes written if outputSink provided)
96
112
  */
97
- export declare function decodeLzma(input: Buffer, properties: Buffer | Uint8Array, outSize: number, outputSink?: OutputSink): Buffer | number;
113
+ export declare function decodeLzma(input: Buffer, properties: Buffer | Uint8Array, outSize: number, outputSink?: {
114
+ write(buffer: Buffer): void;
115
+ }): Buffer | number;
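And a matching sketch for decodeLzma(), reading a classic .lzma (LZMA_Alone) layout — 5 properties bytes, an 8-byte little-endian uncompressed size, then the payload. The file name and root import are assumptions:

    import { readFileSync } from 'fs';
    import { decodeLzma } from 'xz-compat'; // assumed re-export

    const file = readFileSync('payload.lzma');        // hypothetical input file
    const props = file.subarray(0, 5);                 // 5-byte LZMA properties
    const outSize = Number(file.readBigUInt64LE(5));   // assumes a known (finite) size field
    const data = file.subarray(13);                    // compressed stream without the header

    // Buffer mode: returns the decompressed data.
    const decoded = decodeLzma(data, props, outSize) as Buffer;

    // Sink mode: output is streamed through write(); the return value is bytes written.
    const written = decodeLzma(data, props, outSize, {
      write(buffer: Buffer): void {
        process.stdout.write(buffer);
      },
    }) as number;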
@@ -383,13 +383,15 @@ import { BitTreeDecoder, RangeDecoder, reverseDecodeFromArray } from './RangeDec
383
383
  return outPos;
384
384
  }
385
385
  /**
386
- * Decode LZMA data
386
+ * Decode LZMA data directly into caller's buffer (zero-copy)
387
387
  * @param input - Compressed input buffer
388
388
  * @param inputOffset - Offset into input buffer
389
389
  * @param outSize - Expected output size
390
+ * @param output - Pre-allocated output buffer to write to
391
+ * @param outputOffset - Offset in output buffer to start writing
390
392
  * @param solid - If true, preserve state from previous decode
391
- * @returns Decompressed data
392
- */ decode(input, inputOffset, outSize, solid = false) {
393
+ * @returns Number of bytes written
394
+ */ decodeToBuffer(input, inputOffset, outSize, output, outputOffset, solid = false) {
393
395
  this.rangeDecoder.setInput(input, inputOffset);
394
396
  if (!solid) {
395
397
  this.outWindow.init(false);
@@ -405,10 +407,10 @@ import { BitTreeDecoder, RangeDecoder, reverseDecodeFromArray } from './RangeDec
405
407
  // Solid mode: preserve dictionary state but reinitialize range decoder
406
408
  this.outWindow.init(true);
407
409
  }
408
- const output = allocBufferUnsafe(outSize);
409
- let outPos = 0;
410
+ let outPos = outputOffset;
411
+ const outEnd = outputOffset + outSize;
410
412
  let cumPos = this.totalPos;
411
- while(outPos < outSize){
413
+ while(outPos < outEnd){
412
414
  const posState = cumPos & this.posStateMask;
413
415
  if (this.rangeDecoder.decodeBit(this.isMatchDecoders, (this.state << kNumPosStatesBitsMax) + posState) === 0) {
414
416
  // Literal
@@ -492,6 +494,18 @@ import { BitTreeDecoder, RangeDecoder, reverseDecodeFromArray } from './RangeDec
492
494
  }
493
495
  }
494
496
  this.totalPos = cumPos;
497
+ return outPos - outputOffset;
498
+ }
499
+ /**
500
+ * Decode LZMA data
501
+ * @param input - Compressed input buffer
502
+ * @param inputOffset - Offset into input buffer
503
+ * @param outSize - Expected output size
504
+ * @param solid - If true, preserve state from previous decode
505
+ * @returns Decompressed data
506
+ */ decode(input, inputOffset, outSize, solid = false) {
507
+ const output = allocBufferUnsafe(outSize);
508
+ this.decodeToBuffer(input, inputOffset, outSize, output, 0, solid);
495
509
  return output;
496
510
  }
497
511
  constructor(outputSink){
@@ -526,10 +540,15 @@ import { BitTreeDecoder, RangeDecoder, reverseDecodeFromArray } from './RangeDec
526
540
  }
527
541
  /**
528
542
  * Decode LZMA1 data synchronously
543
+ *
544
+ * Note: LZMA1 is a low-level format. @napi-rs/lzma expects self-describing
545
+ * data (like XZ), but here we accept raw LZMA with properties specified separately.
546
+ * Pure JS implementation is used for LZMA1.
547
+ *
529
548
  * @param input - Compressed data (without 5-byte properties header)
530
549
  * @param properties - 5-byte LZMA properties
531
550
  * @param outSize - Expected output size
532
- * @param outputSink - Optional output sink for zero-copy decoding (returns bytes written)
551
+ * @param outputSink - Optional output sink with write callback for streaming (returns bytes written)
533
552
  * @returns Decompressed data (or bytes written if outputSink provided)
534
553
  */ export function decodeLzma(input, properties, outSize, outputSink) {
535
554
  const decoder = new LzmaDecoder(outputSink);