7z-iterator 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.js +7 -7
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/nextEntry.js +21 -17
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.d.cts +10 -0
- package/dist/cjs/sevenz/SevenZipParser.d.ts +10 -0
- package/dist/cjs/sevenz/SevenZipParser.js +159 -0
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +4 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +4 -1
- package/dist/cjs/sevenz/codecs/Lzma.js +30 -2
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +3 -0
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +3 -0
- package/dist/cjs/sevenz/codecs/Lzma2.js +10 -0
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.cts +35 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.ts +35 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.js +76 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.js.map +1 -0
- package/dist/esm/FileEntry.js +7 -7
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/nextEntry.js +21 -17
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.d.ts +10 -0
- package/dist/esm/sevenz/SevenZipParser.js +158 -0
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +4 -1
- package/dist/esm/sevenz/codecs/Lzma.js +36 -5
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +3 -0
- package/dist/esm/sevenz/codecs/Lzma2.js +20 -6
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/lzmaCompat.d.ts +35 -0
- package/dist/esm/sevenz/codecs/lzmaCompat.js +69 -0
- package/dist/esm/sevenz/codecs/lzmaCompat.js.map +1 -0
- package/package.json +4 -1
@@ -0,0 +1,76 @@
+/**
+ * LZMA compatibility layer - uses native lzma when available, falls back to lzma-purejs
+ *
+ * lzma-native provides native liblzma bindings with rawDecoder support.
+ * This gives significant performance improvements on Node.js 8+ while
+ * maintaining compatibility with Node.js 0.8+ via lzma-purejs fallback.
+ *
+ * The native decoder uses Node.js streams which integrate naturally with
+ * the callback-based async pattern used throughout the iterator libraries.
+ */ "use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: Object.getOwnPropertyDescriptor(all, name).get
+    });
+}
+_export(exports, {
+    get createNativeLzma1Decoder () {
+        return createNativeLzma1Decoder;
+    },
+    get createNativeLzma2Decoder () {
+        return createNativeLzma2Decoder;
+    },
+    get hasNativeLzma () {
+        return hasNativeLzma;
+    }
+});
+var _module = /*#__PURE__*/ _interop_require_default(require("module"));
+function _interop_require_default(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+var _require = typeof require === 'undefined' ? _module.default.createRequire(require("url").pathToFileURL(__filename).toString()) : require;
+// Try to load lzma-native (only on Node 10+ where ES6 class syntax is supported)
+// Note: We must check the version BEFORE requiring because syntax errors during
+// module parsing cannot be caught by try/catch
+var lzmaNative = null;
+var _hasNativeLzmaLib = false;
+var major = +process.versions.node.split('.')[0];
+if (major >= 10) {
+    try {
+        lzmaNative = _require('lzma-native');
+        // Verify rawDecoder support
+        _hasNativeLzmaLib = lzmaNative !== null && typeof lzmaNative.createStream === 'function';
+    } catch (_e) {
+    // lzma-native not available - will use lzma-purejs
+    }
+}
+var hasNativeLzma = _hasNativeLzmaLib;
+function createNativeLzma2Decoder(dictSize) {
+    if (!lzmaNative) {
+        return null;
+    }
+    var filterOpts = {
+        id: lzmaNative.FILTER_LZMA2
+    };
+    if (dictSize !== undefined) {
+        filterOpts.dict_size = dictSize;
+    }
+    return lzmaNative.createStream('rawDecoder', {
+        filters: [
+            filterOpts
+        ]
+    });
+}
+function createNativeLzma1Decoder(_lc, _lp, _pb, _dictSize) {
+    // Native LZMA1 disabled - lzma-native's rawDecoder has issues with 7z's LZMA1 format
+    // (LZMA_BUF_ERROR: No progress is possible)
+    // LZMA2 native works correctly and is more common in modern 7z files
+    return null;
+}
+/* CJS INTEROP */ if (exports.__esModule && exports.default) { try { Object.defineProperty(exports.default, '__esModule', { value: true }); for (var key in exports) { exports.default[key] = exports[key]; } } catch (_) {}; module.exports = exports.default; }
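For context on how the layer above is meant to be consumed: `hasNativeLzma` gates whether the native rawDecoder path is usable, and `createNativeLzma2Decoder` returns a Transform stream or null. A minimal, hypothetical consumer sketch follows; only those two exports are confirmed by this diff, and the lzma-purejs fallback branch is an assumption rather than shown here.

```ts
// Hypothetical consumer of the compat layer above; only hasNativeLzma and
// createNativeLzma2Decoder are confirmed by this diff - the pure-JS fallback
// branch is an assumption.
import { createNativeLzma2Decoder, hasNativeLzma } from './lzmaCompat.ts';

function decodeLzma2(packed: Buffer, dictSize: number | undefined, callback: (err: Error | null, data?: Buffer) => void): void {
  const decoder = hasNativeLzma ? createNativeLzma2Decoder(dictSize) : null;
  if (!decoder) {
    // fall back to lzma-purejs here (omitted in this sketch)
    return callback(new Error('native LZMA decoder unavailable'));
  }
  const chunks: Buffer[] = [];
  let failed = false;
  decoder.on('data', (chunk: Buffer) => chunks.push(chunk));
  decoder.once('error', (err: Error) => { failed = true; callback(err); });
  decoder.once('end', () => { if (!failed) callback(null, Buffer.concat(chunks)); });
  decoder.end(packed); // write the raw LZMA2 stream and signal end-of-input
}
```

The package itself guards the completion callback with on-one (see getDecompressedFolderAsync below) rather than separate once listeners.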
@@ -0,0 +1 @@
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/codecs/lzmaCompat.ts"],"sourcesContent":["/**\n * LZMA compatibility layer - uses native lzma when available, falls back to lzma-purejs\n *\n * lzma-native provides native liblzma bindings with rawDecoder support.\n * This gives significant performance improvements on Node.js 8+ while\n * maintaining compatibility with Node.js 0.8+ via lzma-purejs fallback.\n *\n * The native decoder uses Node.js streams which integrate naturally with\n * the callback-based async pattern used throughout the iterator libraries.\n */\n\nimport Module from 'module';\nimport type { Transform } from 'readable-stream';\n\nvar _require = typeof require === 'undefined' ? Module.createRequire(import.meta.url) : require;\n\n// Try to load lzma-native (only on Node 10+ where ES6 class syntax is supported)\n// Note: We must check the version BEFORE requiring because syntax errors during\n// module parsing cannot be caught by try/catch\nvar lzmaNative: typeof import('lzma-native') | null = null;\nvar _hasNativeLzmaLib = false;\nvar major = +process.versions.node.split('.')[0];\n\nif (major >= 10) {\n try {\n lzmaNative = _require('lzma-native');\n // Verify rawDecoder support\n _hasNativeLzmaLib = lzmaNative !== null && typeof lzmaNative.createStream === 'function';\n } catch (_e) {\n // lzma-native not available - will use lzma-purejs\n }\n}\n\n// Export whether native lzma is available for streaming\nexport var hasNativeLzma = _hasNativeLzmaLib;\n\n/**\n * Create a native LZMA2 decoder stream\n * Returns a Transform stream that decodes LZMA2 data\n *\n * @param dictSize - Dictionary size\n * @returns Transform stream for decoding, or null if native not available\n */\nexport function createNativeLzma2Decoder(dictSize?: number): Transform | null {\n if (!lzmaNative) {\n return null;\n }\n\n var filterOpts: { id: string; dict_size?: number } = {\n id: lzmaNative.FILTER_LZMA2,\n };\n\n if (dictSize !== undefined) {\n filterOpts.dict_size = dictSize;\n }\n\n return lzmaNative.createStream('rawDecoder', {\n filters: [filterOpts],\n }) as unknown as Transform;\n}\n\n/**\n * Create a native LZMA1 decoder stream\n * Returns a Transform stream that decodes LZMA1 data\n *\n * Note: Native LZMA1 decoder disabled due to LZMA_BUF_ERROR issues with\n * lzma-native's rawDecoder for LZMA1. Falls back to lzma-purejs which\n * handles 7z's LZMA1 format correctly. 
LZMA2 native works fine.\n *\n * @param _lc - Literal context bits (0-8)\n * @param _lp - Literal position bits (0-4)\n * @param _pb - Position bits (0-4)\n * @param _dictSize - Dictionary size\n * @returns null - always falls back to pure JS decoder\n */\nexport function createNativeLzma1Decoder(_lc: number, _lp: number, _pb: number, _dictSize: number): Transform | null {\n // Native LZMA1 disabled - lzma-native's rawDecoder has issues with 7z's LZMA1 format\n // (LZMA_BUF_ERROR: No progress is possible)\n // LZMA2 native works correctly and is more common in modern 7z files\n return null;\n}\n"],"names":["createNativeLzma1Decoder","createNativeLzma2Decoder","hasNativeLzma","_require","require","Module","createRequire","lzmaNative","_hasNativeLzmaLib","major","process","versions","node","split","createStream","_e","dictSize","filterOpts","id","FILTER_LZMA2","undefined","dict_size","filters","_lc","_lp","_pb","_dictSize"],"mappings":"AAAA;;;;;;;;;CASC;;;;;;;;;;;QAkEeA;eAAAA;;QAhCAC;eAAAA;;QATLC;eAAAA;;;6DAvBQ;;;;;;AAGnB,IAAIC,WAAW,OAAOC,YAAY,cAAcC,eAAM,CAACC,aAAa,CAAC,uDAAmBF;AAExF,iFAAiF;AACjF,gFAAgF;AAChF,+CAA+C;AAC/C,IAAIG,aAAkD;AACtD,IAAIC,oBAAoB;AACxB,IAAIC,QAAQ,CAACC,QAAQC,QAAQ,CAACC,IAAI,CAACC,KAAK,CAAC,IAAI,CAAC,EAAE;AAEhD,IAAIJ,SAAS,IAAI;IACf,IAAI;QACFF,aAAaJ,SAAS;QACtB,4BAA4B;QAC5BK,oBAAoBD,eAAe,QAAQ,OAAOA,WAAWO,YAAY,KAAK;IAChF,EAAE,OAAOC,IAAI;IACX,mDAAmD;IACrD;AACF;AAGO,IAAIb,gBAAgBM;AASpB,SAASP,yBAAyBe,QAAiB;IACxD,IAAI,CAACT,YAAY;QACf,OAAO;IACT;IAEA,IAAIU,aAAiD;QACnDC,IAAIX,WAAWY,YAAY;IAC7B;IAEA,IAAIH,aAAaI,WAAW;QAC1BH,WAAWI,SAAS,GAAGL;IACzB;IAEA,OAAOT,WAAWO,YAAY,CAAC,cAAc;QAC3CQ,SAAS;YAACL;SAAW;IACvB;AACF;AAgBO,SAASjB,yBAAyBuB,GAAW,EAAEC,GAAW,EAAEC,GAAW,EAAEC,SAAiB;IAC/F,qFAAqF;IACrF,4CAA4C;IAC5C,qEAAqE;IACrE,OAAO;AACT"}
package/dist/esm/FileEntry.js
CHANGED
@@ -28,20 +28,20 @@ let SevenZipFileEntry = class SevenZipFileEntry extends FileEntry {
            callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));
            return;
        }
-
-
+        // Use callback-based async decompression
+        this.parser.getEntryStreamAsync(this.entry, (err, stream)=>{
+            if (err) return callback(err);
+            if (!stream) return callback(new Error('No stream returned'));
            var res = stream.pipe(fs.createWriteStream(fullPath));
            oo(res, [
                'error',
                'end',
                'close',
                'finish'
-            ], (
-
+            ], (writeErr)=>{
+                writeErr ? callback(writeErr) : waitForAccess(fullPath, callback);
            });
-        }
-        callback(err);
-    }
+        });
    }
    destroy() {
        FileEntry.prototype.destroy.call(this);
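The `_writeFile` change above boils down to a reusable pattern: pipe the entry's Readable into a file write stream, let on-one invoke a single callback for whichever of 'error'/'end'/'close'/'finish' fires first, then confirm visibility with waitForAccess. A standalone sketch of that pattern, distilled from the hunk above; the helper name is illustrative, not a package export.

```ts
// Illustrative helper mirroring the _writeFile flow above; writeEntryToFile is not part of the package.
import fs from 'fs';
import oo from 'on-one';
import { waitForAccess } from 'extract-base-iterator';
import type { Readable } from 'readable-stream';

function writeEntryToFile(stream: Readable, fullPath: string, callback: (err?: Error) => void): void {
  const res = stream.pipe(fs.createWriteStream(fullPath));
  // on-one calls the callback exactly once, on the first of the listed events
  oo(res, ['error', 'end', 'close', 'finish'], (writeErr?: Error) => {
    writeErr ? callback(writeErr) : waitForAccess(fullPath, callback);
  });
}
```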
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["import { type FileAttributes, FileEntry, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { ExtractOptions, LockT } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: LockT;\n private entry: SevenZipEntry;\n private parser: SevenZipParser;\n\n constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: LockT) {\n super(attributes);\n this.entry = entry;\n this.parser = parser;\n this.lock = lock;\n this.lock.retain();\n }\n\n create(dest: string, options: ExtractOptions | NoParamCallback, callback: NoParamCallback): undefined | Promise<boolean> {\n if (typeof options === 'function') {\n callback = options;\n options = null;\n }\n\n if (typeof callback === 'function') {\n options = options || {};\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) => {\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n });\n });\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): undefined {\n if (!this.entry || !this.parser) {\n callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));\n return;\n }\n\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["import { type FileAttributes, FileEntry, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { ExtractOptions, LockT } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: LockT;\n private entry: SevenZipEntry;\n private parser: SevenZipParser;\n\n constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: LockT) {\n super(attributes);\n this.entry = entry;\n this.parser = parser;\n this.lock = lock;\n this.lock.retain();\n }\n\n create(dest: string, options: ExtractOptions | NoParamCallback, callback: NoParamCallback): undefined | Promise<boolean> {\n if (typeof options === 'function') {\n callback = options;\n options = null;\n }\n\n if (typeof callback === 'function') {\n options = options || {};\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) => {\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n });\n });\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): undefined {\n if (!this.entry || !this.parser) {\n callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));\n return;\n }\n\n // Use callback-based async decompression\n this.parser.getEntryStreamAsync(this.entry, (err, stream) => {\n if (err) return callback(err);\n if (!stream) return callback(new Error('No stream returned'));\n\n var res = stream.pipe(fs.createWriteStream(fullPath));\n oo(res, ['error', 'end', 'close', 'finish'], (writeErr?: Error) => {\n writeErr ? 
callback(writeErr) : waitForAccess(fullPath, callback);\n });\n });\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n this.entry = null;\n this.parser = null;\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["FileEntry","waitForAccess","fs","oo","SevenZipFileEntry","create","dest","options","callback","prototype","call","err","lock","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","entry","parser","Error","getEntryStreamAsync","stream","res","pipe","createWriteStream","writeErr","destroy","attributes","retain"],"mappings":"AAAA,SAA8BA,SAAS,EAAwBC,aAAa,QAAQ,wBAAwB;AAC5G,OAAOC,QAAQ,KAAK;AACpB,OAAOC,QAAQ,SAAS;AAIT,IAAA,AAAMC,oBAAN,MAAMA,0BAA0BJ;IAa7CK,OAAOC,IAAY,EAAEC,OAAyC,EAAEC,QAAyB,EAAgC;QACvH,IAAI,OAAOD,YAAY,YAAY;YACjCC,WAAWD;YACXA,UAAU;QACZ;QAEA,IAAI,OAAOC,aAAa,YAAY;YAClCD,UAAUA,WAAW,CAAC;YACtB,OAAOP,UAAUS,SAAS,CAACJ,MAAM,CAACK,IAAI,CAAC,IAAI,EAAEJ,MAAMC,SAAS,CAACI;gBAC3DH,SAASG;gBACT,IAAI,IAAI,CAACC,IAAI,EAAE;oBACb,IAAI,CAACA,IAAI,CAACC,OAAO;oBACjB,IAAI,CAACD,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIE,QAAQ,CAACC,SAASC;YAC3B,IAAI,CAACX,MAAM,CAACC,MAAMC,SAAS,CAACI,KAAaM;gBACvCN,MAAMK,OAAOL,OAAOI,QAAQE;YAC9B;QACF;IACF;IAEAC,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB,EAAa;QAC3F,IAAI,CAAC,IAAI,CAACa,KAAK,IAAI,CAAC,IAAI,CAACC,MAAM,EAAE;YAC/Bd,SAAS,IAAIe,MAAM;YACnB;QACF;QAEA,yCAAyC;QACzC,IAAI,CAACD,MAAM,CAACE,mBAAmB,CAAC,IAAI,CAACH,KAAK,EAAE,CAACV,KAAKc;YAChD,IAAId,KAAK,OAAOH,SAASG;YACzB,IAAI,CAACc,QAAQ,OAAOjB,SAAS,IAAIe,MAAM;YAEvC,IAAIG,MAAMD,OAAOE,IAAI,CAACzB,GAAG0B,iBAAiB,CAACT;YAC3ChB,GAAGuB,KAAK;gBAAC;gBAAS;gBAAO;gBAAS;aAAS,EAAE,CAACG;gBAC5CA,WAAWrB,SAASqB,YAAY5B,cAAckB,UAAUX;YAC1D;QACF;IACF;IAEAsB,UAAU;QACR9B,UAAUS,SAAS,CAACqB,OAAO,CAACpB,IAAI,CAAC,IAAI;QACrC,IAAI,CAACW,KAAK,GAAG;QACb,IAAI,CAACC,MAAM,GAAG;QACd,IAAI,IAAI,CAACV,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACC,OAAO;YACjB,IAAI,CAACD,IAAI,GAAG;QACd;IACF;IAzDA,YAAYmB,UAA0B,EAAEV,KAAoB,EAAEC,MAAsB,EAAEV,IAAW,CAAE;QACjG,KAAK,CAACmB;QACN,IAAI,CAACV,KAAK,GAAGA;QACb,IAAI,CAACC,MAAM,GAAGA;QACd,IAAI,CAACV,IAAI,GAAGA;QACZ,IAAI,CAACA,IAAI,CAACoB,MAAM;IAClB;AAoDF;AA/DA,SAAqB5B,+BA+DpB"}
package/dist/esm/nextEntry.js
CHANGED
@@ -54,23 +54,27 @@ export default function nextEntry(iterator, callback) {
            // For symlinks, the file content IS the symlink target path
            // Read the content to get the linkpath for SymbolicLinkEntry
            var parser = iterator.iterator.getParser();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // Use callback-based async decompression
+            parser.getEntryStreamAsync(entry, (err, stream)=>{
+                if (err) return nextCallback(err);
+                if (!stream) return nextCallback(new Error('No stream returned'));
+                var chunks = [];
+                stream.on('data', (chunk)=>{
+                    chunks.push(chunk);
+                });
+                stream.on('end', ()=>{
+                    var linkpath = Buffer.concat(chunks).toString('utf8');
+                    var linkAttributes = {
+                        path: attributes.path,
+                        mtime: attributes.mtime,
+                        mode: attributes.mode,
+                        linkpath: linkpath
+                    };
+                    nextCallback(null, new SymbolicLinkEntry(linkAttributes));
+                });
+                stream.on('error', (streamErr)=>{
+                    nextCallback(streamErr);
+                });
            });
            return;
        }
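Both this hunk and the FileEntry change consume the new error-first `getEntryStreamAsync(entry, callback)` API (declared in the SevenZipParser typings below), which delivers a Readable on success. Collecting that stream into a Buffer, as the symlink branch does for the link target, reduces to a small helper. The sketch below is illustrative only; `streamToBuffer` and `done` are not package exports.

```ts
// Generic equivalent of the inline linkpath read above (illustrative, not a package helper).
import type { Readable } from 'readable-stream';

function streamToBuffer(stream: Readable, callback: (err: Error | null, data?: Buffer) => void): void {
  const chunks: Buffer[] = [];
  stream.on('data', (chunk: Buffer) => chunks.push(chunk));
  stream.on('end', () => callback(null, Buffer.concat(chunks)));
  stream.on('error', (err: Error) => callback(err));
}

// Usage against the parser API added in this release (done is a hypothetical continuation):
// parser.getEntryStreamAsync(entry, (err, stream) => {
//   if (err || !stream) return done(err || new Error('No stream returned'));
//   streamToBuffer(stream, (err2, buf) => done(err2, buf && buf.toString('utf8')));
// });
```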
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => undefined;\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): undefined {\n if (!iterator.iterator) {\n callback(new Error('iterator missing'));\n return;\n }\n\n var entry: SevenZipEntry | null = null;\n entry = iterator.iterator.next();\n\n var nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n var type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n var defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n var mtimeDate = entry.mtime || new Date();\n var attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n var parser = iterator.iterator.getParser();\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => undefined;\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): undefined {\n if (!iterator.iterator) {\n callback(new Error('iterator missing'));\n return;\n }\n\n var entry: SevenZipEntry | null = null;\n entry = iterator.iterator.next();\n\n var nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n var type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n var defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n var mtimeDate = entry.mtime || new Date();\n var attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n var parser = iterator.iterator.getParser();\n\n // Use callback-based async decompression\n parser.getEntryStreamAsync(entry, (err, stream) => {\n if (err) return nextCallback(err);\n if (!stream) return nextCallback(new Error('No stream returned'));\n\n var chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n var linkpath = Buffer.concat(chunks).toString('utf8');\n\n var linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n var parser2 = iterator.iterator.getParser();\n return nextCallback(null, new FileEntry(attributes as FileAttributes, entry, parser2, iterator.lock));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["once","DirectoryEntry","SymbolicLinkEntry","compact","path","FileEntry","nextEntry","iterator","callback","Error","entry","next","nextCallback","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","split","sep","join","basename","name","getTime","mode","undefined","parser","getParser","getEntryStreamAsync","stream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","streamErr","size","parser2"],"mappings":"AAAA,OAAOA,UAAU,eAAe;AAChC,SAAmCC,cAAc,EAA4CC,iBAAiB,QAAQ,wBAAwB;AAC9I,OAAOC,aAAa,iBAAiB;AACrC,OAAOC,UAAU,OAAO;AACxB,OAAOC,eAAe,iBAAiB;AAkBvC,eAAe,SAASC,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAI,CAACD,SAASA,QAAQ,EAAE;QACtBC,SAAS,IAAIC,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,SAASA,QAAQ,CAACI,IAAI;IAE9B,IAAIC,eAAeZ,KAAK,CAACa,KAAaH;QACpC,kBAAkB;QAClB,IAAIA,OAAOH,SAASO,IAAI,CAACR;QACzBO,MAAML,SAASK,OAAOL,SAAS,MAAME,QAAQ;YAAEK,MAAM;YAAOC,OAAON;QAAM,IAAI;YAAEK,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACT,SAASU,IAAI,IAAIV,SAASW,MAAM,MAAM,CAACR,OAAO,OAAOF,SAAS,MAAM;QAAEO,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIN,MAAMS,UAAU,EAAE;QACpBZ,SAASO,IAAI,CAACR;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAIY,OAAOV,MAAMU,IAAI;IAErB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAIC,cAAcD,SAAS,cAAc,MAAM;IAE/C,iCAAiC;IACjC,oEAAoE;IACpE,IAAIE,YAAYZ,MAAMa,KAAK,IAAI,IAAIC;IACnC,IAAIC,aAAqC;QACvCrB,MAAMD,QAAQO,MAAMN,IAAI,CAACsB,KAAK,CAACtB,KAAKuB,GAAG,GAAGC,IAAI,CAACxB,KAAKuB,GAAG;QACvDE,UAAUnB,MAAMoB,IAAI;QACpBP,OAAOD,UAAUS,OAAO;QACxBC,MAAMtB,MAAMsB,IAAI,KAAKC,YAAYvB,MAAMsB,IAAI,GAAGX;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOR,aAAa,MAAM,IAAIX,eAAewB;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAIS,SAAS3B,SAASA,QAAQ,CAAC4B,SAAS;gBAExC,yCAAyC;gBACzCD,OAAOE,mBAAmB,CAAC1B,OAAO,CAACG,KAAKwB;oBACtC,IAAIxB,KAAK,OAAOD,aAAaC;oBAC7B,IAAI,CAACwB,QAAQ,OAAOzB,aAAa,IAAIH,MAAM;oBAE3C,IAAI6B,SAAmB,EAAE;oBAEzBD,OAAOE,EAAE,CAAC,QAAQ,CAACC;wBACjBF,OAAOxB,IAAI,CAAC0B;oBACd;oBACAH,OAAOE,EAAE,CAAC,OAAO;wBACf,IAAIE,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;wBAE9C,IAAIC,iBAAiC;4BACnCzC,MAAMqB,WAAWrB,IAAI;4BACrBmB,OAAOE,WAAWF,KAAK;4BACvBS,MA
AMP,WAAWO,IAAI;4BACrBS,UAAUA;wBACZ;wBAEA7B,aAAa,MAAM,IAAIV,kBAAkB2C;oBAC3C;oBACAR,OAAOE,EAAE,CAAC,SAAS,CAACO;wBAClBlC,aAAakC;oBACf;gBACF;gBACA;YACF;QAEA,KAAK;YAAQ;gBACXrB,WAAWL,IAAI,GAAG;gBAClBK,WAAWsB,IAAI,GAAGrC,MAAMqC,IAAI;gBAC5B,IAAIC,UAAUzC,SAASA,QAAQ,CAAC4B,SAAS;gBACzC,OAAOvB,aAAa,MAAM,IAAIP,UAAUoB,YAA8Bf,OAAOsC,SAASzC,SAASU,IAAI;YACrG;IACF;IAEA,OAAOT,SAAS,IAAIC,MAAM,CAAC,yBAAyB,EAAEW,MAAM;AAC9D"}
@@ -87,6 +87,11 @@ export declare class SevenZipParser {
     * Get a readable stream for an entry's content
     */
    getEntryStream(entry: SevenZipEntry): Readable;
+    /**
+     * Get a readable stream for an entry's content (callback-based async version)
+     * Uses streaming decompression for non-blocking I/O
+     */
+    getEntryStreamAsync(entry: SevenZipEntry, callback: (err: Error | null, stream?: Readable) => void): void;
    /**
     * Check if a folder uses BCJ2 codec
     */
@@ -96,6 +101,11 @@ export declare class SevenZipParser {
     * Only caches when multiple files share a block, releases when last file extracted
     */
    private getDecompressedFolder;
+    /**
+     * Get decompressed data for a folder using streaming (callback-based async)
+     * Uses createDecoder() streams for non-blocking decompression
+     */
+    private getDecompressedFolderAsync;
    /**
     * Decompress a BCJ2 folder with multi-stream handling
     * BCJ2 uses 4 input streams: main, call, jump, range coder
@@ -2,6 +2,7 @@
// Handles reading archive structure and providing file streams
import { allocBuffer, crc32 } from 'extract-base-iterator';
import fs from 'fs';
+import oo from 'on-one';
import { PassThrough } from 'readable-stream';
import { decodeBcj2Multi, getCodec, getCodecName, isBcj2Codec, isCodecSupported } from './codecs/index.js';
import { createCodedError, ErrorCode, FileAttribute, PropertyId, SIGNATURE_HEADER_SIZE } from './constants.js';
@@ -417,6 +418,65 @@ import { readNumber } from './NumberCodec.js';
        return outputStream;
    }
    /**
+     * Get a readable stream for an entry's content (callback-based async version)
+     * Uses streaming decompression for non-blocking I/O
+     */ getEntryStreamAsync(entry, callback) {
+        if (!entry._hasStream || entry.type === 'directory') {
+            // Return empty stream for directories and empty files
+            var emptyStream = new PassThrough();
+            emptyStream.end();
+            callback(null, emptyStream);
+            return;
+        }
+        if (!this.streamsInfo) {
+            callback(createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        // Get folder info
+        var folder = this.streamsInfo.folders[entry._folderIndex];
+        if (!folder) {
+            callback(createCodedError('Invalid folder index', ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        // Check codec support
+        for(var i = 0; i < folder.coders.length; i++){
+            var coder = folder.coders[i];
+            if (!isCodecSupported(coder.id)) {
+                var codecName = getCodecName(coder.id);
+                callback(createCodedError(`Unsupported codec: ${codecName}`, ErrorCode.UNSUPPORTED_CODEC));
+                return;
+            }
+        }
+        // Get decompressed data for this folder using async method
+        var folderIdx = entry._folderIndex;
+        var streamsInfo = this.streamsInfo;
+        this.getDecompressedFolderAsync(folderIdx, (err, data)=>{
+            if (err) return callback(err);
+            if (!data) return callback(new Error('No data returned from decompression'));
+            // Calculate file offset within the decompressed block
+            var fileStart = 0;
+            for(var m = 0; m < entry._streamIndexInFolder; m++){
+                var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
+                fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
+            }
+            var fileSize = entry.size;
+            // Bounds check
+            if (fileStart + fileSize > data.length) {
+                return callback(createCodedError(`File data out of bounds: offset ${fileStart} + size ${fileSize} > decompressed length ${data.length}`, ErrorCode.DECOMPRESSION_FAILED));
+            }
+            // Create a PassThrough stream with the file data
+            var outputStream = new PassThrough();
+            var fileData = data.slice(fileStart, fileStart + fileSize);
+            outputStream.end(fileData);
+            // Track extraction and release cache when all files from this folder are done
+            this.extractedPerFolder[folderIdx] = (this.extractedPerFolder[folderIdx] || 0) + 1;
+            if (this.extractedPerFolder[folderIdx] >= this.filesPerFolder[folderIdx]) {
+                delete this.decompressedCache[folderIdx];
+            }
+            callback(null, outputStream);
+        });
+    }
+    /**
     * Check if a folder uses BCJ2 codec
     */ folderHasBcj2(folder) {
        for(var i = 0; i < folder.coders.length; i++){
@@ -482,6 +542,104 @@ import { readNumber } from './NumberCodec.js';
        return data2;
    }
    /**
+     * Get decompressed data for a folder using streaming (callback-based async)
+     * Uses createDecoder() streams for non-blocking decompression
+     */ getDecompressedFolderAsync(folderIndex, callback) {
+        var self = this;
+        // Check cache first
+        if (this.decompressedCache[folderIndex]) {
+            callback(null, this.decompressedCache[folderIndex]);
+            return;
+        }
+        if (!this.streamsInfo) {
+            callback(createCodedError('No streams info available', ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        var folder = this.streamsInfo.folders[folderIndex];
+        // Check how many files remain in this folder
+        var filesInFolder = this.filesPerFolder[folderIndex] || 1;
+        var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+        var remainingFiles = filesInFolder - extractedFromFolder;
+        var shouldCache = remainingFiles > 1;
+        // BCJ2 requires special handling - use sync version for now
+        // TODO: Add async BCJ2 support
+        if (this.folderHasBcj2(folder)) {
+            try {
+                var data = this.decompressBcj2Folder(folderIndex);
+                if (shouldCache) {
+                    this.decompressedCache[folderIndex] = data;
+                }
+                callback(null, data);
+            } catch (err) {
+                callback(err);
+            }
+            return;
+        }
+        // Calculate packed data position
+        var packPos = SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+        // Find which pack stream this folder uses
+        var packStreamIndex = 0;
+        for(var j = 0; j < folderIndex; j++){
+            packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
+        }
+        // Calculate position of this pack stream
+        for(var k = 0; k < packStreamIndex; k++){
+            packPos += this.streamsInfo.packSizes[k];
+        }
+        var packSize = this.streamsInfo.packSizes[packStreamIndex];
+        // Read packed data
+        var packedData = this.source.read(packPos, packSize);
+        // Create decoder stream chain and decompress
+        var coders = folder.coders;
+        var unpackSizes = folder.unpackSizes;
+        // Helper to decompress through a single codec stream
+        function decompressWithStream(input, coderIdx, cb) {
+            var coderInfo = coders[coderIdx];
+            var codec = getCodec(coderInfo.id);
+            var decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
+            var chunks = [];
+            var errorOccurred = false;
+            decoder.on('data', (chunk)=>{
+                chunks.push(chunk);
+            });
+            oo(decoder, [
+                'error',
+                'end',
+                'close',
+                'finish'
+            ], (err)=>{
+                if (errorOccurred) return;
+                if (err) {
+                    errorOccurred = true;
+                    return cb(err);
+                }
+                cb(null, Buffer.concat(chunks));
+            });
+            // Write input data to decoder and signal end
+            decoder.end(input);
+        }
+        // Chain decompression through all codecs
+        function decompressChain(input, idx) {
+            if (idx >= coders.length) {
+                // All done - cache and return
+                if (shouldCache) {
+                    self.decompressedCache[folderIndex] = input;
+                }
+                callback(null, input);
+                return;
+            }
+            decompressWithStream(input, idx, (err, output)=>{
+                if (err) {
+                    callback(err);
+                    return;
+                }
+                decompressChain(output, idx + 1);
+            });
+        }
+        // Start the chain
+        decompressChain(packedData, 0);
+    }
+    /**
     * Decompress a BCJ2 folder with multi-stream handling
     * BCJ2 uses 4 input streams: main, call, jump, range coder
     */ decompressBcj2Folder(folderIndex) {