7z-iterator 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/FileEntry.js +7 -7
- package/dist/cjs/FileEntry.js.map +1 -1
- package/dist/cjs/nextEntry.js +21 -17
- package/dist/cjs/nextEntry.js.map +1 -1
- package/dist/cjs/sevenz/SevenZipParser.d.cts +10 -0
- package/dist/cjs/sevenz/SevenZipParser.d.ts +10 -0
- package/dist/cjs/sevenz/SevenZipParser.js +159 -0
- package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.cts +4 -1
- package/dist/cjs/sevenz/codecs/Lzma.d.ts +4 -1
- package/dist/cjs/sevenz/codecs/Lzma.js +30 -2
- package/dist/cjs/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/cjs/sevenz/codecs/Lzma2.d.cts +3 -0
- package/dist/cjs/sevenz/codecs/Lzma2.d.ts +3 -0
- package/dist/cjs/sevenz/codecs/Lzma2.js +10 -0
- package/dist/cjs/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.cts +35 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.d.ts +35 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.js +76 -0
- package/dist/cjs/sevenz/codecs/lzmaCompat.js.map +1 -0
- package/dist/esm/FileEntry.js +7 -7
- package/dist/esm/FileEntry.js.map +1 -1
- package/dist/esm/nextEntry.js +21 -17
- package/dist/esm/nextEntry.js.map +1 -1
- package/dist/esm/sevenz/SevenZipParser.d.ts +10 -0
- package/dist/esm/sevenz/SevenZipParser.js +158 -0
- package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma.d.ts +4 -1
- package/dist/esm/sevenz/codecs/Lzma.js +36 -5
- package/dist/esm/sevenz/codecs/Lzma.js.map +1 -1
- package/dist/esm/sevenz/codecs/Lzma2.d.ts +3 -0
- package/dist/esm/sevenz/codecs/Lzma2.js +20 -6
- package/dist/esm/sevenz/codecs/Lzma2.js.map +1 -1
- package/dist/esm/sevenz/codecs/lzmaCompat.d.ts +35 -0
- package/dist/esm/sevenz/codecs/lzmaCompat.js +69 -0
- package/dist/esm/sevenz/codecs/lzmaCompat.js.map +1 -0
- package/package.json +4 -1
package/dist/cjs/FileEntry.js
CHANGED
@@ -118,20 +118,20 @@ var SevenZipFileEntry = /*#__PURE__*/ function(FileEntry) {
            callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));
            return;
        }
-
-
+        // Use callback-based async decompression
+        this.parser.getEntryStreamAsync(this.entry, function(err, stream) {
+            if (err) return callback(err);
+            if (!stream) return callback(new Error('No stream returned'));
            var res = stream.pipe(_fs.default.createWriteStream(fullPath));
            (0, _onone.default)(res, [
                'error',
                'end',
                'close',
                'finish'
-            ], function(
-
+            ], function(writeErr) {
+                writeErr ? callback(writeErr) : (0, _extractbaseiterator.waitForAccess)(fullPath, callback);
            });
-        }
-            callback(err);
-        }
+        });
    };
    _proto.destroy = function destroy() {
        _extractbaseiterator.FileEntry.prototype.destroy.call(this);
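For readability, the TypeScript source of the new _writeFile flow, as embedded in the updated source map below (imports of fs, oo from on-one, and waitForAccess from extract-base-iterator are omitted here):

_writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): undefined {
  if (!this.entry || !this.parser) {
    callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));
    return;
  }

  // Use callback-based async decompression
  this.parser.getEntryStreamAsync(this.entry, (err, stream) => {
    if (err) return callback(err);
    if (!stream) return callback(new Error('No stream returned'));

    var res = stream.pipe(fs.createWriteStream(fullPath));
    oo(res, ['error', 'end', 'close', 'finish'], (writeErr?: Error) => {
      writeErr ? callback(writeErr) : waitForAccess(fullPath, callback);
    });
  });
}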
package/dist/cjs/FileEntry.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["import { type FileAttributes, FileEntry, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { ExtractOptions, LockT } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: LockT;\n private entry: SevenZipEntry;\n private parser: SevenZipParser;\n\n constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: LockT) {\n super(attributes);\n this.entry = entry;\n this.parser = parser;\n this.lock = lock;\n this.lock.retain();\n }\n\n create(dest: string, options: ExtractOptions | NoParamCallback, callback: NoParamCallback): undefined | Promise<boolean> {\n if (typeof options === 'function') {\n callback = options;\n options = null;\n }\n\n if (typeof callback === 'function') {\n options = options || {};\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) => {\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n });\n });\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): undefined {\n if (!this.entry || !this.parser) {\n callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));\n return;\n }\n\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/FileEntry.ts"],"sourcesContent":["import { type FileAttributes, FileEntry, type NoParamCallback, waitForAccess } from 'extract-base-iterator';\nimport fs from 'fs';\nimport oo from 'on-one';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { ExtractOptions, LockT } from './types.ts';\n\nexport default class SevenZipFileEntry extends FileEntry {\n private lock: LockT;\n private entry: SevenZipEntry;\n private parser: SevenZipParser;\n\n constructor(attributes: FileAttributes, entry: SevenZipEntry, parser: SevenZipParser, lock: LockT) {\n super(attributes);\n this.entry = entry;\n this.parser = parser;\n this.lock = lock;\n this.lock.retain();\n }\n\n create(dest: string, options: ExtractOptions | NoParamCallback, callback: NoParamCallback): undefined | Promise<boolean> {\n if (typeof options === 'function') {\n callback = options;\n options = null;\n }\n\n if (typeof callback === 'function') {\n options = options || {};\n return FileEntry.prototype.create.call(this, dest, options, (err?: Error) => {\n callback(err);\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n });\n }\n return new Promise((resolve, reject) => {\n this.create(dest, options, (err?: Error, done?: boolean) => {\n err ? reject(err) : resolve(done);\n });\n });\n }\n\n _writeFile(fullPath: string, _options: ExtractOptions, callback: NoParamCallback): undefined {\n if (!this.entry || !this.parser) {\n callback(new Error('7z FileEntry missing entry. Check for calling create multiple times'));\n return;\n }\n\n // Use callback-based async decompression\n this.parser.getEntryStreamAsync(this.entry, (err, stream) => {\n if (err) return callback(err);\n if (!stream) return callback(new Error('No stream returned'));\n\n var res = stream.pipe(fs.createWriteStream(fullPath));\n oo(res, ['error', 'end', 'close', 'finish'], (writeErr?: Error) => {\n writeErr ? 
callback(writeErr) : waitForAccess(fullPath, callback);\n });\n });\n }\n\n destroy() {\n FileEntry.prototype.destroy.call(this);\n this.entry = null;\n this.parser = null;\n if (this.lock) {\n this.lock.release();\n this.lock = null;\n }\n }\n}\n"],"names":["SevenZipFileEntry","attributes","entry","parser","lock","retain","create","dest","options","callback","FileEntry","prototype","call","err","release","Promise","resolve","reject","done","_writeFile","fullPath","_options","Error","getEntryStreamAsync","stream","res","pipe","fs","createWriteStream","oo","writeErr","waitForAccess","destroy"],"mappings":";;;;;;;eAMqBA;;;mCAN+D;yDACrE;4DACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,IAAA,AAAMA,kCAAN;;cAAMA;aAAAA,kBAKPC,UAA0B,EAAEC,KAAoB,EAAEC,MAAsB,EAAEC,IAAW;gCAL9EJ;;gBAMjB,kBANiBA;YAMXC;;QACN,MAAKC,KAAK,GAAGA;QACb,MAAKC,MAAM,GAAGA;QACd,MAAKC,IAAI,GAAGA;QACZ,MAAKA,IAAI,CAACC,MAAM;;;iBAVCL;IAanBM,OAAAA,MAqBC,GArBDA,SAAAA,OAAOC,IAAY,EAAEC,OAAyC,EAAEC,QAAyB;;QACvF,IAAI,OAAOD,YAAY,YAAY;YACjCC,WAAWD;YACXA,UAAU;QACZ;QAEA,IAAI,OAAOC,aAAa,YAAY;YAClCD,UAAUA,WAAW,CAAC;YACtB,OAAOE,8BAAS,CAACC,SAAS,CAACL,MAAM,CAACM,IAAI,CAAC,IAAI,EAAEL,MAAMC,SAAS,SAACK;gBAC3DJ,SAASI;gBACT,IAAI,MAAKT,IAAI,EAAE;oBACb,MAAKA,IAAI,CAACU,OAAO;oBACjB,MAAKV,IAAI,GAAG;gBACd;YACF;QACF;QACA,OAAO,IAAIW,QAAQ,SAACC,SAASC;YAC3B,MAAKX,MAAM,CAACC,MAAMC,SAAS,SAACK,KAAaK;gBACvCL,MAAMI,OAAOJ,OAAOG,QAAQE;YAC9B;QACF;IACF;IAEAC,OAAAA,UAgBC,GAhBDA,SAAAA,WAAWC,QAAgB,EAAEC,QAAwB,EAAEZ,QAAyB;QAC9E,IAAI,CAAC,IAAI,CAACP,KAAK,IAAI,CAAC,IAAI,CAACC,MAAM,EAAE;YAC/BM,SAAS,IAAIa,MAAM;YACnB;QACF;QAEA,yCAAyC;QACzC,IAAI,CAACnB,MAAM,CAACoB,mBAAmB,CAAC,IAAI,CAACrB,KAAK,EAAE,SAACW,KAAKW;YAChD,IAAIX,KAAK,OAAOJ,SAASI;YACzB,IAAI,CAACW,QAAQ,OAAOf,SAAS,IAAIa,MAAM;YAEvC,IAAIG,MAAMD,OAAOE,IAAI,CAACC,WAAE,CAACC,iBAAiB,CAACR;YAC3CS,IAAAA,cAAE,EAACJ,KAAK;gBAAC;gBAAS;gBAAO;gBAAS;aAAS,EAAE,SAACK;gBAC5CA,WAAWrB,SAASqB,YAAYC,IAAAA,kCAAa,EAACX,UAAUX;YAC1D;QACF;IACF;IAEAuB,OAAAA,OAQC,GARDA,SAAAA;QACEtB,8BAAS,CAACC,SAAS,CAACqB,OAAO,CAACpB,IAAI,CAAC,IAAI;QACrC,IAAI,CAACV,KAAK,GAAG;QACb,IAAI,CAACC,MAAM,GAAG;QACd,IAAI,IAAI,CAACC,IAAI,EAAE;YACb,IAAI,CAACA,IAAI,CAACU,OAAO;YACjB,IAAI,CAACV,IAAI,GAAG;QACd;IACF;WA9DmBJ;EAA0BU,8BAAS"}
package/dist/cjs/nextEntry.js
CHANGED
@@ -69,23 +69,27 @@ function nextEntry(iterator, callback) {
            // For symlinks, the file content IS the symlink target path
            // Read the content to get the linkpath for SymbolicLinkEntry
            var parser = iterator.iterator.getParser();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // Use callback-based async decompression
+            parser.getEntryStreamAsync(entry, function(err, stream) {
+                if (err) return nextCallback(err);
+                if (!stream) return nextCallback(new Error('No stream returned'));
+                var chunks = [];
+                stream.on('data', function(chunk) {
+                    chunks.push(chunk);
+                });
+                stream.on('end', function() {
+                    var linkpath = Buffer.concat(chunks).toString('utf8');
+                    var linkAttributes = {
+                        path: attributes.path,
+                        mtime: attributes.mtime,
+                        mode: attributes.mode,
+                        linkpath: linkpath
+                    };
+                    nextCallback(null, new _extractbaseiterator.SymbolicLinkEntry(linkAttributes));
+                });
+                stream.on('error', function(streamErr) {
+                    nextCallback(streamErr);
+                });
            });
            return;
        }
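Likewise, the TypeScript source of the new 'link' case in nextEntry, taken from the updated source map below; it buffers the decompressed stream because, for symlinks, the file content is the link target path:

case 'link': {
  // For symlinks, the file content IS the symlink target path
  // Read the content to get the linkpath for SymbolicLinkEntry
  var parser = iterator.iterator.getParser();

  // Use callback-based async decompression
  parser.getEntryStreamAsync(entry, (err, stream) => {
    if (err) return nextCallback(err);
    if (!stream) return nextCallback(new Error('No stream returned'));

    var chunks: Buffer[] = [];

    stream.on('data', (chunk: Buffer) => {
      chunks.push(chunk);
    });
    stream.on('end', () => {
      var linkpath = Buffer.concat(chunks).toString('utf8');

      var linkAttributes: LinkAttributes = {
        path: attributes.path,
        mtime: attributes.mtime,
        mode: attributes.mode,
        linkpath: linkpath,
      };

      nextCallback(null, new SymbolicLinkEntry(linkAttributes));
    });
    stream.on('error', (streamErr: Error) => {
      nextCallback(streamErr);
    });
  });
  return;
}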
package/dist/cjs/nextEntry.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => undefined;\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): undefined {\n if (!iterator.iterator) {\n callback(new Error('iterator missing'));\n return;\n }\n\n var entry: SevenZipEntry | null = null;\n entry = iterator.iterator.next();\n\n var nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n var type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n var defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n var mtimeDate = entry.mtime || new Date();\n var attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n var parser = iterator.iterator.getParser();\n
+
{"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => undefined;\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): undefined {\n if (!iterator.iterator) {\n callback(new Error('iterator missing'));\n return;\n }\n\n var entry: SevenZipEntry | null = null;\n entry = iterator.iterator.next();\n\n var nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n var type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n var defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n var mtimeDate = entry.mtime || new Date();\n var attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n var parser = iterator.iterator.getParser();\n\n // Use callback-based async decompression\n parser.getEntryStreamAsync(entry, (err, stream) => {\n if (err) return nextCallback(err);\n if (!stream) return nextCallback(new Error('No stream returned'));\n\n var chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n var linkpath = Buffer.concat(chunks).toString('utf8');\n\n var linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n var parser2 = iterator.iterator.getParser();\n return nextCallback(null, new FileEntry(attributes as FileAttributes, entry, parser2, iterator.lock));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","compact","split","sep","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","getEntryStreamAsync","stream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","parser2","FileEntry"],"mappings":";;;;+BAsBA;;;eAAwBA;;;iEAtBP;mCACqG;oEAClG;2DACH;kEACK;;;;;;AAkBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAI,CAACD,SAASA,QAAQ,EAAE;QACtBC,SAAS,IAAIC,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,SAASA,QAAQ,CAACI,IAAI;IAE9B,IAAIC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACpC,kBAAkB;QAClB,IAAIA,OAAOH,SAASQ,IAAI,CAACT;QACzBQ,MAAMN,SAASM,OAAON,SAAS,MAAME,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACV,SAASW,IAAI,IAAIX,SAASY,MAAM,MAAM,CAACT,OAAO,OAAOF,SAAS,MAAM;QAAEQ,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBb,SAASQ,IAAI,CAACT;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAIa,OAAOX,MAAMW,IAAI;IAErB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAIC,cAAcD,SAAS,cAAc,MAAM;IAE/C,iCAAiC;IACjC,oEAAoE;IACpE,IAAIE,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACnC,IAAIC,aAAqC;QACvCC,MAAMC,IAAAA,sBAAO,EAAClB,MAAMiB,IAAI,CAACE,KAAK,CAACF,aAAI,CAACG,GAAG,GAAGC,IAAI,CAACJ,aAAI,CAACG,GAAG;QACvDE,UAAUtB,MAAMuB,IAAI;QACpBT,OAAOD,UAAUW,OAAO;QACxBC,MAAMzB,MAAMyB,IAAI,KAAKC,YAAY1B,MAAMyB,IAAI,GAAGb;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAIyB,mCAAc,CAACX;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAIY,SAAS/B,SAASA,QAAQ,CAACgC,SAAS;gBAExC,yCAAyC;gBACzCD,OAAOE,mBAAmB,CAAC9B,OAAO,SAACI,KAAK2B;oBACtC,IAAI3B,KAAK,OAAOF,aAAaE;oBAC7B,IAAI,CAAC2B,QAAQ,OAAO7B,aAAa,IAAIH,MAAM;oBAE3C,IAAIiC,SAAmB,EAAE;oBAEzBD,OAAOE,EAAE,CAAC,QAAQ,SAACC;wBACjBF,OAAO3B,IAAI,CAAC6B;oBACd;oBACAH,OAAOE,EAAE,CAAC,OAAO;wBACf,IAAIE,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;wBAE9C,IAAIC,iBAAiC;4BACnCtB,MAAMD,WAAWC,IAAI;4BACrBH,OAAOE,WAAWF,KAAK;4BACvBW,MAAMT,WAAWS,IAAI;4BACrBU,UAAUA;wBACZ;wBAEAjC,aAAa,
MAAM,IAAIsC,sCAAiB,CAACD;oBAC3C;oBACAR,OAAOE,EAAE,CAAC,SAAS,SAACQ;wBAClBvC,aAAauC;oBACf;gBACF;gBACA;YACF;QAEA,KAAK;YAAQ;gBACXzB,WAAWL,IAAI,GAAG;gBAClBK,WAAW0B,IAAI,GAAG1C,MAAM0C,IAAI;gBAC5B,IAAIC,UAAU9C,SAASA,QAAQ,CAACgC,SAAS;gBACzC,OAAO3B,aAAa,MAAM,IAAI0C,oBAAS,CAAC5B,YAA8BhB,OAAO2C,SAAS9C,SAASW,IAAI;YACrG;IACF;IAEA,OAAOV,SAAS,IAAIC,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
package/dist/cjs/sevenz/SevenZipParser.d.cts
CHANGED
@@ -87,6 +87,11 @@ export declare class SevenZipParser {
     * Get a readable stream for an entry's content
     */
    getEntryStream(entry: SevenZipEntry): Readable;
+    /**
+     * Get a readable stream for an entry's content (callback-based async version)
+     * Uses streaming decompression for non-blocking I/O
+     */
+    getEntryStreamAsync(entry: SevenZipEntry, callback: (err: Error | null, stream?: Readable) => void): void;
    /**
     * Check if a folder uses BCJ2 codec
     */
@@ -96,6 +101,11 @@ export declare class SevenZipParser {
     * Only caches when multiple files share a block, releases when last file extracted
     */
    private getDecompressedFolder;
+    /**
+     * Get decompressed data for a folder using streaming (callback-based async)
+     * Uses createDecoder() streams for non-blocking decompression
+     */
+    private getDecompressedFolderAsync;
    /**
     * Decompress a BCJ2 folder with multi-stream handling
     * BCJ2 uses 4 input streams: main, call, jump, range coder
package/dist/cjs/sevenz/SevenZipParser.d.ts
CHANGED
@@ -87,6 +87,11 @@ export declare class SevenZipParser {
     * Get a readable stream for an entry's content
     */
    getEntryStream(entry: SevenZipEntry): Readable;
+    /**
+     * Get a readable stream for an entry's content (callback-based async version)
+     * Uses streaming decompression for non-blocking I/O
+     */
+    getEntryStreamAsync(entry: SevenZipEntry, callback: (err: Error | null, stream?: Readable) => void): void;
    /**
     * Check if a folder uses BCJ2 codec
     */
@@ -96,6 +101,11 @@ export declare class SevenZipParser {
     * Only caches when multiple files share a block, releases when last file extracted
     */
    private getDecompressedFolder;
+    /**
+     * Get decompressed data for a folder using streaming (callback-based async)
+     * Uses createDecoder() streams for non-blocking decompression
+     */
+    private getDecompressedFolderAsync;
    /**
     * Decompress a BCJ2 folder with multi-stream handling
     * BCJ2 uses 4 input streams: main, call, jump, range coder
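Taken together, these declarations add a callback-based counterpart to getEntryStream. A minimal sketch of how a caller could consume it, assuming a parser and entry obtained elsewhere (the ParserLike type and extractEntryTo helper below are illustrative, not part of the package):

import fs from 'fs';
import type { Readable } from 'stream';

// Structural stand-in for the SevenZipParser surface exercised here (illustrative only).
type ParserLike = {
  getEntryStreamAsync(entry: unknown, callback: (err: Error | null, stream?: Readable) => void): void;
};

// Hypothetical helper: write one entry's decompressed content to destPath.
function extractEntryTo(parser: ParserLike, entry: unknown, destPath: string, done: (err?: Error | null) => void): void {
  parser.getEntryStreamAsync(entry, (err, stream) => {
    if (err) return done(err);
    if (!stream) return done(new Error('No stream returned'));
    const out = stream.pipe(fs.createWriteStream(destPath));
    out.once('error', done);
    out.once('finish', () => done());
  });
}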
package/dist/cjs/sevenz/SevenZipParser.js
CHANGED
@@ -23,6 +23,7 @@ _export(exports, {
});
var _extractbaseiterator = require("extract-base-iterator");
var _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
+var _onone = /*#__PURE__*/ _interop_require_default(require("on-one"));
var _readablestream = require("readable-stream");
var _indexts = require("./codecs/index.js");
var _constantsts = require("./constants.js");
@@ -467,6 +468,66 @@ var SevenZipParser = /*#__PURE__*/ function() {
        return outputStream;
    };
    /**
+     * Get a readable stream for an entry's content (callback-based async version)
+     * Uses streaming decompression for non-blocking I/O
+     */ _proto.getEntryStreamAsync = function getEntryStreamAsync(entry, callback) {
+        var _this = this;
+        if (!entry._hasStream || entry.type === 'directory') {
+            // Return empty stream for directories and empty files
+            var emptyStream = new _readablestream.PassThrough();
+            emptyStream.end();
+            callback(null, emptyStream);
+            return;
+        }
+        if (!this.streamsInfo) {
+            callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        // Get folder info
+        var folder = this.streamsInfo.folders[entry._folderIndex];
+        if (!folder) {
+            callback((0, _constantsts.createCodedError)('Invalid folder index', _constantsts.ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        // Check codec support
+        for(var i = 0; i < folder.coders.length; i++){
+            var coder = folder.coders[i];
+            if (!(0, _indexts.isCodecSupported)(coder.id)) {
+                var codecName = (0, _indexts.getCodecName)(coder.id);
+                callback((0, _constantsts.createCodedError)("Unsupported codec: ".concat(codecName), _constantsts.ErrorCode.UNSUPPORTED_CODEC));
+                return;
+            }
+        }
+        // Get decompressed data for this folder using async method
+        var folderIdx = entry._folderIndex;
+        var streamsInfo = this.streamsInfo;
+        this.getDecompressedFolderAsync(folderIdx, function(err, data) {
+            if (err) return callback(err);
+            if (!data) return callback(new Error('No data returned from decompression'));
+            // Calculate file offset within the decompressed block
+            var fileStart = 0;
+            for(var m = 0; m < entry._streamIndexInFolder; m++){
+                var prevStreamGlobalIndex = entry._streamIndex - entry._streamIndexInFolder + m;
+                fileStart += streamsInfo.unpackSizes[prevStreamGlobalIndex];
+            }
+            var fileSize = entry.size;
+            // Bounds check
+            if (fileStart + fileSize > data.length) {
+                return callback((0, _constantsts.createCodedError)("File data out of bounds: offset ".concat(fileStart, " + size ").concat(fileSize, " > decompressed length ").concat(data.length), _constantsts.ErrorCode.DECOMPRESSION_FAILED));
+            }
+            // Create a PassThrough stream with the file data
+            var outputStream = new _readablestream.PassThrough();
+            var fileData = data.slice(fileStart, fileStart + fileSize);
+            outputStream.end(fileData);
+            // Track extraction and release cache when all files from this folder are done
+            _this.extractedPerFolder[folderIdx] = (_this.extractedPerFolder[folderIdx] || 0) + 1;
+            if (_this.extractedPerFolder[folderIdx] >= _this.filesPerFolder[folderIdx]) {
+                delete _this.decompressedCache[folderIdx];
+            }
+            callback(null, outputStream);
+        });
+    };
+    /**
     * Check if a folder uses BCJ2 codec
     */ _proto.folderHasBcj2 = function folderHasBcj2(folder) {
        for(var i = 0; i < folder.coders.length; i++){
@@ -532,6 +593,104 @@ var SevenZipParser = /*#__PURE__*/ function() {
        return data2;
    };
    /**
+     * Get decompressed data for a folder using streaming (callback-based async)
+     * Uses createDecoder() streams for non-blocking decompression
+     */ _proto.getDecompressedFolderAsync = function getDecompressedFolderAsync(folderIndex, callback) {
+        var self = this;
+        // Check cache first
+        if (this.decompressedCache[folderIndex]) {
+            callback(null, this.decompressedCache[folderIndex]);
+            return;
+        }
+        if (!this.streamsInfo) {
+            callback((0, _constantsts.createCodedError)('No streams info available', _constantsts.ErrorCode.CORRUPT_HEADER));
+            return;
+        }
+        var folder = this.streamsInfo.folders[folderIndex];
+        // Check how many files remain in this folder
+        var filesInFolder = this.filesPerFolder[folderIndex] || 1;
+        var extractedFromFolder = this.extractedPerFolder[folderIndex] || 0;
+        var remainingFiles = filesInFolder - extractedFromFolder;
+        var shouldCache = remainingFiles > 1;
+        // BCJ2 requires special handling - use sync version for now
+        // TODO: Add async BCJ2 support
+        if (this.folderHasBcj2(folder)) {
+            try {
+                var data = this.decompressBcj2Folder(folderIndex);
+                if (shouldCache) {
+                    this.decompressedCache[folderIndex] = data;
+                }
+                callback(null, data);
+            } catch (err) {
+                callback(err);
+            }
+            return;
+        }
+        // Calculate packed data position
+        var packPos = _constantsts.SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+        // Find which pack stream this folder uses
+        var packStreamIndex = 0;
+        for(var j = 0; j < folderIndex; j++){
+            packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
+        }
+        // Calculate position of this pack stream
+        for(var k = 0; k < packStreamIndex; k++){
+            packPos += this.streamsInfo.packSizes[k];
+        }
+        var packSize = this.streamsInfo.packSizes[packStreamIndex];
+        // Read packed data
+        var packedData = this.source.read(packPos, packSize);
+        // Create decoder stream chain and decompress
+        var coders = folder.coders;
+        var unpackSizes = folder.unpackSizes;
+        // Helper to decompress through a single codec stream
+        function decompressWithStream(input, coderIdx, cb) {
+            var coderInfo = coders[coderIdx];
+            var codec = (0, _indexts.getCodec)(coderInfo.id);
+            var decoder = codec.createDecoder(coderInfo.properties, unpackSizes[coderIdx]);
+            var chunks = [];
+            var errorOccurred = false;
+            decoder.on('data', function(chunk) {
+                chunks.push(chunk);
+            });
+            (0, _onone.default)(decoder, [
+                'error',
+                'end',
+                'close',
+                'finish'
+            ], function(err) {
+                if (errorOccurred) return;
+                if (err) {
+                    errorOccurred = true;
+                    return cb(err);
+                }
+                cb(null, Buffer.concat(chunks));
+            });
+            // Write input data to decoder and signal end
+            decoder.end(input);
+        }
+        // Chain decompression through all codecs
+        function decompressChain(input, idx) {
+            if (idx >= coders.length) {
+                // All done - cache and return
+                if (shouldCache) {
+                    self.decompressedCache[folderIndex] = input;
+                }
+                callback(null, input);
+                return;
+            }
+            decompressWithStream(input, idx, function(err, output) {
+                if (err) {
+                    callback(err);
+                    return;
+                }
+                decompressChain(output, idx + 1);
+            });
+        }
+        // Start the chain
+        decompressChain(packedData, 0);
+    };
+    /**
     * Decompress a BCJ2 folder with multi-stream handling
     * BCJ2 uses 4 input streams: main, call, jump, range coder
     */ _proto.decompressBcj2Folder = function decompressBcj2Folder(folderIndex) {
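The decompressWithStream/decompressChain pair above applies each coder's decoder stream in sequence, buffering the output of one stage before feeding it to the next. A standalone sketch of that pattern, using on-one the same way the diff does; the DecoderFactory type and the decoders argument are illustrative stand-ins for codec.createDecoder(properties, unpackSize):

import type { Transform } from 'stream';
import oo from 'on-one';

// Illustrative stand-in for codec.createDecoder(properties, unpackSize).
type DecoderFactory = () => Transform;

// Run input through each decoder in order, collecting the full output of one
// stage before starting the next, and deliver the final buffer via callback.
function decompressChain(input: Buffer, decoders: DecoderFactory[], callback: (err: Error | null, output?: Buffer) => void): void {
  if (decoders.length === 0) return callback(null, input);

  const decoder = decoders[0]();
  const chunks: Buffer[] = [];
  decoder.on('data', (chunk: Buffer) => chunks.push(chunk));

  // on-one calls back exactly once, for whichever terminal event fires first.
  oo(decoder, ['error', 'end', 'close', 'finish'], (err?: Error) => {
    if (err) return callback(err);
    decompressChain(Buffer.concat(chunks), decoders.slice(1), callback);
  });

  decoder.end(input);
}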