7z-iterator 1.4.0 → 2.0.2

This diff reflects the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (160)
  1. package/dist/cjs/index.d.cts +3 -3
  2. package/dist/cjs/index.d.ts +3 -3
  3. package/dist/cjs/index.js +7 -38
  4. package/dist/cjs/index.js.map +1 -1
  5. package/dist/cjs/nextEntry.js +1 -2
  6. package/dist/cjs/nextEntry.js.map +1 -1
  7. package/dist/cjs/sevenz/ArchiveSource.d.cts +1 -0
  8. package/dist/cjs/sevenz/ArchiveSource.d.ts +1 -0
  9. package/dist/cjs/sevenz/ArchiveSource.js +23 -0
  10. package/dist/cjs/sevenz/ArchiveSource.js.map +1 -1
  11. package/dist/cjs/sevenz/SevenZipParser.js +22 -3
  12. package/dist/cjs/sevenz/SevenZipParser.js.map +1 -1
  13. package/dist/cjs/sevenz/codecs/index.js +52 -30
  14. package/dist/cjs/sevenz/codecs/index.js.map +1 -1
  15. package/dist/cjs/sevenz/constants.d.cts +1 -0
  16. package/dist/cjs/sevenz/constants.d.ts +1 -0
  17. package/dist/cjs/sevenz/constants.js +1 -0
  18. package/dist/cjs/sevenz/constants.js.map +1 -1
  19. package/dist/esm/index.d.ts +3 -3
  20. package/dist/esm/index.js +4 -3
  21. package/dist/esm/index.js.map +1 -1
  22. package/dist/esm/nextEntry.js +1 -2
  23. package/dist/esm/nextEntry.js.map +1 -1
  24. package/dist/esm/sevenz/ArchiveSource.d.ts +1 -0
  25. package/dist/esm/sevenz/ArchiveSource.js +23 -0
  26. package/dist/esm/sevenz/ArchiveSource.js.map +1 -1
  27. package/dist/esm/sevenz/SevenZipParser.js +22 -3
  28. package/dist/esm/sevenz/SevenZipParser.js.map +1 -1
  29. package/dist/esm/sevenz/codecs/index.js +32 -10
  30. package/dist/esm/sevenz/codecs/index.js.map +1 -1
  31. package/dist/esm/sevenz/constants.d.ts +1 -0
  32. package/dist/esm/sevenz/constants.js +1 -0
  33. package/dist/esm/sevenz/constants.js.map +1 -1
  34. package/package.json +3 -3
  35. package/dist/cjs/lzma/Lzma2ChunkParser.d.cts +0 -73
  36. package/dist/cjs/lzma/Lzma2ChunkParser.d.ts +0 -73
  37. package/dist/cjs/lzma/Lzma2ChunkParser.js +0 -148
  38. package/dist/cjs/lzma/Lzma2ChunkParser.js.map +0 -1
  39. package/dist/cjs/lzma/index.d.cts +0 -31
  40. package/dist/cjs/lzma/index.d.ts +0 -31
  41. package/dist/cjs/lzma/index.js +0 -83
  42. package/dist/cjs/lzma/index.js.map +0 -1
  43. package/dist/cjs/lzma/stream/transforms.d.cts +0 -46
  44. package/dist/cjs/lzma/stream/transforms.d.ts +0 -46
  45. package/dist/cjs/lzma/stream/transforms.js +0 -209
  46. package/dist/cjs/lzma/stream/transforms.js.map +0 -1
  47. package/dist/cjs/lzma/sync/Lzma2Decoder.d.cts +0 -63
  48. package/dist/cjs/lzma/sync/Lzma2Decoder.d.ts +0 -63
  49. package/dist/cjs/lzma/sync/Lzma2Decoder.js +0 -231
  50. package/dist/cjs/lzma/sync/Lzma2Decoder.js.map +0 -1
  51. package/dist/cjs/lzma/sync/LzmaDecoder.d.cts +0 -97
  52. package/dist/cjs/lzma/sync/LzmaDecoder.d.ts +0 -97
  53. package/dist/cjs/lzma/sync/LzmaDecoder.js +0 -580
  54. package/dist/cjs/lzma/sync/LzmaDecoder.js.map +0 -1
  55. package/dist/cjs/lzma/sync/RangeDecoder.d.cts +0 -69
  56. package/dist/cjs/lzma/sync/RangeDecoder.d.ts +0 -69
  57. package/dist/cjs/lzma/sync/RangeDecoder.js +0 -162
  58. package/dist/cjs/lzma/sync/RangeDecoder.js.map +0 -1
  59. package/dist/cjs/lzma/types.d.cts +0 -117
  60. package/dist/cjs/lzma/types.d.ts +0 -117
  61. package/dist/cjs/lzma/types.js +0 -264
  62. package/dist/cjs/lzma/types.js.map +0 -1
  63. package/dist/cjs/sevenz/codecs/Bcj.d.cts +0 -16
  64. package/dist/cjs/sevenz/codecs/Bcj.d.ts +0 -16
  65. package/dist/cjs/sevenz/codecs/Bcj.js +0 -183
  66. package/dist/cjs/sevenz/codecs/Bcj.js.map +0 -1
  67. package/dist/cjs/sevenz/codecs/BcjArm.d.cts +0 -21
  68. package/dist/cjs/sevenz/codecs/BcjArm.d.ts +0 -21
  69. package/dist/cjs/sevenz/codecs/BcjArm.js +0 -104
  70. package/dist/cjs/sevenz/codecs/BcjArm.js.map +0 -1
  71. package/dist/cjs/sevenz/codecs/BcjArm64.d.cts +0 -21
  72. package/dist/cjs/sevenz/codecs/BcjArm64.d.ts +0 -21
  73. package/dist/cjs/sevenz/codecs/BcjArm64.js +0 -65
  74. package/dist/cjs/sevenz/codecs/BcjArm64.js.map +0 -1
  75. package/dist/cjs/sevenz/codecs/BcjArmt.d.cts +0 -19
  76. package/dist/cjs/sevenz/codecs/BcjArmt.d.ts +0 -19
  77. package/dist/cjs/sevenz/codecs/BcjArmt.js +0 -76
  78. package/dist/cjs/sevenz/codecs/BcjArmt.js.map +0 -1
  79. package/dist/cjs/sevenz/codecs/BcjIa64.d.cts +0 -15
  80. package/dist/cjs/sevenz/codecs/BcjIa64.d.ts +0 -15
  81. package/dist/cjs/sevenz/codecs/BcjIa64.js +0 -141
  82. package/dist/cjs/sevenz/codecs/BcjIa64.js.map +0 -1
  83. package/dist/cjs/sevenz/codecs/BcjPpc.d.cts +0 -20
  84. package/dist/cjs/sevenz/codecs/BcjPpc.d.ts +0 -20
  85. package/dist/cjs/sevenz/codecs/BcjPpc.js +0 -64
  86. package/dist/cjs/sevenz/codecs/BcjPpc.js.map +0 -1
  87. package/dist/cjs/sevenz/codecs/BcjSparc.d.cts +0 -19
  88. package/dist/cjs/sevenz/codecs/BcjSparc.d.ts +0 -19
  89. package/dist/cjs/sevenz/codecs/BcjSparc.js +0 -69
  90. package/dist/cjs/sevenz/codecs/BcjSparc.js.map +0 -1
  91. package/dist/cjs/sevenz/codecs/Delta.d.cts +0 -16
  92. package/dist/cjs/sevenz/codecs/Delta.d.ts +0 -16
  93. package/dist/cjs/sevenz/codecs/Delta.js +0 -74
  94. package/dist/cjs/sevenz/codecs/Delta.js.map +0 -1
  95. package/dist/cjs/sevenz/codecs/Lzma.d.cts +0 -17
  96. package/dist/cjs/sevenz/codecs/Lzma.d.ts +0 -17
  97. package/dist/cjs/sevenz/codecs/Lzma.js +0 -40
  98. package/dist/cjs/sevenz/codecs/Lzma.js.map +0 -1
  99. package/dist/cjs/sevenz/codecs/Lzma2.d.cts +0 -20
  100. package/dist/cjs/sevenz/codecs/Lzma2.d.ts +0 -20
  101. package/dist/cjs/sevenz/codecs/Lzma2.js +0 -42
  102. package/dist/cjs/sevenz/codecs/Lzma2.js.map +0 -1
  103. package/dist/cjs/xz/Decoder.d.cts +0 -25
  104. package/dist/cjs/xz/Decoder.d.ts +0 -25
  105. package/dist/cjs/xz/Decoder.js +0 -194
  106. package/dist/cjs/xz/Decoder.js.map +0 -1
  107. package/dist/esm/lzma/Lzma2ChunkParser.d.ts +0 -73
  108. package/dist/esm/lzma/Lzma2ChunkParser.js +0 -137
  109. package/dist/esm/lzma/Lzma2ChunkParser.js.map +0 -1
  110. package/dist/esm/lzma/index.d.ts +0 -31
  111. package/dist/esm/lzma/index.js +0 -44
  112. package/dist/esm/lzma/index.js.map +0 -1
  113. package/dist/esm/lzma/stream/transforms.d.ts +0 -46
  114. package/dist/esm/lzma/stream/transforms.js +0 -189
  115. package/dist/esm/lzma/stream/transforms.js.map +0 -1
  116. package/dist/esm/lzma/sync/Lzma2Decoder.d.ts +0 -63
  117. package/dist/esm/lzma/sync/Lzma2Decoder.js +0 -211
  118. package/dist/esm/lzma/sync/Lzma2Decoder.js.map +0 -1
  119. package/dist/esm/lzma/sync/LzmaDecoder.d.ts +0 -97
  120. package/dist/esm/lzma/sync/LzmaDecoder.js +0 -543
  121. package/dist/esm/lzma/sync/LzmaDecoder.js.map +0 -1
  122. package/dist/esm/lzma/sync/RangeDecoder.d.ts +0 -69
  123. package/dist/esm/lzma/sync/RangeDecoder.js +0 -132
  124. package/dist/esm/lzma/sync/RangeDecoder.js.map +0 -1
  125. package/dist/esm/lzma/types.d.ts +0 -117
  126. package/dist/esm/lzma/types.js +0 -154
  127. package/dist/esm/lzma/types.js.map +0 -1
  128. package/dist/esm/sevenz/codecs/Bcj.d.ts +0 -16
  129. package/dist/esm/sevenz/codecs/Bcj.js +0 -175
  130. package/dist/esm/sevenz/codecs/Bcj.js.map +0 -1
  131. package/dist/esm/sevenz/codecs/BcjArm.d.ts +0 -21
  132. package/dist/esm/sevenz/codecs/BcjArm.js +0 -101
  133. package/dist/esm/sevenz/codecs/BcjArm.js.map +0 -1
  134. package/dist/esm/sevenz/codecs/BcjArm64.d.ts +0 -21
  135. package/dist/esm/sevenz/codecs/BcjArm64.js +0 -57
  136. package/dist/esm/sevenz/codecs/BcjArm64.js.map +0 -1
  137. package/dist/esm/sevenz/codecs/BcjArmt.d.ts +0 -19
  138. package/dist/esm/sevenz/codecs/BcjArmt.js +0 -66
  139. package/dist/esm/sevenz/codecs/BcjArmt.js.map +0 -1
  140. package/dist/esm/sevenz/codecs/BcjIa64.d.ts +0 -15
  141. package/dist/esm/sevenz/codecs/BcjIa64.js +0 -127
  142. package/dist/esm/sevenz/codecs/BcjIa64.js.map +0 -1
  143. package/dist/esm/sevenz/codecs/BcjPpc.d.ts +0 -20
  144. package/dist/esm/sevenz/codecs/BcjPpc.js +0 -55
  145. package/dist/esm/sevenz/codecs/BcjPpc.js.map +0 -1
  146. package/dist/esm/sevenz/codecs/BcjSparc.d.ts +0 -19
  147. package/dist/esm/sevenz/codecs/BcjSparc.js +0 -59
  148. package/dist/esm/sevenz/codecs/BcjSparc.js.map +0 -1
  149. package/dist/esm/sevenz/codecs/Delta.d.ts +0 -16
  150. package/dist/esm/sevenz/codecs/Delta.js +0 -66
  151. package/dist/esm/sevenz/codecs/Delta.js.map +0 -1
  152. package/dist/esm/sevenz/codecs/Lzma.d.ts +0 -17
  153. package/dist/esm/sevenz/codecs/Lzma.js +0 -33
  154. package/dist/esm/sevenz/codecs/Lzma.js.map +0 -1
  155. package/dist/esm/sevenz/codecs/Lzma2.d.ts +0 -20
  156. package/dist/esm/sevenz/codecs/Lzma2.js +0 -38
  157. package/dist/esm/sevenz/codecs/Lzma2.js.map +0 -1
  158. package/dist/esm/xz/Decoder.d.ts +0 -25
  159. package/dist/esm/xz/Decoder.js +0 -185
  160. package/dist/esm/xz/Decoder.js.map +0 -1
package/dist/cjs/index.d.cts CHANGED
@@ -1,5 +1,5 @@
- export type { OutputSink } from './lzma/index.js';
- export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectFormat, Lzma2Decoder, LzmaDecoder, } from './lzma/index.js';
+ export * from './FileEntry.js';
+ export * from './nextEntry.js';
  export { default } from './SevenZipIterator.js';
+ export * from './sevenz/index.js';
  export * from './types.js';
- export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
package/dist/cjs/index.d.ts CHANGED
@@ -1,5 +1,5 @@
- export type { OutputSink } from './lzma/index.js';
- export { createLzma2Decoder, createLzmaDecoder, decodeLzma, decodeLzma2, detectFormat, Lzma2Decoder, LzmaDecoder, } from './lzma/index.js';
+ export * from './FileEntry.js';
+ export * from './nextEntry.js';
  export { default } from './SevenZipIterator.js';
+ export * from './sevenz/index.js';
  export * from './types.js';
- export { createXZDecoder, decodeXZ } from './xz/Decoder.js';
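
The two declaration hunks above show the breaking API change in 2.x: the standalone LZMA/LZMA2/XZ decoder exports are removed, and the package root instead re-exports the iterator-oriented modules. A consumer-side sketch of the difference (only the default export and the removed names are confirmed by this diff; the rest is illustrative):

// Unchanged across 1.4.0 and 2.0.2: the default export is the iterator class.
import SevenZipIterator from '7z-iterator';

// Valid in 1.4.0, gone in 2.0.2 - the './lzma' and './xz' modules were deleted:
// import { decodeLzma, decodeLzma2, decodeXZ, detectFormat, createXZDecoder } from '7z-iterator';

// 2.0.2 instead re-exports './FileEntry.js', './nextEntry.js', './sevenz/index.js'
// and './types.js' from the package root.
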
package/dist/cjs/index.js CHANGED
@@ -1,50 +1,19 @@
- // LZMA decoders for external use
+ // 7z-iterator - high-level 7z archive iterator
  "use strict";
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
- function _export(target, all) {
- for(var name in all)Object.defineProperty(target, name, {
- enumerable: true,
- get: Object.getOwnPropertyDescriptor(all, name).get
- });
- }
- _export(exports, {
- get Lzma2Decoder () {
- return _indexts.Lzma2Decoder;
- },
- get LzmaDecoder () {
- return _indexts.LzmaDecoder;
- },
- get createLzma2Decoder () {
- return _indexts.createLzma2Decoder;
- },
- get createLzmaDecoder () {
- return _indexts.createLzmaDecoder;
- },
- get createXZDecoder () {
- return _Decoderts.createXZDecoder;
- },
- get decodeLzma () {
- return _indexts.decodeLzma;
- },
- get decodeLzma2 () {
- return _indexts.decodeLzma2;
- },
- get decodeXZ () {
- return _Decoderts.decodeXZ;
- },
- get default () {
+ Object.defineProperty(exports, "default", {
+ enumerable: true,
+ get: function() {
  return _SevenZipIteratorts.default;
- },
- get detectFormat () {
- return _indexts.detectFormat;
  }
  });
- var _indexts = require("./lzma/index.js");
+ _export_star(require("./FileEntry.js"), exports);
+ _export_star(require("./nextEntry.js"), exports);
  var _SevenZipIteratorts = /*#__PURE__*/ _interop_require_default(require("./SevenZipIterator.js"));
+ _export_star(require("./sevenz/index.js"), exports);
  _export_star(require("./types.js"), exports);
- var _Decoderts = require("./xz/Decoder.js");
  function _export_star(from, to) {
  Object.keys(from).forEach(function(k) {
  if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {
package/dist/cjs/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/index.ts"],"sourcesContent":["// LZMA decoders for external use\n\nexport type { OutputSink } from './lzma/index.ts';\nexport {\n createLzma2Decoder,\n createLzmaDecoder,\n decodeLzma,\n decodeLzma2,\n detectFormat,\n Lzma2Decoder,\n LzmaDecoder,\n} from './lzma/index.ts';\nexport { default } from './SevenZipIterator.ts';\nexport * from './types.ts';\nexport { createXZDecoder, decodeXZ } from './xz/Decoder.ts';\n"],"names":["Lzma2Decoder","LzmaDecoder","createLzma2Decoder","createLzmaDecoder","createXZDecoder","decodeLzma","decodeLzma2","decodeXZ","default","detectFormat"],"mappings":"AAAA,iCAAiC;;;;;;;;;;;;QAS/BA;eAAAA,qBAAY;;QACZC;eAAAA,oBAAW;;QANXC;eAAAA,2BAAkB;;QAClBC;eAAAA,0BAAiB;;QASVC;eAAAA,0BAAe;;QARtBC;eAAAA,mBAAU;;QACVC;eAAAA,oBAAW;;QAOaC;eAAAA,mBAAQ;;QAFzBC;eAAAA,2BAAO;;QAJdC;eAAAA,qBAAY;;;uBAGP;yEACiB;qBACV;yBAC4B"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/index.ts"],"sourcesContent":["// 7z-iterator - high-level 7z archive iterator\n\nexport * from './FileEntry.ts';\nexport * from './nextEntry.ts';\nexport { default } from './SevenZipIterator.ts';\nexport * from './sevenz/index.ts';\nexport * from './types.ts';\n"],"names":["default"],"mappings":"AAAA,+CAA+C;;;;;+BAItCA;;;eAAAA,2BAAO;;;qBAFF;qBACA;yEACU;qBACV;qBACA"}
package/dist/cjs/nextEntry.js CHANGED
@@ -10,7 +10,6 @@ Object.defineProperty(exports, "default", {
  });
  var _calloncefn = /*#__PURE__*/ _interop_require_default(require("call-once-fn"));
  var _extractbaseiterator = require("extract-base-iterator");
- var _lodashcompact = /*#__PURE__*/ _interop_require_default(require("lodash.compact"));
  var _path = /*#__PURE__*/ _interop_require_default(require("path"));
  var _FileEntryts = /*#__PURE__*/ _interop_require_default(require("./FileEntry.js"));
  function _interop_require_default(obj) {
@@ -56,7 +55,7 @@ function nextEntry(iterator, callback) {
  // mtime must be timestamp (number) for FileAttributes compatibility
  var mtimeDate = entry.mtime || new Date();
  var attributes = {
- path: (0, _lodashcompact.default)(entry.path.split(_path.default.sep)).join(_path.default.sep),
+ path: entry.path.split(_path.default.sep).filter(Boolean).join(_path.default.sep),
  basename: entry.name,
  mtime: mtimeDate.getTime(),
  mode: entry.mode !== undefined ? entry.mode : defaultMode
package/dist/cjs/nextEntry.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport compact from 'lodash.compact';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n/** @internal */\ninterface InternalIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n const internalIter = iterator._iterator as InternalIterator | null;\n if (!internalIter) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = internalIter.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: compact(entry.path.split(path.sep)).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = internalIter.getParser();\n const stream = parser.getEntryStream(entry);\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser = internalIter.getParser();\n\n const stream = parser.getEntryStream(entry);\n return nextCallback(null, new FileEntry(attributes as FileAttributes, stream, iterator.lock, entry._canStream));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","internalIter","_iterator","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","compact","split","sep","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","stream","getEntryStream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","FileEntry","_canStream"],"mappings":";;;;+BA4BA;;;eAAwBA;;;iEA5BP;mCACqG;oEAClG;2DACH;kEACK;;;;;;AAwBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAMC,eAAeF,SAASG,SAAS;IACvC,IAAI,CAACD,cAAc;QACjBD,SAAS,IAAIG,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,aAAaI,IAAI;IAEzB,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOL,SAASU,IAAI,CAACX;QACzBU,MAAMR,SAASQ,OAAOR,SAAS,MAAMI,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACZ,SAASa,IAAI,IAAIb,SAASc,MAAM,MAAM,CAACT,OAAO,OAAOJ,SAAS,MAAM;QAAEU,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBf,SAASU,IAAI,CAACX;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMe,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMC,IAAAA,sBAAO,EAAClB,MAAMiB,IAAI,CAACE,KAAK,CAACF,aAAI,CAACG,GAAG,GAAGC,IAAI,CAACJ,aAAI,CAACG,GAAG;QACvDE,UAAUtB,MAAMuB,IAAI;QACpBT,OAAOD,UAAUW,OAAO;QACxBC,MAAMzB,MAAMyB,IAAI,KAAKC,YAAY1B,MAAMyB,IAAI,GAAGb;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAIyB,mCAAc,CAACX;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMY,SAAS/B,aAAagC,SAAS;gBACrC,IAAMC,SAASF,OAAOG,cAAc,CAAC/B;gBAErC,IAAMgC,SAAmB,EAAE;gBAE3BF,OAAOG,EAAE,CAAC,QAAQ,SAACC;oBACjBF,OAAO3B,IAAI,CAAC6B;gBACd;gBACAJ,OAAOG,EAAE,CAAC,OAAO;oBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;oBAEhD,IAAMC,iBAAiC;wBACrCtB,MAAMD,WAAWC,IAAI;wBACrBH,OAAOE,WAAWF,KAAK;wBACvBW,MAAMT,WAAWS,IAAI;wBACrBU,UAAUA;oBACZ;oBAEAjC,aAAa,MAAM,IAAIsC,sCAAiB,CAACD;gBAC3C;gBACAT,OAAOG,EAAE,CAAC,SAAS,SAACQ;oBAClBvC,aAAauC;gBACf;gBACA;YACF;QAEA,KAAK;YAAQ;gBACXzB,WAAWL,IAAI,GAAG;gBAClBK,WAAW0B,IAAI,GAAG1C,MAAM0C,IAAI;gBAC5B,IAAM
d,UAAS/B,aAAagC,SAAS;gBAErC,IAAMC,UAASF,QAAOG,cAAc,CAAC/B;gBACrC,OAAOE,aAAa,MAAM,IAAIyC,oBAAS,CAAC3B,YAA8Bc,SAAQnC,SAASa,IAAI,EAAER,MAAM4C,UAAU;YAC/G;IACF;IAEA,OAAOhD,SAAS,IAAIG,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/nextEntry.ts"],"sourcesContent":["import once from 'call-once-fn';\nimport { type DirectoryAttributes, DirectoryEntry, type FileAttributes, type LinkAttributes, SymbolicLinkEntry } from 'extract-base-iterator';\nimport path from 'path';\nimport FileEntry from './FileEntry.ts';\nimport type SevenZipIterator from './SevenZipIterator.ts';\nimport type { SevenZipEntry, SevenZipParser } from './sevenz/SevenZipParser.ts';\nimport type { Entry, EntryCallback } from './types.ts';\n\nexport type NextCallback = (error?: Error, entry?: Entry) => void;\n\n/** @internal */\ninterface InternalIterator {\n next(): SevenZipEntry | null;\n getParser(): SevenZipParser;\n}\n\n// Entry attributes object that gets mutated in switch - union of possible shapes\n// mtime is number for FileAttributes compatibility (timestamp in ms)\ntype EntryAttributesBuilder = {\n path: string;\n basename: string;\n mtime: number;\n mode: number;\n type?: 'file' | 'directory';\n size?: number;\n};\n\nexport default function nextEntry<_T>(iterator: SevenZipIterator, callback: EntryCallback): void {\n const internalIter = iterator._iterator as InternalIterator | null;\n if (!internalIter) {\n callback(new Error('iterator missing'));\n return;\n }\n\n let entry: SevenZipEntry | null = null;\n entry = internalIter.next();\n\n const nextCallback = once((err?: Error, entry?: Entry) => {\n // keep processing\n if (entry) iterator.push(nextEntry);\n err ? callback(err) : callback(null, entry ? { done: false, value: entry } : { done: true, value: null });\n }) as NextCallback;\n\n // done: signal iteration is complete (guard against stale lock)\n if (!iterator.lock || iterator.isDone() || !entry) return callback(null, { done: true, value: null });\n\n // Skip anti-files (these mark files to delete in delta archives)\n if (entry.isAntiFile) {\n iterator.push(nextEntry);\n return callback(null, null);\n }\n\n // Determine type from entry\n const type = entry.type;\n\n // Default modes (decimal values for Node 0.8 compatibility)\n // 0o755 = 493, 0o644 = 420\n const defaultMode = type === 'directory' ? 493 : 420;\n\n // Build attributes from 7z entry\n // mtime must be timestamp (number) for FileAttributes compatibility\n const mtimeDate = entry.mtime || new Date();\n const attributes: EntryAttributesBuilder = {\n path: entry.path.split(path.sep).filter(Boolean).join(path.sep),\n basename: entry.name,\n mtime: mtimeDate.getTime(),\n mode: entry.mode !== undefined ? 
entry.mode : defaultMode,\n };\n\n switch (type) {\n case 'directory':\n attributes.type = 'directory';\n return nextCallback(null, new DirectoryEntry(attributes as DirectoryAttributes));\n\n case 'link': {\n // For symlinks, the file content IS the symlink target path\n // Read the content to get the linkpath for SymbolicLinkEntry\n const parser = internalIter.getParser();\n const stream = parser.getEntryStream(entry);\n\n const chunks: Buffer[] = [];\n\n stream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n });\n stream.on('end', () => {\n const linkpath = Buffer.concat(chunks).toString('utf8');\n\n const linkAttributes: LinkAttributes = {\n path: attributes.path,\n mtime: attributes.mtime,\n mode: attributes.mode,\n linkpath: linkpath,\n };\n\n nextCallback(null, new SymbolicLinkEntry(linkAttributes));\n });\n stream.on('error', (streamErr: Error) => {\n nextCallback(streamErr);\n });\n return;\n }\n\n case 'file': {\n attributes.type = 'file';\n attributes.size = entry.size;\n const parser = internalIter.getParser();\n\n const stream = parser.getEntryStream(entry);\n return nextCallback(null, new FileEntry(attributes as FileAttributes, stream, iterator.lock, entry._canStream));\n }\n }\n\n return callback(new Error(`Unrecognized entry type: ${type}`));\n}\n"],"names":["nextEntry","iterator","callback","internalIter","_iterator","Error","entry","next","nextCallback","once","err","push","done","value","lock","isDone","isAntiFile","type","defaultMode","mtimeDate","mtime","Date","attributes","path","split","sep","filter","Boolean","join","basename","name","getTime","mode","undefined","DirectoryEntry","parser","getParser","stream","getEntryStream","chunks","on","chunk","linkpath","Buffer","concat","toString","linkAttributes","SymbolicLinkEntry","streamErr","size","FileEntry","_canStream"],"mappings":";;;;+BA2BA;;;eAAwBA;;;iEA3BP;mCACqG;2DACrG;kEACK;;;;;;AAwBP,SAASA,UAAcC,QAA0B,EAAEC,QAAuB;IACvF,IAAMC,eAAeF,SAASG,SAAS;IACvC,IAAI,CAACD,cAAc;QACjBD,SAAS,IAAIG,MAAM;QACnB;IACF;IAEA,IAAIC,QAA8B;IAClCA,QAAQH,aAAaI,IAAI;IAEzB,IAAMC,eAAeC,IAAAA,mBAAI,EAAC,SAACC,KAAaJ;QACtC,kBAAkB;QAClB,IAAIA,OAAOL,SAASU,IAAI,CAACX;QACzBU,MAAMR,SAASQ,OAAOR,SAAS,MAAMI,QAAQ;YAAEM,MAAM;YAAOC,OAAOP;QAAM,IAAI;YAAEM,MAAM;YAAMC,OAAO;QAAK;IACzG;IAEA,gEAAgE;IAChE,IAAI,CAACZ,SAASa,IAAI,IAAIb,SAASc,MAAM,MAAM,CAACT,OAAO,OAAOJ,SAAS,MAAM;QAAEU,MAAM;QAAMC,OAAO;IAAK;IAEnG,iEAAiE;IACjE,IAAIP,MAAMU,UAAU,EAAE;QACpBf,SAASU,IAAI,CAACX;QACd,OAAOE,SAAS,MAAM;IACxB;IAEA,4BAA4B;IAC5B,IAAMe,OAAOX,MAAMW,IAAI;IAEvB,4DAA4D;IAC5D,2BAA2B;IAC3B,IAAMC,cAAcD,SAAS,cAAc,MAAM;IAEjD,iCAAiC;IACjC,oEAAoE;IACpE,IAAME,YAAYb,MAAMc,KAAK,IAAI,IAAIC;IACrC,IAAMC,aAAqC;QACzCC,MAAMjB,MAAMiB,IAAI,CAACC,KAAK,CAACD,aAAI,CAACE,GAAG,EAAEC,MAAM,CAACC,SAASC,IAAI,CAACL,aAAI,CAACE,GAAG;QAC9DI,UAAUvB,MAAMwB,IAAI;QACpBV,OAAOD,UAAUY,OAAO;QACxBC,MAAM1B,MAAM0B,IAAI,KAAKC,YAAY3B,MAAM0B,IAAI,GAAGd;IAChD;IAEA,OAAQD;QACN,KAAK;YACHK,WAAWL,IAAI,GAAG;YAClB,OAAOT,aAAa,MAAM,IAAI0B,mCAAc,CAACZ;QAE/C,KAAK;YAAQ;gBACX,4DAA4D;gBAC5D,6DAA6D;gBAC7D,IAAMa,SAAShC,aAAaiC,SAAS;gBACrC,IAAMC,SAASF,OAAOG,cAAc,CAAChC;gBAErC,IAAMiC,SAAmB,EAAE;gBAE3BF,OAAOG,EAAE,CAAC,QAAQ,SAACC;oBACjBF,OAAO5B,IAAI,CAAC8B;gBACd;gBACAJ,OAAOG,EAAE,CAAC,OAAO;oBACf,IAAME,WAAWC,OAAOC,MAAM,CAACL,QAAQM,QAAQ,CAAC;oBAEhD,IAAMC,iBAAiC;wBACrCvB,MAAMD,WAAWC,IAAI;wBACrBH,OAAOE,WAAWF,KAAK;wBACvBY,MAAMV,WAAWU,IAAI;wBACrBU,UAAUA;oBACZ;oBAEAlC,aAAa,MAAM,IAAIuC,sCAAiB,CAACD;gBAC3C;gBACAT,OAAOG,EAAE,CAAC,SAAS,SAACQ;oBAClBxC,aAAawC;gBACf;gBACA;YACF;QAEA,KAAK;YAAQ;gBACX1B,WAAWL,IAAI,GAAG;gBAClBK,WAAW2B,IAAI,GAAG3C,MAAM2C,IAAI;gBAC5B,IA
AMd,UAAShC,aAAaiC,SAAS;gBAErC,IAAMC,UAASF,QAAOG,cAAc,CAAChC;gBACrC,OAAOE,aAAa,MAAM,IAAI0C,oBAAS,CAAC5B,YAA8Be,SAAQpC,SAASa,IAAI,EAAER,MAAM6C,UAAU;YAC/G;IACF;IAEA,OAAOjD,SAAS,IAAIG,MAAM,AAAC,4BAAgC,OAALY;AACxD"}
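
The nextEntry change above swaps lodash.compact for the built-in Array.prototype.filter when normalising entry paths; for string segments the two are equivalent, since compact also just drops falsy values. A minimal sketch of the replacement pattern (the example path is made up):

import * as path from 'path';

// Split on the platform separator, drop empty segments (what compact did),
// and re-join - no external dependency required.
function normalizeEntryPath(entryPath: string): string {
  return entryPath.split(path.sep).filter(Boolean).join(path.sep);
}

// On POSIX: '/dir//file.txt' -> 'dir/file.txt' (leading and doubled separators
// produce empty strings, which filter(Boolean) removes)
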
package/dist/cjs/sevenz/ArchiveSource.d.cts CHANGED
@@ -46,6 +46,7 @@ export declare class FileSource implements ArchiveSource {
  private size;
  constructor(fd: number, size: number);
  read(position: number, length: number): Buffer;
+ private readChunk;
  getSize(): number;
  close(): void;
  /**
package/dist/cjs/sevenz/ArchiveSource.d.ts CHANGED
@@ -46,6 +46,7 @@ export declare class FileSource implements ArchiveSource {
  private size;
  constructor(fd: number, size: number);
  read(position: number, length: number): Buffer;
+ private readChunk;
  getSize(): number;
  close(): void;
  /**
package/dist/cjs/sevenz/ArchiveSource.js CHANGED
@@ -87,6 +87,29 @@ var FileSource = /*#__PURE__*/ function() {
  }
  var _proto = FileSource.prototype;
  _proto.read = function read(position, length) {
+ // Handle large reads by chunking to fit 32-bit signed int limit
+ var MAX_INT32 = 0x7fffffff; // 2,147,483,647 bytes (~2GB)
+ if (length <= MAX_INT32) {
+ return this.readChunk(position, length);
+ }
+ // For large reads, split into multiple chunks
+ var chunks = [];
+ var totalBytesRead = 0;
+ var currentPos = position;
+ while(totalBytesRead < length){
+ var remaining = length - totalBytesRead;
+ var chunkSize = Math.min(remaining, MAX_INT32);
+ var chunk = this.readChunk(currentPos, chunkSize);
+ chunks.push(chunk);
+ totalBytesRead += chunk.length;
+ currentPos += chunk.length;
+ if (chunk.length < chunkSize) {
+ break;
+ }
+ }
+ return Buffer.concat(chunks);
+ };
+ _proto.readChunk = function readChunk(position, length) {
  var buf = (0, _extractbaseiterator.allocBuffer)(length);
  var bytesRead = _fs.default.readSync(this.fd, buf, 0, length, position);
  if (bytesRead < length) {
package/dist/cjs/sevenz/ArchiveSource.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n }\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n 
return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n }\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjFyB;yDACvB;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IAOC,GAPDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,IAAMgB,MAAMC,IAAAA,gCAAW,EAACjB;QACxB,IAAMkB,YAAYC,WAAE,CAACC,QAAQ,CAAC,IAAI,CAACN,EAAE,EAAEE,KAAK,GAAGhB,QAAQD;QACvD,IAAImB,YAAYlB,QAAQ;YACtB,OAAOgB,IAAIf,KAAK,CAAC,GAAGiB;QACtB;QACA,OAAOF;IACT;IAEAd,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFgB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACP,EAAE;QACtB,EAAE,OAAOQ,IAAI;QACX,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDlB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAII,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMd,YAAY,OAAO,cAAc;QACvC,IAAIe,aAAqC;QAEzC,IAAM/B,SAASF,qBAAqB,SAACkC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMZ,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASkB;YAC5C,IAAIN,UAAU,GAAG;gBACfY,WAAW;gBACXE,EAAEf,IAAI,CAAC;gBACP;YACF;YAEAY,UAAU;YACV,IAAM1B,SAASoB,IAAAA,gCAAW,EAACL;YAC3B,IAAMe,gBAAgBtB,SAASa;YAE/BC,WAAE,CAACrB,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQe,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEf,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIkB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEf,IAAI,CAAC;gBACT,OAAO;oBACLO,aAAaW;oBACbH,EAAEf,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAG4B;gBACzB;YACF;QACF;QAEA,OAAOnC;IACT;WA/EWH"}
+ {"version":3,"sources":["/Users/kevin/Dev/OpenSource/iterators/7z-iterator/src/sevenz/ArchiveSource.ts"],"sourcesContent":["/**\n * ArchiveSource - Abstraction for reading 7z archive data\n *\n * Provides a common interface for reading archive data from either\n * a file descriptor or an in-memory buffer.\n */\n\nimport { allocBuffer, Readable } from 'extract-base-iterator';\nimport fs from 'fs';\nimport type Stream from 'stream';\n\n// Helper to create a Readable stream compatible with Node 0.8\nfunction createReadableStream(readFn: (stream: Stream.Readable) => void): Stream.Readable {\n const stream = new Readable();\n stream._read = function () {\n readFn(this);\n };\n return stream;\n}\n\n/**\n * Archive source abstraction - allows reading from file descriptor or buffer\n */\nexport interface ArchiveSource {\n read(position: number, length: number): Buffer;\n getSize(): number;\n close(): void;\n /**\n * Create a readable stream for a portion of the archive.\n * Used for streaming decompression.\n */\n createReadStream(offset: number, length: number): Stream.Readable;\n}\n\n/**\n * Buffer-based archive source\n *\n * Used when the entire archive is already in memory.\n */\nexport class BufferSource implements ArchiveSource {\n private buffer: Buffer;\n\n constructor(buffer: Buffer) {\n this.buffer = buffer;\n }\n\n read(position: number, length: number): Buffer {\n return this.buffer.slice(position, position + length);\n }\n\n getSize(): number {\n return this.buffer.length;\n }\n\n close(): void {\n // Nothing to close for buffer\n }\n\n /**\n * Create a readable stream for a portion of the buffer.\n * Streams the data in chunks to avoid blocking.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const buffer = this.buffer;\n const end = Math.min(offset + length, buffer.length);\n let currentPos = offset;\n const chunkSize = 65536; // 64KB chunks\n\n return createReadableStream((stream) => {\n if (currentPos >= end) {\n stream.push(null);\n return;\n }\n\n const toRead = Math.min(chunkSize, end - currentPos);\n const chunk = buffer.slice(currentPos, currentPos + toRead);\n currentPos += toRead;\n stream.push(chunk);\n });\n }\n}\n\n/**\n * File descriptor based archive source\n *\n * Used for reading directly from a file on disk.\n * More memory efficient for large archives.\n */\nexport class FileSource implements ArchiveSource {\n private fd: number;\n private size: number;\n\n constructor(fd: number, size: number) {\n this.fd = fd;\n this.size = size;\n }\n\n read(position: number, length: number): Buffer {\n // Handle large reads by chunking to fit 32-bit signed int limit\n const MAX_INT32 = 0x7fffffff; // 2,147,483,647 bytes (~2GB)\n\n if (length <= MAX_INT32) {\n return this.readChunk(position, length);\n }\n\n // For large reads, split into multiple chunks\n const chunks: Buffer[] = [];\n let totalBytesRead = 0;\n let currentPos = position;\n\n while (totalBytesRead < length) {\n const remaining = length - totalBytesRead;\n const chunkSize = Math.min(remaining, MAX_INT32);\n const chunk = this.readChunk(currentPos, chunkSize);\n\n chunks.push(chunk);\n totalBytesRead += chunk.length;\n currentPos += chunk.length;\n\n if (chunk.length < chunkSize) {\n // EOF reached\n break;\n }\n }\n\n return Buffer.concat(chunks);\n }\n\n private readChunk(position: number, length: number): Buffer {\n const buf = allocBuffer(length);\n const bytesRead = fs.readSync(this.fd, buf, 0, length, position);\n if (bytesRead < length) {\n return buf.slice(0, bytesRead);\n 
}\n return buf;\n }\n\n getSize(): number {\n return this.size;\n }\n\n close(): void {\n try {\n fs.closeSync(this.fd);\n } catch (_e) {\n // Ignore close errors\n }\n }\n\n /**\n * Create a readable stream for a portion of the file.\n * Uses async fs.read() to avoid blocking the event loop.\n */\n createReadStream(offset: number, length: number): Stream.Readable {\n const fd = this.fd;\n let bytesRead = 0;\n let reading = false;\n let finished = false;\n const chunkSize = 65536; // 64KB chunks\n let _streamRef: Stream.Readable | null = null;\n\n const stream = createReadableStream((s) => {\n _streamRef = s;\n if (reading || finished) return; // Prevent re-entrant reads\n\n const toRead = Math.min(chunkSize, length - bytesRead);\n if (toRead <= 0) {\n finished = true;\n s.push(null);\n return;\n }\n\n reading = true;\n const buffer = allocBuffer(toRead);\n const currentOffset = offset + bytesRead;\n\n fs.read(fd, buffer, 0, toRead, currentOffset, (err, n) => {\n reading = false;\n\n if (err) {\n // Emit error for Node 0.8 compatibility (no destroy method)\n s.emit('error', err);\n finished = true;\n s.push(null);\n return;\n }\n\n if (n === 0) {\n finished = true;\n s.push(null);\n } else {\n bytesRead += n;\n s.push(buffer.slice(0, n));\n }\n });\n });\n\n return stream;\n }\n}\n"],"names":["BufferSource","FileSource","createReadableStream","readFn","stream","Readable","_read","buffer","read","position","length","slice","getSize","close","createReadStream","offset","end","Math","min","currentPos","chunkSize","push","toRead","chunk","fd","size","MAX_INT32","readChunk","chunks","totalBytesRead","remaining","Buffer","concat","buf","allocBuffer","bytesRead","fs","readSync","closeSync","_e","reading","finished","_streamRef","s","currentOffset","err","n","emit"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;QAkCYA;eAAAA;;QAiDAC;eAAAA;;;mCAjFyB;yDACvB;;;;;;;;;;;AAGf,8DAA8D;AAC9D,SAASC,qBAAqBC,MAAyC;IACrE,IAAMC,SAAS,IAAIC,6BAAQ;IAC3BD,OAAOE,KAAK,GAAG;QACbH,OAAO,IAAI;IACb;IACA,OAAOC;AACT;AAqBO,IAAA,AAAMJ,6BAAN;;aAAMA,aAGCO,MAAc;gCAHfP;QAIT,IAAI,CAACO,MAAM,GAAGA;;iBAJLP;IAOXQ,OAAAA,IAEC,GAFDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,OAAO,IAAI,CAACH,MAAM,CAACI,KAAK,CAACF,UAAUA,WAAWC;IAChD;IAEAE,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACL,MAAM,CAACG,MAAM;IAC3B;IAEAG,OAAAA,KAEC,GAFDA,SAAAA;IACE,8BAA8B;IAChC;IAEA;;;GAGC,GACDC,OAAAA,gBAiBC,GAjBDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMH,SAAS,IAAI,CAACA,MAAM;QAC1B,IAAMS,MAAMC,KAAKC,GAAG,CAACH,SAASL,QAAQH,OAAOG,MAAM;QACnD,IAAIS,aAAaJ;QACjB,IAAMK,YAAY,OAAO,cAAc;QAEvC,OAAOlB,qBAAqB,SAACE;YAC3B,IAAIe,cAAcH,KAAK;gBACrBZ,OAAOiB,IAAI,CAAC;gBACZ;YACF;YAEA,IAAMC,SAASL,KAAKC,GAAG,CAACE,WAAWJ,MAAMG;YACzC,IAAMI,QAAQhB,OAAOI,KAAK,CAACQ,YAAYA,aAAaG;YACpDH,cAAcG;YACdlB,OAAOiB,IAAI,CAACE;QACd;IACF;WAxCWvB;;AAiDN,IAAA,AAAMC,2BAAN;;aAAMA,WAICuB,EAAU,EAAEC,IAAY;gCAJzBxB;QAKT,IAAI,CAACuB,EAAE,GAAGA;QACV,IAAI,CAACC,IAAI,GAAGA;;iBANHxB;IASXO,OAAAA,IA6BC,GA7BDA,SAAAA,KAAKC,QAAgB,EAAEC,MAAc;QACnC,gEAAgE;QAChE,IAAMgB,YAAY,YAAY,6BAA6B;QAE3D,IAAIhB,UAAUgB,WAAW;YACvB,OAAO,IAAI,CAACC,SAAS,CAAClB,UAAUC;QAClC;QAEA,8CAA8C;QAC9C,IAAMkB,SAAmB,EAAE;QAC3B,IAAIC,iBAAiB;QACrB,IAAIV,aAAaV;QAEjB,MAAOoB,iBAAiBnB,OAAQ;YAC9B,IAAMoB,YAAYpB,SAASmB;YAC3B,IAAMT,YAAYH,KAAKC,GAAG,CAACY,WAAWJ;YACtC,IAAMH,QAAQ,IAAI,CAACI,SAAS,CAACR,YAAYC;YAEzCQ,OAAOP,IAAI,CAACE;YACZM,kBAAkBN,MAAMb,MAAM;YAC9BS,cAAcI,MAAMb,MAAM;YAE1B,IAAIa,MAAMb,MAAM,GAAGU,WAAW;gBAE5B;YACF;QACF;QAEA,OAAOW,OAAOC,MAAM,CAACJ;IACvB;IAEA,OAAQD,SAOP,GAPD,SAAQA,UAAUlB,QAAgB,EAAEC,MAAc;QAChD,IAAMuB,MAAMC,IAAAA,gCAAW,EAACxB;QACxB,IAAMyB,YAAYC,WAAE,CAACC,QAAQ,CAAC
,IAAI,CAACb,EAAE,EAAES,KAAK,GAAGvB,QAAQD;QACvD,IAAI0B,YAAYzB,QAAQ;YACtB,OAAOuB,IAAItB,KAAK,CAAC,GAAGwB;QACtB;QACA,OAAOF;IACT;IAEArB,OAAAA,OAEC,GAFDA,SAAAA;QACE,OAAO,IAAI,CAACa,IAAI;IAClB;IAEAZ,OAAAA,KAMC,GANDA,SAAAA;QACE,IAAI;YACFuB,WAAE,CAACE,SAAS,CAAC,IAAI,CAACd,EAAE;QACtB,EAAE,OAAOe,IAAI;QACX,sBAAsB;QACxB;IACF;IAEA;;;GAGC,GACDzB,OAAAA,gBA6CC,GA7CDA,SAAAA,iBAAiBC,MAAc,EAAEL,MAAc;QAC7C,IAAMc,KAAK,IAAI,CAACA,EAAE;QAClB,IAAIW,YAAY;QAChB,IAAIK,UAAU;QACd,IAAIC,WAAW;QACf,IAAMrB,YAAY,OAAO,cAAc;QACvC,IAAIsB,aAAqC;QAEzC,IAAMtC,SAASF,qBAAqB,SAACyC;YACnCD,aAAaC;YACb,IAAIH,WAAWC,UAAU,QAAQ,2BAA2B;YAE5D,IAAMnB,SAASL,KAAKC,GAAG,CAACE,WAAWV,SAASyB;YAC5C,IAAIb,UAAU,GAAG;gBACfmB,WAAW;gBACXE,EAAEtB,IAAI,CAAC;gBACP;YACF;YAEAmB,UAAU;YACV,IAAMjC,SAAS2B,IAAAA,gCAAW,EAACZ;YAC3B,IAAMsB,gBAAgB7B,SAASoB;YAE/BC,WAAE,CAAC5B,IAAI,CAACgB,IAAIjB,QAAQ,GAAGe,QAAQsB,eAAe,SAACC,KAAKC;gBAClDN,UAAU;gBAEV,IAAIK,KAAK;oBACP,4DAA4D;oBAC5DF,EAAEI,IAAI,CAAC,SAASF;oBAChBJ,WAAW;oBACXE,EAAEtB,IAAI,CAAC;oBACP;gBACF;gBAEA,IAAIyB,MAAM,GAAG;oBACXL,WAAW;oBACXE,EAAEtB,IAAI,CAAC;gBACT,OAAO;oBACLc,aAAaW;oBACbH,EAAEtB,IAAI,CAACd,OAAOI,KAAK,CAAC,GAAGmC;gBACzB;YACF;QACF;QAEA,OAAO1C;IACT;WA9GWH"}
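
The FileSource change above splits oversized reads because a single fs.readSync call cannot take a length above the 32-bit signed integer limit. A standalone sketch of the same splitting logic, assuming a plain file descriptor (readLarge and its variable names are illustrative, not the package's API):

import * as fs from 'fs';

const MAX_INT32 = 0x7fffffff; // largest length passed to a single readSync call

// Read `length` bytes from `position`, issuing multiple readSync calls when the
// request exceeds the 32-bit limit, and stopping early if EOF is hit.
function readLarge(fd: number, position: number, length: number): Buffer {
  const chunks: Buffer[] = [];
  let done = 0;
  while (done < length) {
    const want = Math.min(length - done, MAX_INT32);
    const buf = Buffer.alloc(want);
    const got = fs.readSync(fd, buf, 0, want, position + done);
    chunks.push(got < want ? buf.slice(0, got) : buf);
    done += got;
    if (got < want) break; // short read: end of file
  }
  return Buffer.concat(chunks);
}
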
package/dist/cjs/sevenz/SevenZipParser.js CHANGED
@@ -733,17 +733,32 @@ var SevenZipParser = /*#__PURE__*/ function() {
  return data;
  }
  // Calculate packed data position
- var packPos = _constantsts.SIGNATURE_HEADER_SIZE + this.streamsInfo.packPos;
+ // Use Math.max to prevent 32-bit signed overflow
+ var signedHeaderSize = _constantsts.SIGNATURE_HEADER_SIZE;
+ var signedPackPos = this.streamsInfo.packPos;
+ var packPos = Math.max(signedHeaderSize, 0) + Math.max(signedPackPos, 0);
  // Find which pack stream this folder uses
  var packStreamIndex = 0;
  for(var j = 0; j < folderIndex; j++){
  packStreamIndex += this.streamsInfo.folders[j].packedStreams.length;
  }
- // Calculate position of this pack stream
+ // Calculate position of this pack stream - PREVENT OVERFLOW
  for(var k = 0; k < packStreamIndex; k++){
- packPos += this.streamsInfo.packSizes[k];
+ var size = this.streamsInfo.packSizes[k];
+ if (packPos + size < packPos) {
+ throw (0, _constantsts.createCodedError)("Pack position overflow at index ".concat(k), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ }
+ packPos += size;
  }
  var packSize = this.streamsInfo.packSizes[packStreamIndex];
+ // Validate pack size to prevent overflow
+ // Upper bound is Number.MAX_SAFE_INTEGER (2^53-1 = 9PB) - safe for all realistic archives
+ if (packSize < 0 || packSize > Number.MAX_SAFE_INTEGER) {
+ throw (0, _constantsts.createCodedError)("Invalid pack size: ".concat(packSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ }
+ if (packPos < 0 || packPos > Number.MAX_SAFE_INTEGER) {
+ throw (0, _constantsts.createCodedError)("Invalid pack position: ".concat(packPos), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ }
  // Read packed data
  var packedData = this.source.read(packPos, packSize);
  // Decompress through codec chain
@@ -753,6 +768,10 @@ var SevenZipParser = /*#__PURE__*/ function() {
  var codec = (0, _indexts.getCodec)(coderInfo.id);
  // Get unpack size for this coder (needed by LZMA)
  var unpackSize = folder.unpackSizes[l];
+ // Validate unpack size to prevent overflow
+ if (unpackSize < 0 || unpackSize > Number.MAX_SAFE_INTEGER) {
+ throw (0, _constantsts.createCodedError)("Invalid unpack size: ".concat(unpackSize), _constantsts.ErrorCode.CORRUPT_ARCHIVE);
+ }
  data2 = codec.decode(data2, coderInfo.properties, unpackSize);
  }
  // Cache only if more files remain in this folder
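
The SevenZipParser hunks above add range checks on header-supplied sizes and offsets before they drive any reads. A condensed sketch of that validation pattern (assertSafeSize is an illustrative helper; the package itself throws coded CORRUPT_ARCHIVE errors via createCodedError, as shown in the diff):

// Reject negative values and anything above Number.MAX_SAFE_INTEGER (2^53 - 1):
// 64-bit sizes outside that range cannot be represented exactly as JavaScript
// numbers, so later offset arithmetic would silently lose precision.
function assertSafeSize(value: number, label: string): void {
  if (!Number.isFinite(value) || value < 0 || value > Number.MAX_SAFE_INTEGER) {
    throw new Error('Invalid ' + label + ': ' + value);
  }
}

// Mirrors the checks in the diff:
// assertSafeSize(packSize, 'pack size');
// assertSafeSize(packPos, 'pack position');
// assertSafeSize(unpackSize, 'unpack size');
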