@cj-tech-master/excelts 4.2.0-canary.20260110111632.c88c61c → 4.2.1-canary.20260111102127.f808a37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (232)
  1. package/THIRD_PARTY_NOTICES.md +0 -31
  2. package/dist/browser/index.browser.d.ts +1 -0
  3. package/dist/browser/index.browser.js +12 -0
  4. package/dist/{esm/modules/archive → browser/modules/archive/compression}/compress.base.js +1 -1
  5. package/dist/{types/modules/archive → browser/modules/archive/compression}/compress.browser.d.ts +2 -8
  6. package/dist/browser/modules/archive/{compress.browser.js → compression/compress.browser.js} +3 -11
  7. package/dist/browser/modules/archive/{compress.d.ts → compression/compress.d.ts} +2 -2
  8. package/dist/{esm/modules/archive → browser/modules/archive/compression}/compress.js +1 -1
  9. package/dist/browser/modules/archive/{crc32.browser.d.ts → compression/crc32.browser.d.ts} +1 -1
  10. package/dist/browser/modules/archive/{crc32.d.ts → compression/crc32.d.ts} +1 -1
  11. package/dist/browser/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  12. package/dist/browser/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  13. package/dist/browser/modules/archive/{streaming-compress.browser.d.ts → compression/streaming-compress.browser.d.ts} +2 -2
  14. package/dist/browser/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  15. package/dist/browser/modules/archive/{streaming-compress.d.ts → compression/streaming-compress.d.ts} +2 -2
  16. package/dist/browser/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  17. package/dist/browser/modules/archive/defaults.d.ts +1 -0
  18. package/dist/browser/modules/archive/defaults.js +6 -3
  19. package/dist/browser/modules/archive/index.base.d.ts +4 -4
  20. package/dist/browser/modules/archive/index.base.js +3 -6
  21. package/dist/browser/modules/archive/index.browser.d.ts +3 -4
  22. package/dist/browser/modules/archive/index.browser.js +3 -7
  23. package/dist/browser/modules/archive/index.d.ts +3 -4
  24. package/dist/browser/modules/archive/index.js +3 -5
  25. package/dist/browser/modules/archive/internal/byte-queue.d.ts +33 -0
  26. package/dist/browser/modules/archive/internal/byte-queue.js +407 -0
  27. package/dist/browser/modules/archive/io/archive-sink.d.ts +9 -0
  28. package/dist/browser/modules/archive/io/archive-sink.js +77 -0
  29. package/dist/browser/modules/archive/io/archive-source.d.ts +8 -0
  30. package/dist/browser/modules/archive/io/archive-source.js +107 -0
  31. package/dist/browser/modules/archive/{extract.d.ts → unzip/extract.d.ts} +2 -2
  32. package/dist/browser/modules/archive/unzip/index.d.ts +40 -0
  33. package/dist/browser/modules/archive/unzip/index.js +164 -0
  34. package/dist/browser/modules/archive/{parse.base.d.ts → unzip/stream.base.d.ts} +36 -2
  35. package/dist/browser/modules/archive/unzip/stream.base.js +1022 -0
  36. package/dist/browser/modules/archive/{parse.browser.d.ts → unzip/stream.browser.d.ts} +1 -1
  37. package/dist/browser/modules/archive/{parse.browser.js → unzip/stream.browser.js} +371 -110
  38. package/dist/browser/modules/archive/{parse.d.ts → unzip/stream.d.ts} +2 -2
  39. package/dist/{esm/modules/archive/parse.js → browser/modules/archive/unzip/stream.js} +6 -5
  40. package/dist/browser/modules/archive/{zip-parser.d.ts → unzip/zip-parser.d.ts} +1 -1
  41. package/dist/{esm/modules/archive → browser/modules/archive/unzip}/zip-parser.js +38 -24
  42. package/dist/browser/modules/archive/utils/async-queue.d.ts +7 -0
  43. package/dist/browser/modules/archive/utils/async-queue.js +103 -0
  44. package/dist/browser/modules/archive/utils/bytes.js +16 -16
  45. package/dist/browser/modules/archive/utils/compressibility.d.ts +10 -0
  46. package/dist/browser/modules/archive/utils/compressibility.js +57 -0
  47. package/dist/browser/modules/archive/utils/parse-buffer.js +21 -23
  48. package/dist/browser/modules/archive/utils/pattern-scanner.d.ts +21 -0
  49. package/dist/browser/modules/archive/utils/pattern-scanner.js +27 -0
  50. package/dist/browser/modules/archive/utils/timestamps.js +62 -1
  51. package/dist/browser/modules/archive/utils/zip-extra-fields.d.ts +1 -1
  52. package/dist/browser/modules/archive/utils/zip-extra-fields.js +26 -14
  53. package/dist/browser/modules/archive/zip/index.d.ts +42 -0
  54. package/dist/browser/modules/archive/zip/index.js +157 -0
  55. package/dist/browser/modules/archive/{streaming-zip.d.ts → zip/stream.d.ts} +28 -5
  56. package/dist/browser/modules/archive/{streaming-zip.js → zip/stream.js} +192 -48
  57. package/dist/browser/modules/archive/zip/zip-bytes.d.ts +73 -0
  58. package/dist/browser/modules/archive/zip/zip-bytes.js +239 -0
  59. package/dist/{esm/modules/archive → browser/modules/archive/zip}/zip-entry-metadata.js +3 -3
  60. package/dist/browser/modules/archive/{zip-records.d.ts → zip-spec/zip-records.d.ts} +20 -0
  61. package/dist/browser/modules/archive/zip-spec/zip-records.js +126 -0
  62. package/dist/browser/modules/excel/stream/workbook-reader.browser.js +1 -1
  63. package/dist/browser/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
  64. package/dist/browser/modules/excel/stream/workbook-writer.browser.js +1 -1
  65. package/dist/browser/modules/excel/xlsx/xlsx.browser.js +3 -6
  66. package/dist/browser/modules/excel/xlsx/xlsx.js +1 -1
  67. package/dist/browser/modules/stream/streams.browser.d.ts +28 -30
  68. package/dist/browser/modules/stream/streams.browser.js +830 -710
  69. package/dist/browser/modules/stream/streams.js +140 -58
  70. package/dist/cjs/modules/archive/{compress.base.js → compression/compress.base.js} +1 -1
  71. package/dist/cjs/modules/archive/{compress.browser.js → compression/compress.browser.js} +3 -11
  72. package/dist/cjs/modules/archive/{compress.js → compression/compress.js} +1 -1
  73. package/dist/cjs/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  74. package/dist/cjs/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  75. package/dist/cjs/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  76. package/dist/cjs/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  77. package/dist/cjs/modules/archive/defaults.js +7 -4
  78. package/dist/cjs/modules/archive/index.base.js +9 -19
  79. package/dist/cjs/modules/archive/index.browser.js +4 -10
  80. package/dist/cjs/modules/archive/index.js +4 -8
  81. package/dist/cjs/modules/archive/internal/byte-queue.js +411 -0
  82. package/dist/cjs/modules/archive/io/archive-sink.js +82 -0
  83. package/dist/cjs/modules/archive/io/archive-source.js +114 -0
  84. package/dist/cjs/modules/archive/unzip/index.js +170 -0
  85. package/dist/cjs/modules/archive/unzip/stream.base.js +1044 -0
  86. package/dist/cjs/modules/archive/{parse.browser.js → unzip/stream.browser.js} +372 -111
  87. package/dist/cjs/modules/archive/{parse.js → unzip/stream.js} +9 -8
  88. package/dist/cjs/modules/archive/{zip-parser.js → unzip/zip-parser.js} +47 -33
  89. package/dist/cjs/modules/archive/utils/async-queue.js +106 -0
  90. package/dist/cjs/modules/archive/utils/bytes.js +16 -16
  91. package/dist/cjs/modules/archive/utils/compressibility.js +60 -0
  92. package/dist/cjs/modules/archive/utils/parse-buffer.js +21 -23
  93. package/dist/cjs/modules/archive/utils/pattern-scanner.js +31 -0
  94. package/dist/cjs/modules/archive/utils/timestamps.js +64 -3
  95. package/dist/cjs/modules/archive/utils/zip-extra-fields.js +26 -14
  96. package/dist/cjs/modules/archive/zip/index.js +162 -0
  97. package/dist/cjs/modules/archive/{streaming-zip.js → zip/stream.js} +194 -50
  98. package/dist/cjs/modules/archive/zip/zip-bytes.js +242 -0
  99. package/dist/cjs/modules/archive/{zip-entry-metadata.js → zip/zip-entry-metadata.js} +5 -5
  100. package/dist/cjs/modules/archive/zip-spec/zip-records.js +136 -0
  101. package/dist/cjs/modules/excel/stream/workbook-reader.browser.js +2 -2
  102. package/dist/cjs/modules/excel/stream/workbook-writer.browser.js +4 -4
  103. package/dist/cjs/modules/excel/xlsx/xlsx.browser.js +6 -9
  104. package/dist/cjs/modules/excel/xlsx/xlsx.js +2 -2
  105. package/dist/cjs/modules/stream/streams.browser.js +830 -710
  106. package/dist/cjs/modules/stream/streams.js +140 -58
  107. package/dist/esm/index.browser.js +12 -0
  108. package/dist/{browser/modules/archive → esm/modules/archive/compression}/compress.base.js +1 -1
  109. package/dist/esm/modules/archive/{compress.browser.js → compression/compress.browser.js} +3 -11
  110. package/dist/{browser/modules/archive → esm/modules/archive/compression}/compress.js +1 -1
  111. package/dist/esm/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  112. package/dist/esm/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  113. package/dist/esm/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  114. package/dist/esm/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  115. package/dist/esm/modules/archive/defaults.js +6 -3
  116. package/dist/esm/modules/archive/index.base.js +3 -6
  117. package/dist/esm/modules/archive/index.browser.js +3 -7
  118. package/dist/esm/modules/archive/index.js +3 -5
  119. package/dist/esm/modules/archive/internal/byte-queue.js +407 -0
  120. package/dist/esm/modules/archive/io/archive-sink.js +77 -0
  121. package/dist/esm/modules/archive/io/archive-source.js +107 -0
  122. package/dist/esm/modules/archive/unzip/index.js +164 -0
  123. package/dist/esm/modules/archive/unzip/stream.base.js +1022 -0
  124. package/dist/esm/modules/archive/{parse.browser.js → unzip/stream.browser.js} +371 -110
  125. package/dist/{browser/modules/archive/parse.js → esm/modules/archive/unzip/stream.js} +6 -5
  126. package/dist/{browser/modules/archive → esm/modules/archive/unzip}/zip-parser.js +38 -24
  127. package/dist/esm/modules/archive/utils/async-queue.js +103 -0
  128. package/dist/esm/modules/archive/utils/bytes.js +16 -16
  129. package/dist/esm/modules/archive/utils/compressibility.js +57 -0
  130. package/dist/esm/modules/archive/utils/parse-buffer.js +21 -23
  131. package/dist/esm/modules/archive/utils/pattern-scanner.js +27 -0
  132. package/dist/esm/modules/archive/utils/timestamps.js +62 -1
  133. package/dist/esm/modules/archive/utils/zip-extra-fields.js +26 -14
  134. package/dist/esm/modules/archive/zip/index.js +157 -0
  135. package/dist/esm/modules/archive/{streaming-zip.js → zip/stream.js} +192 -48
  136. package/dist/esm/modules/archive/zip/zip-bytes.js +239 -0
  137. package/dist/{browser/modules/archive → esm/modules/archive/zip}/zip-entry-metadata.js +3 -3
  138. package/dist/esm/modules/archive/zip-spec/zip-records.js +126 -0
  139. package/dist/esm/modules/excel/stream/workbook-reader.browser.js +1 -1
  140. package/dist/esm/modules/excel/stream/workbook-writer.browser.js +1 -1
  141. package/dist/esm/modules/excel/xlsx/xlsx.browser.js +3 -6
  142. package/dist/esm/modules/excel/xlsx/xlsx.js +1 -1
  143. package/dist/esm/modules/stream/streams.browser.js +830 -710
  144. package/dist/esm/modules/stream/streams.js +140 -58
  145. package/dist/iife/THIRD_PARTY_NOTICES.md +0 -31
  146. package/dist/iife/excelts.iife.js +6190 -4400
  147. package/dist/iife/excelts.iife.js.map +1 -1
  148. package/dist/iife/excelts.iife.min.js +103 -31
  149. package/dist/types/index.browser.d.ts +1 -0
  150. package/dist/{browser/modules/archive → types/modules/archive/compression}/compress.browser.d.ts +2 -8
  151. package/dist/types/modules/archive/{streaming-compress.browser.d.ts → compression/streaming-compress.browser.d.ts} +1 -1
  152. package/dist/types/modules/archive/defaults.d.ts +1 -0
  153. package/dist/types/modules/archive/index.base.d.ts +4 -4
  154. package/dist/types/modules/archive/index.browser.d.ts +3 -4
  155. package/dist/types/modules/archive/index.d.ts +3 -4
  156. package/dist/types/modules/archive/internal/byte-queue.d.ts +33 -0
  157. package/dist/types/modules/archive/io/archive-sink.d.ts +9 -0
  158. package/dist/types/modules/archive/io/archive-source.d.ts +8 -0
  159. package/dist/types/modules/archive/unzip/index.d.ts +40 -0
  160. package/dist/types/modules/archive/{parse.base.d.ts → unzip/stream.base.d.ts} +38 -4
  161. package/dist/types/modules/archive/{parse.browser.d.ts → unzip/stream.browser.d.ts} +2 -2
  162. package/dist/types/modules/archive/{parse.d.ts → unzip/stream.d.ts} +3 -3
  163. package/dist/types/modules/archive/{zip-parser.d.ts → unzip/zip-parser.d.ts} +1 -1
  164. package/dist/types/modules/archive/utils/async-queue.d.ts +7 -0
  165. package/dist/types/modules/archive/utils/compressibility.d.ts +10 -0
  166. package/dist/types/modules/archive/utils/pattern-scanner.d.ts +21 -0
  167. package/dist/types/modules/archive/utils/zip-extra-fields.d.ts +1 -1
  168. package/dist/types/modules/archive/zip/index.d.ts +42 -0
  169. package/dist/types/modules/archive/{streaming-zip.d.ts → zip/stream.d.ts} +29 -6
  170. package/dist/types/modules/archive/zip/zip-bytes.d.ts +73 -0
  171. package/dist/types/modules/archive/{zip-entry-metadata.d.ts → zip/zip-entry-metadata.d.ts} +1 -1
  172. package/dist/types/modules/archive/{zip-records.d.ts → zip-spec/zip-records.d.ts} +20 -0
  173. package/dist/types/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
  174. package/dist/types/modules/stream/streams.browser.d.ts +28 -30
  175. package/package.json +5 -1
  176. package/dist/browser/modules/archive/byte-queue.d.ts +0 -18
  177. package/dist/browser/modules/archive/byte-queue.js +0 -125
  178. package/dist/browser/modules/archive/parse.base.js +0 -644
  179. package/dist/browser/modules/archive/utils/zip-extra.d.ts +0 -18
  180. package/dist/browser/modules/archive/utils/zip-extra.js +0 -68
  181. package/dist/browser/modules/archive/zip-builder.d.ts +0 -117
  182. package/dist/browser/modules/archive/zip-builder.js +0 -292
  183. package/dist/browser/modules/archive/zip-constants.d.ts +0 -18
  184. package/dist/browser/modules/archive/zip-constants.js +0 -23
  185. package/dist/browser/modules/archive/zip-records.js +0 -84
  186. package/dist/cjs/modules/archive/byte-queue.js +0 -129
  187. package/dist/cjs/modules/archive/parse.base.js +0 -666
  188. package/dist/cjs/modules/archive/utils/zip-extra.js +0 -74
  189. package/dist/cjs/modules/archive/zip-builder.js +0 -297
  190. package/dist/cjs/modules/archive/zip-constants.js +0 -26
  191. package/dist/cjs/modules/archive/zip-records.js +0 -90
  192. package/dist/esm/modules/archive/byte-queue.js +0 -125
  193. package/dist/esm/modules/archive/parse.base.js +0 -644
  194. package/dist/esm/modules/archive/utils/zip-extra.js +0 -68
  195. package/dist/esm/modules/archive/zip-builder.js +0 -292
  196. package/dist/esm/modules/archive/zip-constants.js +0 -23
  197. package/dist/esm/modules/archive/zip-records.js +0 -84
  198. package/dist/types/modules/archive/byte-queue.d.ts +0 -18
  199. package/dist/types/modules/archive/utils/zip-extra.d.ts +0 -18
  200. package/dist/types/modules/archive/zip-builder.d.ts +0 -117
  201. package/dist/types/modules/archive/zip-constants.d.ts +0 -18
  202. /package/dist/browser/modules/archive/{compress.base.d.ts → compression/compress.base.d.ts} +0 -0
  203. /package/dist/browser/modules/archive/{crc32.base.d.ts → compression/crc32.base.d.ts} +0 -0
  204. /package/dist/browser/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  205. /package/dist/browser/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  206. /package/dist/browser/modules/archive/{deflate-fallback.d.ts → compression/deflate-fallback.d.ts} +0 -0
  207. /package/dist/browser/modules/archive/{streaming-compress.base.d.ts → compression/streaming-compress.base.d.ts} +0 -0
  208. /package/dist/browser/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  209. /package/dist/browser/modules/archive/{extract.js → unzip/extract.js} +0 -0
  210. /package/dist/browser/modules/archive/{zip-entry-metadata.d.ts → zip/zip-entry-metadata.d.ts} +0 -0
  211. /package/dist/browser/modules/archive/{zip-entry-info.d.ts → zip-spec/zip-entry-info.d.ts} +0 -0
  212. /package/dist/browser/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  213. /package/dist/cjs/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  214. /package/dist/cjs/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  215. /package/dist/cjs/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  216. /package/dist/cjs/modules/archive/{extract.js → unzip/extract.js} +0 -0
  217. /package/dist/cjs/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  218. /package/dist/esm/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  219. /package/dist/esm/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  220. /package/dist/esm/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  221. /package/dist/esm/modules/archive/{extract.js → unzip/extract.js} +0 -0
  222. /package/dist/esm/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  223. /package/dist/types/modules/archive/{compress.base.d.ts → compression/compress.base.d.ts} +0 -0
  224. /package/dist/types/modules/archive/{compress.d.ts → compression/compress.d.ts} +0 -0
  225. /package/dist/types/modules/archive/{crc32.base.d.ts → compression/crc32.base.d.ts} +0 -0
  226. /package/dist/types/modules/archive/{crc32.browser.d.ts → compression/crc32.browser.d.ts} +0 -0
  227. /package/dist/types/modules/archive/{crc32.d.ts → compression/crc32.d.ts} +0 -0
  228. /package/dist/types/modules/archive/{deflate-fallback.d.ts → compression/deflate-fallback.d.ts} +0 -0
  229. /package/dist/types/modules/archive/{streaming-compress.base.d.ts → compression/streaming-compress.base.d.ts} +0 -0
  230. /package/dist/types/modules/archive/{streaming-compress.d.ts → compression/streaming-compress.d.ts} +0 -0
  231. /package/dist/types/modules/archive/{extract.d.ts → unzip/extract.d.ts} +0 -0
  232. /package/dist/types/modules/archive/{zip-entry-info.d.ts → zip-spec/zip-entry-info.d.ts} +0 -0
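
The listing above reflects a reorganization of the archive module into compression/, io/, internal/, unzip/, zip/, and zip-spec/ subdirectories across all four build trees (browser, cjs, esm, types). Public entry points appear unchanged, but any build that deep-imports these dist paths directly (not a documented API, and possibly blocked by the package's exports map) would need the new specifiers. A hypothetical before/after, following the crc32 rename listed above:

    // Hypothetical deep import against the old 4.2.0-canary layout:
    require("@cj-tech-master/excelts/dist/cjs/modules/archive/crc32.js");
    // The same module after this release's move into compression/:
    require("@cj-tech-master/excelts/dist/cjs/modules/archive/compression/crc32.js");
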
@@ -0,0 +1,1044 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.DEFAULT_PARSE_THRESHOLD_BYTES = exports.PullStream = exports.parseExtraField = exports.END_OF_CENTRAL_DIRECTORY_FORMAT = exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = exports.DATA_DESCRIPTOR_FORMAT = exports.LOCAL_FILE_HEADER_FORMAT = exports.CRX_HEADER_FORMAT = exports.DATA_DESCRIPTOR_SIGNATURE_BYTES = void 0;
+ exports.decodeZipEntryPath = decodeZipEntryPath;
+ exports.isZipUnicodeFlag = isZipUnicodeFlag;
+ exports.isZipDirectoryPath = isZipDirectoryPath;
+ exports.getZipEntryType = getZipEntryType;
+ exports.buildZipEntryProps = buildZipEntryProps;
+ exports.resolveZipEntryLastModifiedDateTime = resolveZipEntryLastModifiedDateTime;
+ exports.hasDataDescriptorFlag = hasDataDescriptorFlag;
+ exports.isFileSizeKnown = isFileSizeKnown;
+ exports.autodrain = autodrain;
+ exports.bufferStream = bufferStream;
+ exports.readCrxHeader = readCrxHeader;
+ exports.readLocalFileHeader = readLocalFileHeader;
+ exports.readDataDescriptor = readDataDescriptor;
+ exports.consumeCentralDirectoryFileHeader = consumeCentralDirectoryFileHeader;
+ exports.consumeEndOfCentralDirectoryRecord = consumeEndOfCentralDirectoryRecord;
+ exports.scanValidatedDataDescriptor = scanValidatedDataDescriptor;
+ exports.streamUntilValidatedDataDescriptor = streamUntilValidatedDataDescriptor;
+ exports.runParseLoop = runParseLoop;
+ const timestamps_1 = require("../utils/timestamps.js");
+ const _stream_1 = require("../../stream/index.js");
+ const parse_buffer_1 = require("../utils/parse-buffer.js");
+ const byte_queue_1 = require("../internal/byte-queue.js");
+ const bytes_1 = require("../utils/bytes.js");
+ const pattern_scanner_1 = require("../utils/pattern-scanner.js");
+ const binary_1 = require("../utils/binary.js");
+ const zip_extra_fields_1 = require("../utils/zip-extra-fields.js");
+ const zip_records_1 = require("../zip-spec/zip-records.js");
+ exports.DATA_DESCRIPTOR_SIGNATURE_BYTES = (0, binary_1.writeUint32LE)(zip_records_1.DATA_DESCRIPTOR_SIG);
+ const DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK = 256 * 1024;
+ // Shared parseBuffer() formats
+ exports.CRX_HEADER_FORMAT = [
+     ["version", 4],
+     ["pubKeyLength", 4],
+     ["signatureLength", 4]
+ ];
+ exports.LOCAL_FILE_HEADER_FORMAT = [
+     ["versionsNeededToExtract", 2],
+     ["flags", 2],
+     ["compressionMethod", 2],
+     ["lastModifiedTime", 2],
+     ["lastModifiedDate", 2],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4],
+     ["fileNameLength", 2],
+     ["extraFieldLength", 2]
+ ];
+ exports.DATA_DESCRIPTOR_FORMAT = [
+     ["dataDescriptorSignature", 4],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4]
+ ];
+ exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = [
+     ["versionMadeBy", 2],
+     ["versionsNeededToExtract", 2],
+     ["flags", 2],
+     ["compressionMethod", 2],
+     ["lastModifiedTime", 2],
+     ["lastModifiedDate", 2],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4],
+     ["fileNameLength", 2],
+     ["extraFieldLength", 2],
+     ["fileCommentLength", 2],
+     ["diskNumber", 2],
+     ["internalFileAttributes", 2],
+     ["externalFileAttributes", 4],
+     ["offsetToLocalFileHeader", 4]
+ ];
+ exports.END_OF_CENTRAL_DIRECTORY_FORMAT = [
+     ["diskNumber", 2],
+     ["diskStart", 2],
+     ["numberOfRecordsOnDisk", 2],
+     ["numberOfRecords", 2],
+     ["sizeOfCentralDirectory", 4],
+     ["offsetToStartOfCentralDirectory", 4],
+     ["commentLength", 2]
+ ];
+ const textDecoder = new TextDecoder();
+ const textEncoder = new TextEncoder();
+ function decodeZipEntryPath(pathBuffer) {
+     return textDecoder.decode(pathBuffer);
+ }
+ function isZipUnicodeFlag(flags) {
+     return ((flags || 0) & 0x800) !== 0;
+ }
+ function isZipDirectoryPath(path) {
+     if (path.length === 0) {
+         return false;
+     }
+     const last = path.charCodeAt(path.length - 1);
+     return last === 47 || last === 92;
+ }
+ function getZipEntryType(path, uncompressedSize) {
+     return uncompressedSize === 0 && isZipDirectoryPath(path) ? "Directory" : "File";
+ }
+ function buildZipEntryProps(path, pathBuffer, flags) {
+     return {
+         path,
+         pathBuffer,
+         flags: {
+             isUnicode: isZipUnicodeFlag(flags)
+         }
+     };
+ }
+ function resolveZipEntryLastModifiedDateTime(vars, extraFields) {
+     const dosDate = vars.lastModifiedDate || 0;
+     const dosTime = vars.lastModifiedTime || 0;
+     const dosDateTime = (0, timestamps_1.parseDosDateTimeUTC)(dosDate, dosTime);
+     const unixSecondsMtime = extraFields.mtimeUnixSeconds;
+     if (unixSecondsMtime === undefined) {
+         return dosDateTime;
+     }
+     return (0, timestamps_1.resolveZipLastModifiedDateFromUnixSeconds)(dosDate, dosTime, unixSecondsMtime);
+ }
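
Context for the timestamp resolution above: the DOS header fields carry a packed date/time with only 2-second resolution, which is why an extra-field Unix mtime, when present, takes precedence. parseDosDateTimeUTC lives in utils/timestamps.js and is not part of this hunk; the standard MS-DOS packing it has to undo looks like this sketch (for reference, not the library's implementation):

    function dosDateTimeToUTC(dosDate, dosTime) {
        const year = ((dosDate >> 9) & 0x7f) + 1980;
        const month = (dosDate >> 5) & 0x0f;      // 1-12
        const day = dosDate & 0x1f;               // 1-31
        const hours = (dosTime >> 11) & 0x1f;
        const minutes = (dosTime >> 5) & 0x3f;
        const seconds = (dosTime & 0x1f) * 2;     // 2-second resolution
        return new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds));
    }
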
+ exports.parseExtraField = zip_extra_fields_1.parseZipExtraFields;
+ function hasDataDescriptorFlag(flags) {
+     return ((flags || 0) & 0x08) !== 0;
+ }
+ function isFileSizeKnown(flags, compressedSize) {
+     return !hasDataDescriptorFlag(flags) || (compressedSize || 0) > 0;
+ }
+ function autodrain(stream) {
+     const draining = stream.pipe(new _stream_1.Transform({
+         transform(_chunk, _encoding, callback) {
+             callback();
+         }
+     }));
+     draining.promise = () => new Promise((resolve, reject) => {
+         draining.on("finish", resolve);
+         draining.on("error", reject);
+     });
+     return draining;
+ }
+ /**
+  * Collects all data from a readable stream into a single Uint8Array.
+  */
+ function bufferStream(entry) {
+     return new Promise((resolve, reject) => {
+         const chunks = [];
+         const stream = new _stream_1.Transform({
+             transform(d, _encoding, callback) {
+                 chunks.push(d);
+                 callback();
+             }
+         });
+         stream.on("finish", () => {
+             resolve(chunks.length === 1 ? chunks[0] : (0, _stream_1.concatUint8Arrays)(chunks));
+         });
+         stream.on("error", reject);
+         entry.on("error", reject).pipe(stream);
+     });
+ }
+ const STR_FUNCTION = "function";
+ class PullStream extends _stream_1.Duplex {
+     get buffer() {
+         return this._queue.view();
+     }
+     set buffer(value) {
+         this._queue.reset(value);
+     }
+     constructor() {
+         super({ decodeStrings: false, objectMode: true });
+         this._queue = new byte_queue_1.ByteQueue();
+         this.finished = false;
+         this.on("finish", () => {
+             this.finished = true;
+             this.emit("chunk", false);
+         });
+     }
+     _write(chunk, _encoding, callback) {
+         const data = typeof chunk === "string" ? textEncoder.encode(chunk) : chunk;
+         this._queue.append(data);
+         this.cb = callback;
+         this.emit("chunk");
+     }
+     _read() { }
+     _maybeReleaseWriteCallback() {
+         if (typeof this.cb === STR_FUNCTION) {
+             const callback = this.cb;
+             this.cb = undefined;
+             callback();
+         }
+     }
+     /**
+      * The `eof` parameter is interpreted as `file_length` if the type is number
+      * otherwise (i.e. Uint8Array) it is interpreted as a pattern signaling end of stream
+      */
+     stream(eof, includeEof) {
+         const p = new _stream_1.PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+         let done = false;
+         let waitingDrain = false;
+         const eofIsNumber = typeof eof === "number";
+         let remainingBytes = eofIsNumber ? eof : 0;
+         const pattern = eofIsNumber ? undefined : eof;
+         const patternLen = pattern ? pattern.length : 0;
+         const minTailBytes = eofIsNumber ? 0 : patternLen;
+         const scanner = eofIsNumber ? undefined : new pattern_scanner_1.PatternScanner(pattern);
+         const cb = () => {
+             this._maybeReleaseWriteCallback();
+         };
+         const pull = () => {
+             if (done || waitingDrain) {
+                 return;
+             }
+             while (true) {
+                 const available = this._queue.length;
+                 if (!available) {
+                     break;
+                 }
+                 let packet;
+                 if (eofIsNumber) {
+                     const toRead = Math.min(remainingBytes, available);
+                     if (toRead > 0) {
+                         packet = this._queue.read(toRead);
+                         remainingBytes -= toRead;
+                     }
+                     done = done || remainingBytes === 0;
+                 }
+                 else {
+                     const bufLen = this._queue.length;
+                     const match = scanner.find(this._queue);
+                     if (match !== -1) {
+                         // store signature match byte offset to allow us to reference
+                         // this for zip64 offset
+                         this.match = match;
+                         const toRead = includeEof ? match + patternLen : match;
+                         if (toRead > 0) {
+                             packet = this._queue.read(toRead);
+                             scanner.onConsume(toRead);
+                         }
+                         done = true;
+                     }
+                     else {
+                         // No match yet. Avoid rescanning bytes that can't start a match.
+                         scanner.onNoMatch(bufLen);
+                         const len = bufLen - patternLen;
+                         if (len <= 0) {
+                             // Keep enough bytes to detect a split signature.
+                             if (this._queue.length === 0 ||
+                                 (minTailBytes && this._queue.length <= minTailBytes)) {
+                                 cb();
+                             }
+                         }
+                         else {
+                             packet = this._queue.read(len);
+                             scanner.onConsume(len);
+                         }
+                     }
+                 }
+                 if (!packet) {
+                     break;
+                 }
+                 const ok = p.write(packet);
+                 // If we drained the internal buffer (or kept only a minimal tail), allow upstream to continue.
+                 if (this._queue.length === 0 || (minTailBytes && this._queue.length <= minTailBytes)) {
+                     cb();
+                 }
+                 if (!ok) {
+                     waitingDrain = true;
+                     p.once("drain", () => {
+                         waitingDrain = false;
+                         pull();
+                     });
+                     return;
+                 }
+                 if (done) {
+                     cb();
+                     this.removeListener("chunk", pull);
+                     p.end();
+                     return;
+                 }
+             }
+             if (!done) {
+                 if (this.finished) {
+                     this.removeListener("chunk", pull);
+                     cb();
+                     p.destroy(new Error("FILE_ENDED"));
+                 }
+                 return;
+             }
+             this.removeListener("chunk", pull);
+             cb();
+             p.end();
+         };
+         this.on("chunk", pull);
+         pull();
+         return p;
+     }
+     pull(eof, includeEof) {
+         if (eof === 0) {
+             return Promise.resolve(new Uint8Array(0));
+         }
+         // If we already have the required data in buffer
+         // we can resolve the request immediately
+         if (typeof eof === "number" && this._queue.length >= eof) {
+             const data = this._queue.read(eof);
+             // Allow the upstream writer to continue once the consumer makes progress.
+             // Waiting for a full drain can deadlock when the producer must call `end()`
+             // but is blocked behind a deferred write callback.
+             this._maybeReleaseWriteCallback();
+             return Promise.resolve(data);
+         }
+         // Otherwise we wait for more data and fulfill directly from the internal queue.
+         // This avoids constructing intermediate streams for small pulls (hot path).
+         const chunks = [];
+         let pullStreamRejectHandler;
+         // Pattern scanning state (only used when eof is a pattern)
+         const eofIsNumber = typeof eof === "number";
+         const pattern = eofIsNumber ? undefined : eof;
+         const patternLen = pattern ? pattern.length : 0;
+         const scanner = eofIsNumber ? undefined : new pattern_scanner_1.PatternScanner(pattern);
+         return new Promise((resolve, reject) => {
+             let settled = false;
+             pullStreamRejectHandler = (e) => {
+                 this.__emittedError = e;
+                 cleanup();
+                 reject(e);
+             };
+             if (this.finished) {
+                 reject(new Error("FILE_ENDED"));
+                 return;
+             }
+             const cleanup = () => {
+                 this.removeListener("chunk", onChunk);
+                 this.removeListener("finish", onFinish);
+                 this.removeListener("error", pullStreamRejectHandler);
+             };
+             const finalize = () => {
+                 cleanup();
+                 settled = true;
+                 if (chunks.length === 0) {
+                     resolve(new Uint8Array(0));
+                     return;
+                 }
+                 resolve(chunks.length === 1 ? chunks[0] : (0, _stream_1.concatUint8Arrays)(chunks));
+             };
+             const onFinish = () => {
+                 if (settled) {
+                     return;
+                 }
+                 // Try one last time to drain anything already buffered.
+                 onChunk();
+                 if (!settled) {
+                     cleanup();
+                     reject(new Error("FILE_ENDED"));
+                 }
+             };
+             const onChunk = () => {
+                 if (typeof eof === "number") {
+                     const available = this._queue.length;
+                     if (available <= 0) {
+                         return;
+                     }
+                     const toRead = Math.min(eof, available);
+                     if (toRead > 0) {
+                         chunks.push(this._queue.read(toRead));
+                         eof -= toRead;
+                     }
+                     // Allow upstream to continue as soon as we consume bytes.
+                     // This avoids deadlocks when the last upstream chunk is waiting on its
+                     // callback and the parser needs an EOF signal after draining buffered data.
+                     this._maybeReleaseWriteCallback();
+                     if (eof === 0) {
+                         finalize();
+                     }
+                     return;
+                 }
+                 // eof is a pattern
+                 while (this._queue.length > 0) {
+                     const bufLen = this._queue.length;
+                     const match = scanner.find(this._queue);
+                     if (match !== -1) {
+                         // store signature match byte offset to allow us to reference
+                         // this for zip64 offset
+                         this.match = match;
+                         const toRead = includeEof ? match + patternLen : match;
+                         if (toRead > 0) {
+                             chunks.push(this._queue.read(toRead));
+                             scanner.onConsume(toRead);
+                         }
+                         if (this._queue.length === 0 || (patternLen && this._queue.length <= patternLen)) {
+                             this._maybeReleaseWriteCallback();
+                         }
+                         finalize();
+                         return;
+                     }
+                     // No match yet. Avoid rescanning bytes that can't start a match.
+                     scanner.onNoMatch(bufLen);
+                     const safeLen = bufLen - patternLen;
+                     if (safeLen <= 0) {
+                         // Keep enough bytes to detect a split signature.
+                         this._maybeReleaseWriteCallback();
+                         return;
+                     }
+                     chunks.push(this._queue.read(safeLen));
+                     scanner.onConsume(safeLen);
+                     if (this._queue.length === 0 || (patternLen && this._queue.length <= patternLen)) {
+                         this._maybeReleaseWriteCallback();
+                         return;
+                     }
+                 }
+             };
+             this.once("error", pullStreamRejectHandler);
+             this.on("chunk", onChunk);
+             this.once("finish", onFinish);
+             // Attempt immediate fulfillment from any already-buffered data.
+             onChunk();
+         });
+     }
+     pullUntil(pattern, includeEof) {
+         return this.pull(pattern, includeEof);
+     }
+ }
+ exports.PullStream = PullStream;
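
A sketch of how the PullStream API is driven (assumed usage inferred from the parse loop further down, not package documentation): bytes are piped in from any Node-style readable, and the parser awaits exact byte counts or scans forward to a signature, all inside an async function.

    const ps = new PullStream();
    readable.pipe(ps);                     // `readable` is any byte stream, e.g. a file stream
    const sigBytes = await ps.pull(4);     // resolves once 4 bytes are buffered
    const header = await ps.pull(26);      // fixed-size local file header body
    // Scan forward to a byte pattern, e.g. the EOCD signature "PK\x05\x06":
    const trailing = await ps.pullUntil(Uint8Array.of(0x50, 0x4b, 0x05, 0x06), true);
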
+ async function readCrxHeader(pull) {
+     const data = await pull(12);
+     const header = data.length >= 12 ? parseCrxHeaderFast(data) : (0, parse_buffer_1.parseTyped)(data, exports.CRX_HEADER_FORMAT);
+     const pubKeyLength = header.pubKeyLength || 0;
+     const signatureLength = header.signatureLength || 0;
+     const keyAndSig = await pull(pubKeyLength + signatureLength);
+     header.publicKey = keyAndSig.subarray(0, pubKeyLength);
+     header.signature = keyAndSig.subarray(pubKeyLength);
+     return header;
+ }
+ async function readLocalFileHeader(pull) {
+     const data = await pull(26);
+     const vars = data.length >= 26
+         ? parseLocalFileHeaderVarsFast(data)
+         : (0, parse_buffer_1.parseTyped)(data, exports.LOCAL_FILE_HEADER_FORMAT);
+     const fileNameBuffer = await pull(vars.fileNameLength || 0);
+     const extraFieldData = await pull(vars.extraFieldLength || 0);
+     return { vars, fileNameBuffer, extraFieldData };
+ }
+ async function readDataDescriptor(pull) {
+     const data = await pull(16);
+     return data.length >= 16
+         ? parseDataDescriptorVarsFast(data)
+         : (0, parse_buffer_1.parseTyped)(data, exports.DATA_DESCRIPTOR_FORMAT);
+ }
+ async function consumeCentralDirectoryFileHeader(pull) {
+     const data = await pull(42);
+     const vars = (0, parse_buffer_1.parseTyped)(data, exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT);
+     await pull(vars.fileNameLength || 0);
+     await pull(vars.extraFieldLength || 0);
+     await pull(vars.fileCommentLength || 0);
+ }
+ async function consumeEndOfCentralDirectoryRecord(pull) {
+     const data = await pull(18);
+     const vars = (0, parse_buffer_1.parseTyped)(data, exports.END_OF_CENTRAL_DIRECTORY_FORMAT);
+     await pull(vars.commentLength || 0);
+ }
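
The readers above are deliberately decoupled from PullStream: each only needs an async pull(n) that returns the next n bytes, with the 4-byte record signature already consumed by the caller (as runParseLoop does below). A minimal in-memory driver, assuming zipBytes is a hypothetical Uint8Array positioned at a local file header:

    function makePull(bytes) {
        let offset = 0;
        return async (n) => {
            const out = bytes.subarray(offset, offset + n);
            offset += n;
            return out;
        };
    }
    // Skip the 4-byte signature, then decode the header fields and file name:
    const { vars, fileNameBuffer } = await readLocalFileHeader(makePull(zipBytes.subarray(4)));
    console.log(decodeZipEntryPath(fileNameBuffer), vars.compressedSize);
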
+ // =============================================================================
+ // Validated Data Descriptor Scan (shared by Node + Browser)
+ // =============================================================================
+ function isValidZipRecordSignature(sig) {
+     switch (sig) {
+         case zip_records_1.LOCAL_FILE_HEADER_SIG:
+         case zip_records_1.CENTRAL_DIR_HEADER_SIG:
+         case zip_records_1.END_OF_CENTRAL_DIR_SIG:
+         case zip_records_1.ZIP64_END_OF_CENTRAL_DIR_SIG:
+         case zip_records_1.ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG:
+             return true;
+         default:
+             return false;
+     }
+ }
+ function readUint32LEFromBytes(view, offset) {
+     return ((view[offset] |
+         0 |
+         ((view[offset + 1] | 0) << 8) |
+         ((view[offset + 2] | 0) << 16) |
+         ((view[offset + 3] | 0) << 24)) >>>
+         0);
+ }
+ function readUint16LEFromBytes(view, offset) {
+     return (view[offset] | ((view[offset + 1] | 0) << 8)) >>> 0;
+ }
+ function parseCrxHeaderFast(data) {
+     return {
+         version: readUint32LEFromBytes(data, 0),
+         pubKeyLength: readUint32LEFromBytes(data, 4),
+         signatureLength: readUint32LEFromBytes(data, 8)
+     };
+ }
+ function parseLocalFileHeaderVarsFast(data) {
+     return {
+         versionsNeededToExtract: readUint16LEFromBytes(data, 0),
+         flags: readUint16LEFromBytes(data, 2),
+         compressionMethod: readUint16LEFromBytes(data, 4),
+         lastModifiedTime: readUint16LEFromBytes(data, 6),
+         lastModifiedDate: readUint16LEFromBytes(data, 8),
+         crc32: readUint32LEFromBytes(data, 10),
+         compressedSize: readUint32LEFromBytes(data, 14),
+         uncompressedSize: readUint32LEFromBytes(data, 18),
+         fileNameLength: readUint16LEFromBytes(data, 22),
+         extraFieldLength: readUint16LEFromBytes(data, 24)
+     };
+ }
+ function parseDataDescriptorVarsFast(data) {
+     return {
+         dataDescriptorSignature: readUint32LEFromBytes(data, 0),
+         crc32: readUint32LEFromBytes(data, 4),
+         compressedSize: readUint32LEFromBytes(data, 8),
+         uncompressedSize: readUint32LEFromBytes(data, 12)
+     };
+ }
+ function indexOf4BytesPattern(buffer, pattern, startIndex) {
+     if (pattern.length !== 4) {
+         return (0, bytes_1.indexOfUint8ArrayPattern)(buffer, pattern, startIndex);
+     }
+     const b0 = pattern[0];
+     const b1 = pattern[1];
+     const b2 = pattern[2];
+     const b3 = pattern[3];
+     const bufLen = buffer.length;
+     let start = startIndex | 0;
+     if (start < 0) {
+         start = 0;
+     }
+     if (start > bufLen - 4) {
+         return -1;
+     }
+     const last = bufLen - 4;
+     let i = buffer.indexOf(b0, start);
+     while (i !== -1 && i <= last) {
+         if (buffer[i + 1] === b1 && buffer[i + 2] === b2 && buffer[i + 3] === b3) {
+             return i;
+         }
+         i = buffer.indexOf(b0, i + 1);
+     }
+     return -1;
+ }
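
indexOf4BytesPattern leans on TypedArray.prototype.indexOf to jump between candidate first bytes instead of checking every offset, which suits the 4-byte ZIP signatures used here. A quick check with the local-file-header magic "PK\x03\x04" (the helper is module-private, so this call is illustrative only):

    const LFH_SIG = Uint8Array.of(0x50, 0x4b, 0x03, 0x04);
    indexOf4BytesPattern(Uint8Array.of(0x00, 0x50, 0x4b, 0x03, 0x04, 0xff), LFH_SIG, 0); // -> 1
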
+ function initScanResult(out) {
+     if (out) {
+         return out;
+     }
+     return { foundIndex: -1, nextSearchFrom: 0 };
+ }
+ /**
+  * Scan for a validated DATA_DESCRIPTOR record boundary.
+  *
+  * Scanning for the 4-byte signature alone is unsafe because it can appear inside
+  * compressed data. We validate a candidate by requiring:
+  * - the next 4 bytes after the 16-byte descriptor form a known ZIP record signature, and
+  * - the descriptor's compressedSize matches the number of compressed bytes emitted so far.
+  */
+ function scanValidatedDataDescriptor(view, dataDescriptorSignature, bytesEmitted, startIndex = 0, out) {
+     const result = initScanResult(out);
+     const viewLen = view.length;
+     let searchFrom = startIndex | 0;
+     if (searchFrom < 0) {
+         searchFrom = 0;
+     }
+     if (searchFrom > viewLen) {
+         searchFrom = viewLen;
+     }
+     // To avoid missing a signature split across chunk boundaries, we may need
+     // to re-check the last (sigLen - 1) bytes on the next scan.
+     const sigLen = dataDescriptorSignature.length | 0;
+     const overlap = sigLen > 0 ? sigLen - 1 : 0;
+     const viewLimit = Math.max(0, viewLen - overlap);
+     while (searchFrom < viewLen) {
+         const match = indexOf4BytesPattern(view, dataDescriptorSignature, searchFrom);
+         if (match === -1) {
+             result.foundIndex = -1;
+             result.nextSearchFrom = Math.max(searchFrom, viewLimit);
+             return result;
+         }
+         const idx = match;
+         // Need 16 bytes for descriptor + 4 bytes for next record signature.
+         const nextSigOffset = idx + 16;
+         if (nextSigOffset + 4 <= viewLen) {
+             const nextSig = readUint32LEFromBytes(view, nextSigOffset);
+             const descriptorCompressedSize = readUint32LEFromBytes(view, idx + 8);
+             const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
+             if (isValidZipRecordSignature(nextSig) &&
+                 descriptorCompressedSize === expectedCompressedSize) {
+                 result.foundIndex = idx;
+                 result.nextSearchFrom = idx;
+                 return result;
+             }
+             searchFrom = idx + 1;
+             continue;
+         }
+         // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
+         result.foundIndex = -1;
+         result.nextSearchFrom = idx;
+         return result;
+     }
+     result.foundIndex = -1;
+     result.nextSearchFrom = Math.max(searchFrom, viewLimit);
+     return result;
+ }
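
A worked example of the validation rule, using byte values from the ZIP spec: a descriptor candidate at offset 0 with zero compressed bytes emitted so far validates only because its compressedSize field equals bytesEmitted + idx and the 4 bytes after the 16-byte descriptor form a known record signature.

    const view = new Uint8Array(20);
    view.set([0x50, 0x4b, 0x07, 0x08], 0);   // data descriptor signature "PK\x07\x08"
    // bytes 4..7 (crc32) and 8..11 (compressedSize) stay 0; compressedSize must
    // equal bytesEmitted + idx = 0 for the candidate to pass validation
    view.set([0x50, 0x4b, 0x05, 0x06], 16);  // next record: EOCD signature "PK\x05\x06"
    const ddSig = Uint8Array.of(0x50, 0x4b, 0x07, 0x08);
    scanValidatedDataDescriptor(view, ddSig, 0); // -> { foundIndex: 0, nextSearchFrom: 0 }
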
+ /**
+  * Stream compressed file data until we reach a validated DATA_DESCRIPTOR boundary.
+  *
+  * This encapsulates the shared logic used by both Node and browser parsers.
+  */
+ function streamUntilValidatedDataDescriptor(options) {
+     const { source, dataDescriptorSignature } = options;
+     const keepTailBytes = options.keepTailBytes ?? 20;
+     const errorMessage = options.errorMessage ?? "FILE_ENDED: Data descriptor not found";
+     const output = new _stream_1.PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+     let done = false;
+     let waitingDrain = false;
+     // Total number of compressed bytes already emitted for this entry.
+     let bytesEmitted = 0;
+     const scanner = new pattern_scanner_1.PatternScanner(dataDescriptorSignature);
+     let unsubscribe;
+     const cleanup = () => {
+         if (unsubscribe) {
+             unsubscribe();
+             unsubscribe = undefined;
+         }
+     };
+     const pull = () => {
+         if (done) {
+             return;
+         }
+         if (waitingDrain) {
+             return;
+         }
+         let available = source.getLength();
+         if (available === 0) {
+             // If we have no buffered data, ensure upstream isn't stuck behind a
+             // deferred write callback.
+             source.maybeReleaseWriteCallback?.();
+         }
+         while (available > 0) {
+             // Try to find and validate a descriptor candidate.
+             while (true) {
+                 const idx = scanner.find(source);
+                 if (idx === -1) {
+                     break;
+                 }
+                 // Need 16 bytes for descriptor + 4 bytes for next record signature.
+                 const nextSigOffset = idx + 16;
+                 if (nextSigOffset + 4 <= available) {
+                     const nextSig = source.peekUint32LE(nextSigOffset);
+                     const descriptorCompressedSize = source.peekUint32LE(idx + 8);
+                     const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
+                     if (nextSig !== null &&
+                         descriptorCompressedSize !== null &&
+                         isValidZipRecordSignature(nextSig) &&
+                         descriptorCompressedSize === expectedCompressedSize) {
+                         if (idx > 0) {
+                             if (source.peekChunks && source.discard) {
+                                 const parts = source.peekChunks(idx);
+                                 let written = 0;
+                                 for (const part of parts) {
+                                     const ok = output.write(part);
+                                     written += part.length;
+                                     if (!ok) {
+                                         waitingDrain = true;
+                                         output.once("drain", () => {
+                                             waitingDrain = false;
+                                             pull();
+                                         });
+                                         break;
+                                     }
+                                 }
+                                 if (written > 0) {
+                                     source.discard(written);
+                                     bytesEmitted += written;
+                                     available -= written;
+                                     scanner.onConsume(written);
+                                 }
+                                 if (waitingDrain) {
+                                     return;
+                                 }
+                             }
+                             else {
+                                 const ok = output.write(source.read(idx));
+                                 bytesEmitted += idx;
+                                 available -= idx;
+                                 scanner.onConsume(idx);
+                                 if (!ok) {
+                                     waitingDrain = true;
+                                     output.once("drain", () => {
+                                         waitingDrain = false;
+                                         pull();
+                                     });
+                                     return;
+                                 }
+                             }
+                         }
+                         done = true;
+                         source.maybeReleaseWriteCallback?.();
+                         cleanup();
+                         output.end();
+                         return;
+                     }
+                     scanner.searchFrom = idx + 1;
+                     continue;
+                 }
+                 // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
+                 scanner.searchFrom = idx;
+                 break;
+             }
+             // No validated match yet.
+             scanner.onNoMatch(available);
+             // Flush most of the buffered data but keep a tail so a potential signature
+             // split across chunks can still be detected/validated.
+             const flushLen = Math.max(0, available - keepTailBytes);
+             if (flushLen > 0) {
+                 if (source.peekChunks && source.discard) {
+                     const parts = source.peekChunks(flushLen);
+                     let written = 0;
+                     for (const part of parts) {
+                         const ok = output.write(part);
+                         written += part.length;
+                         if (!ok) {
+                             waitingDrain = true;
+                             output.once("drain", () => {
+                                 waitingDrain = false;
+                                 pull();
+                             });
+                             break;
+                         }
+                     }
+                     if (written > 0) {
+                         source.discard(written);
+                         bytesEmitted += written;
+                         available -= written;
+                         scanner.onConsume(written);
+                     }
+                     if (available <= keepTailBytes) {
+                         source.maybeReleaseWriteCallback?.();
+                     }
+                     return;
+                 }
+                 const ok = output.write(source.read(flushLen));
+                 bytesEmitted += flushLen;
+                 available -= flushLen;
+                 scanner.onConsume(flushLen);
+                 if (available <= keepTailBytes) {
+                     source.maybeReleaseWriteCallback?.();
+                 }
+                 if (!ok) {
+                     waitingDrain = true;
+                     output.once("drain", () => {
+                         waitingDrain = false;
+                         pull();
+                     });
+                 }
+                 return;
+             }
+             // Need more data.
+             // IMPORTANT: If we keep a tail and cannot flush anything yet, we must still
+             // release upstream write callbacks; otherwise the producer can deadlock waiting
+             // for backpressure while we wait for more bytes to arrive.
+             source.maybeReleaseWriteCallback?.();
+             break;
+         }
+         if (!done && source.isFinished()) {
+             done = true;
+             cleanup();
+             output.destroy(new Error(errorMessage));
+         }
+     };
+     unsubscribe = source.onDataAvailable(pull);
+     queueMicrotask(pull);
+     return output;
+ }
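
The function above is written against a small duck-typed source rather than a concrete stream; the concrete implementation presumably lives in the io/archive-source.js added in this release (see the file list). Its shape, as inferred from the call sites and summarized here as a JSDoc typedef (a reader's summary, not a published type):

    /**
     * @typedef {Object} DescriptorScanSource
     * @property {() => number} getLength                          bytes currently buffered
     * @property {(n: number) => Uint8Array} read                  consume n bytes
     * @property {(offset: number) => number | null} peekUint32LE  read without consuming
     * @property {((n: number) => Uint8Array[])=} peekChunks       optional zero-copy peek
     * @property {((n: number) => void)=} discard                  drop n bytes after peekChunks
     * @property {() => boolean} isFinished
     * @property {(cb: () => void) => (() => void)} onDataAvailable  returns an unsubscribe
     * @property {(() => void)=} maybeReleaseWriteCallback
     */
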
+ /**
+  * Default threshold for small file optimization (5MB).
+  */
+ exports.DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
+ const endDirectorySignature = (0, binary_1.writeUint32LE)(zip_records_1.END_OF_CENTRAL_DIR_SIG);
+ async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
+     const thresholdBytes = opts.thresholdBytes ?? exports.DEFAULT_PARSE_THRESHOLD_BYTES;
+     while (true) {
+         const sigBytes = await io.pull(4);
+         if (sigBytes.length === 0) {
+             emitter.emitClose();
+             return;
+         }
+         const signature = (0, binary_1.readUint32LE)(sigBytes, 0);
+         if (signature === 0x34327243) {
+             state.crxHeader = await readCrxHeader(async (length) => io.pull(length));
+             emitter.emitCrxHeader(state.crxHeader);
+             continue;
+         }
+         if (signature === zip_records_1.LOCAL_FILE_HEADER_SIG) {
+             await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
+             continue;
+         }
+         if (signature === zip_records_1.CENTRAL_DIR_HEADER_SIG) {
+             state.reachedCD = true;
+             await consumeCentralDirectoryFileHeader(async (length) => io.pull(length));
+             continue;
+         }
+         if (signature === zip_records_1.END_OF_CENTRAL_DIR_SIG) {
+             await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
+             io.setDone();
+             emitter.emitClose();
+             return;
+         }
+         if (state.reachedCD) {
+             // We are in central directory trailing data; resync by scanning for EOCD signature.
+             // consumeEndOfCentralDirectoryRecord expects the EOCD signature to be consumed, so includeEof=true.
+             const includeEof = true;
+             await io.pullUntil(endDirectorySignature, includeEof);
+             await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
+             io.setDone();
+             emitter.emitClose();
+             return;
+         }
+         emitter.emitError(new Error("invalid signature: 0x" + signature.toString(16)));
+         emitter.emitClose();
+         return;
+     }
+ }
+ async function pumpKnownCompressedSizeToEntry(io, inflater, entry, compressedSize) {
+     // Keep chunks reasonably large to reduce per-await overhead.
+     const CHUNK_SIZE = 256 * 1024;
+     let remaining = compressedSize;
+     let err = null;
+     const onError = (e) => {
+         err = e;
+     };
+     inflater.once("error", onError);
+     entry.once("error", onError);
+     let skipping = false;
+     const waitForDrainOrSkipSignal = async () => {
+         await new Promise(resolve => {
+             const anyInflater = inflater;
+             const cleanup = () => {
+                 try {
+                     anyInflater?.removeListener?.("drain", onDrain);
+                 }
+                 catch {
+                     // ignore
+                 }
+                 try {
+                     entry.removeListener("__autodrain", onAutodrain);
+                 }
+                 catch {
+                     // ignore
+                 }
+                 try {
+                     entry.removeListener("close", onClose);
+                 }
+                 catch {
+                     // ignore
+                 }
+             };
+             const onDrain = () => {
+                 cleanup();
+                 resolve();
+             };
+             const onAutodrain = () => {
+                 cleanup();
+                 resolve();
+             };
+             const onClose = () => {
+                 cleanup();
+                 resolve();
+             };
+             if (typeof anyInflater?.once === "function") {
+                 anyInflater.once("drain", onDrain);
+             }
+             entry.once("__autodrain", onAutodrain);
+             entry.once("close", onClose);
+         });
+     };
+     const switchToSkip = async () => {
+         if (skipping) {
+             return;
+         }
+         skipping = true;
+         // Stop forwarding decompressed output. We only need to advance the ZIP cursor.
+         try {
+             const anyInflater = inflater;
+             if (typeof anyInflater.unpipe === "function") {
+                 anyInflater.unpipe(entry);
+             }
+         }
+         catch {
+             // ignore
+         }
+         // End the entry as early as possible so downstream drain resolves quickly.
+         try {
+             if (!entry.writableEnded && !entry.destroyed) {
+                 entry.end();
+             }
+         }
+         catch {
+             // ignore
+         }
+         // Stop the inflater to avoid work/backpressure.
+         try {
+             const anyInflater = inflater;
+             if (typeof anyInflater.destroy === "function") {
+                 anyInflater.destroy();
+             }
+         }
+         catch {
+             // ignore
+         }
+     };
+     try {
+         // Pipe decompressed output into the entry stream.
+         inflater.pipe(entry);
+         while (remaining > 0) {
+             if (err) {
+                 throw err;
+             }
+             // If downstream decides to autodrain mid-entry (common when a consumer bails out
+             // early due to a limit), stop inflating and just skip the remaining compressed bytes.
+             if (!skipping && (entry.__autodraining || entry.destroyed)) {
+                 await switchToSkip();
+             }
+             const toPull = Math.min(CHUNK_SIZE, remaining);
+             const chunk = await io.pull(toPull);
+             if (chunk.length !== toPull) {
+                 throw new Error("FILE_ENDED");
+             }
+             remaining -= chunk.length;
+             if (!skipping) {
+                 const ok = inflater.write(chunk);
+                 if (!ok) {
+                     await waitForDrainOrSkipSignal();
+                 }
+             }
+         }
+         if (!skipping) {
+             inflater.end();
+         }
+         // Wait for all writes to complete (not for consumption).
+         await (0, _stream_1.finished)(entry, { readable: false });
+     }
+     finally {
+         inflater.removeListener("error", onError);
+         entry.removeListener("error", onError);
+     }
+ }
+ async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
+     const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
+     const vars = headerVars;
+     if (state.crxHeader) {
+         vars.crxHeader = state.crxHeader;
+     }
+     const fileName = decodeZipEntryPath(fileNameBuffer);
+     const entry = new _stream_1.PassThrough({
+         highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK
+     });
+     let autodraining = false;
+     entry.autodrain = function () {
+         autodraining = true;
+         entry.__autodraining = true;
+         // Signal producers that downstream has switched to drain mode.
+         // This helps avoid deadlocks if the producer is waiting on backpressure.
+         entry.emit("__autodrain");
+         return autodrain(entry);
+     };
+     entry.buffer = function () {
+         return bufferStream(entry);
+     };
+     entry.path = fileName;
+     entry.props = buildZipEntryProps(fileName, fileNameBuffer, vars.flags);
+     entry.type = getZipEntryType(fileName, vars.uncompressedSize || 0);
+     if (opts.verbose) {
+         if (entry.type === "Directory") {
+             console.log(" creating:", fileName);
+         }
+         else if (entry.type === "File") {
+             if (vars.compressionMethod === 0) {
+                 console.log(" extracting:", fileName);
+             }
+             else {
+                 console.log(" inflating:", fileName);
+             }
+         }
+     }
+     const extra = (0, exports.parseExtraField)(extraFieldData, vars);
+     vars.lastModifiedDateTime = resolveZipEntryLastModifiedDateTime(vars, extra);
+     entry.vars = vars;
+     entry.extraFields = extra;
+     entry.__autodraining = autodraining;
+     const fileSizeKnown = isFileSizeKnown(vars.flags, vars.compressedSize);
+     if (fileSizeKnown) {
+         entry.size = vars.uncompressedSize || 0;
+     }
+     if (opts.forceStream) {
+         emitter.pushEntry(entry);
+     }
+     else {
+         emitter.emitEntry(entry);
+         emitter.pushEntryIfPiped(entry);
+     }
+     if (opts.verbose) {
+         console.log({
+             filename: fileName,
+             vars: vars,
+             extraFields: entry.extraFields
+         });
+     }
+     // Small file optimization: use sync decompression if:
+     // 1. Entry sizes are trusted (no data descriptor)
+     // 2. File size is known and below threshold
+     // 3. inflateRawSync is provided
+     // 4. File needs decompression (compressionMethod != 0)
+     // 5. Not autodraining
+     //
+     // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
+     // This prevents materializing large highly-compressible files in memory,
+     // which can cause massive peak RSS and negate streaming backpressure.
+     const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
+     const compressedSize = vars.compressedSize || 0;
+     const uncompressedSize = vars.uncompressedSize || 0;
+     const useSmallFileOptimization = sizesTrusted &&
+         fileSizeKnown &&
+         inflateRawSync &&
+         vars.compressionMethod !== 0 &&
+         !autodraining &&
+         compressedSize <= thresholdBytes &&
+         uncompressedSize <= thresholdBytes;
+     if (useSmallFileOptimization) {
+         // Read compressed data directly and decompress synchronously
+         const compressedData = await io.pull(compressedSize);
+         const decompressedData = inflateRawSync(compressedData);
+         entry.end(decompressedData);
+         // Wait for entry stream write to complete (not for read/consume)
+         await (0, _stream_1.finished)(entry, { readable: false });
+         return;
+     }
+     const inflater = vars.compressionMethod && !autodraining
+         ? inflateFactory()
+         : new _stream_1.PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+     if (fileSizeKnown) {
+         await pumpKnownCompressedSizeToEntry(io, inflater, entry, vars.compressedSize || 0);
+         return;
+     }
+     await (0, _stream_1.pipeline)(io.streamUntilDataDescriptor(), inflater, entry);
+     const dd = await readDataDescriptor(async (l) => io.pull(l));
+     entry.size = dd.uncompressedSize || 0;
+ }
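
To make the small-file fast path in readFileRecord concrete, here is its guard applied to a sample entry by hand (a worked example using only functions from this hunk, not package code): a 3 MB file deflated to 1 MB with no data-descriptor flag qualifies under the default 5 MB threshold and is inflated synchronously in one shot.

    const thresholdBytes = exports.DEFAULT_PARSE_THRESHOLD_BYTES; // 5 MiB
    const vars = {
        flags: 0x0000,                    // bit 3 clear: header sizes are trusted
        compressionMethod: 8,             // deflate
        compressedSize: 1 * 1024 * 1024,
        uncompressedSize: 3 * 1024 * 1024
    };
    const qualifies = !hasDataDescriptorFlag(vars.flags) &&
        isFileSizeKnown(vars.flags, vars.compressedSize) &&
        vars.compressionMethod !== 0 &&
        vars.compressedSize <= thresholdBytes &&
        vars.uncompressedSize <= thresholdBytes;  // -> true: inflateRawSync path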