@cj-tech-master/excelts 4.2.0 → 4.2.1-canary.20260111102127.f808a37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (241)
  1. package/THIRD_PARTY_NOTICES.md +0 -31
  2. package/dist/browser/index.browser.d.ts +1 -0
  3. package/dist/browser/index.browser.js +12 -0
  4. package/dist/{types/modules/archive → browser/modules/archive/compression}/compress.base.d.ts +1 -0
  5. package/dist/browser/modules/archive/{compress.base.js → compression/compress.base.js} +2 -1
  6. package/dist/{types/modules/archive → browser/modules/archive/compression}/compress.browser.d.ts +10 -8
  7. package/dist/{esm/modules/archive → browser/modules/archive/compression}/compress.browser.js +18 -19
  8. package/dist/browser/modules/archive/{compress.d.ts → compression/compress.d.ts} +2 -2
  9. package/dist/browser/modules/archive/{compress.js → compression/compress.js} +1 -1
  10. package/dist/browser/modules/archive/{crc32.browser.d.ts → compression/crc32.browser.d.ts} +1 -1
  11. package/dist/browser/modules/archive/{crc32.d.ts → compression/crc32.d.ts} +1 -1
  12. package/dist/browser/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  13. package/dist/browser/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  14. package/dist/browser/modules/archive/{streaming-compress.browser.d.ts → compression/streaming-compress.browser.d.ts} +2 -2
  15. package/dist/browser/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  16. package/dist/browser/modules/archive/{streaming-compress.d.ts → compression/streaming-compress.d.ts} +2 -2
  17. package/dist/browser/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  18. package/dist/browser/modules/archive/defaults.d.ts +1 -0
  19. package/dist/browser/modules/archive/defaults.js +6 -3
  20. package/dist/browser/modules/archive/index.base.d.ts +4 -4
  21. package/dist/browser/modules/archive/index.base.js +3 -6
  22. package/dist/browser/modules/archive/index.browser.d.ts +3 -4
  23. package/dist/browser/modules/archive/index.browser.js +3 -7
  24. package/dist/browser/modules/archive/index.d.ts +3 -4
  25. package/dist/browser/modules/archive/index.js +3 -5
  26. package/dist/browser/modules/archive/internal/byte-queue.d.ts +33 -0
  27. package/dist/browser/modules/archive/internal/byte-queue.js +407 -0
  28. package/dist/browser/modules/archive/io/archive-sink.d.ts +9 -0
  29. package/dist/browser/modules/archive/io/archive-sink.js +77 -0
  30. package/dist/browser/modules/archive/io/archive-source.d.ts +8 -0
  31. package/dist/browser/modules/archive/io/archive-source.js +107 -0
  32. package/dist/browser/modules/archive/{extract.d.ts → unzip/extract.d.ts} +2 -2
  33. package/dist/browser/modules/archive/unzip/index.d.ts +40 -0
  34. package/dist/browser/modules/archive/unzip/index.js +164 -0
  35. package/dist/browser/modules/archive/{parse.base.d.ts → unzip/stream.base.d.ts} +58 -3
  36. package/dist/browser/modules/archive/unzip/stream.base.js +1022 -0
  37. package/dist/browser/modules/archive/{parse.browser.d.ts → unzip/stream.browser.d.ts} +1 -1
  38. package/dist/browser/modules/archive/{parse.browser.js → unzip/stream.browser.js} +376 -110
  39. package/dist/browser/modules/archive/{parse.d.ts → unzip/stream.d.ts} +2 -2
  40. package/dist/{esm/modules/archive/parse.js → browser/modules/archive/unzip/stream.js} +7 -6
  41. package/dist/{types/modules/archive → browser/modules/archive/unzip}/zip-parser.d.ts +1 -1
  42. package/dist/{esm/modules/archive → browser/modules/archive/unzip}/zip-parser.js +38 -24
  43. package/dist/browser/modules/archive/utils/async-queue.d.ts +7 -0
  44. package/dist/browser/modules/archive/utils/async-queue.js +103 -0
  45. package/dist/browser/modules/archive/utils/bytes.js +16 -16
  46. package/dist/browser/modules/archive/utils/compressibility.d.ts +10 -0
  47. package/dist/browser/modules/archive/utils/compressibility.js +57 -0
  48. package/dist/browser/modules/archive/utils/parse-buffer.js +21 -23
  49. package/dist/browser/modules/archive/utils/pattern-scanner.d.ts +21 -0
  50. package/dist/browser/modules/archive/utils/pattern-scanner.js +27 -0
  51. package/dist/browser/modules/archive/utils/timestamps.js +62 -1
  52. package/dist/browser/modules/archive/utils/zip-extra-fields.d.ts +1 -1
  53. package/dist/browser/modules/archive/utils/zip-extra-fields.js +26 -14
  54. package/dist/browser/modules/archive/zip/index.d.ts +42 -0
  55. package/dist/browser/modules/archive/zip/index.js +157 -0
  56. package/dist/browser/modules/archive/{streaming-zip.d.ts → zip/stream.d.ts} +28 -5
  57. package/dist/browser/modules/archive/{streaming-zip.js → zip/stream.js} +192 -48
  58. package/dist/browser/modules/archive/zip/zip-bytes.d.ts +73 -0
  59. package/dist/browser/modules/archive/zip/zip-bytes.js +239 -0
  60. package/dist/{esm/modules/archive → browser/modules/archive/zip}/zip-entry-metadata.js +3 -3
  61. package/dist/browser/modules/archive/{zip-records.d.ts → zip-spec/zip-records.d.ts} +20 -0
  62. package/dist/browser/modules/archive/zip-spec/zip-records.js +126 -0
  63. package/dist/browser/modules/excel/form-control.d.ts +2 -0
  64. package/dist/browser/modules/excel/form-control.js +54 -16
  65. package/dist/browser/modules/excel/stream/workbook-reader.browser.js +1 -1
  66. package/dist/browser/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
  67. package/dist/browser/modules/excel/stream/workbook-writer.browser.js +1 -1
  68. package/dist/browser/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  69. package/dist/browser/modules/excel/xlsx/xlsx.browser.js +3 -6
  70. package/dist/browser/modules/excel/xlsx/xlsx.js +1 -1
  71. package/dist/browser/modules/stream/streams.browser.d.ts +28 -28
  72. package/dist/browser/modules/stream/streams.browser.js +850 -697
  73. package/dist/browser/modules/stream/streams.js +140 -58
  74. package/dist/cjs/modules/archive/{compress.base.js → compression/compress.base.js} +2 -1
  75. package/dist/cjs/modules/archive/{compress.browser.js → compression/compress.browser.js} +18 -19
  76. package/dist/cjs/modules/archive/{compress.js → compression/compress.js} +1 -1
  77. package/dist/cjs/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  78. package/dist/cjs/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  79. package/dist/cjs/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  80. package/dist/cjs/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  81. package/dist/cjs/modules/archive/defaults.js +7 -4
  82. package/dist/cjs/modules/archive/index.base.js +9 -19
  83. package/dist/cjs/modules/archive/index.browser.js +4 -10
  84. package/dist/cjs/modules/archive/index.js +4 -8
  85. package/dist/cjs/modules/archive/internal/byte-queue.js +411 -0
  86. package/dist/cjs/modules/archive/io/archive-sink.js +82 -0
  87. package/dist/cjs/modules/archive/io/archive-source.js +114 -0
  88. package/dist/cjs/modules/archive/unzip/index.js +170 -0
  89. package/dist/cjs/modules/archive/unzip/stream.base.js +1044 -0
  90. package/dist/cjs/modules/archive/{parse.browser.js → unzip/stream.browser.js} +377 -111
  91. package/dist/cjs/modules/archive/{parse.js → unzip/stream.js} +9 -8
  92. package/dist/cjs/modules/archive/{zip-parser.js → unzip/zip-parser.js} +47 -33
  93. package/dist/cjs/modules/archive/utils/async-queue.js +106 -0
  94. package/dist/cjs/modules/archive/utils/bytes.js +16 -16
  95. package/dist/cjs/modules/archive/utils/compressibility.js +60 -0
  96. package/dist/cjs/modules/archive/utils/parse-buffer.js +21 -23
  97. package/dist/cjs/modules/archive/utils/pattern-scanner.js +31 -0
  98. package/dist/cjs/modules/archive/utils/timestamps.js +64 -3
  99. package/dist/cjs/modules/archive/utils/zip-extra-fields.js +26 -14
  100. package/dist/cjs/modules/archive/zip/index.js +162 -0
  101. package/dist/cjs/modules/archive/{streaming-zip.js → zip/stream.js} +194 -50
  102. package/dist/cjs/modules/archive/zip/zip-bytes.js +242 -0
  103. package/dist/cjs/modules/archive/{zip-entry-metadata.js → zip/zip-entry-metadata.js} +5 -5
  104. package/dist/cjs/modules/archive/zip-spec/zip-records.js +136 -0
  105. package/dist/cjs/modules/excel/form-control.js +54 -16
  106. package/dist/cjs/modules/excel/stream/workbook-reader.browser.js +2 -2
  107. package/dist/cjs/modules/excel/stream/workbook-writer.browser.js +4 -4
  108. package/dist/cjs/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  109. package/dist/cjs/modules/excel/xlsx/xlsx.browser.js +6 -9
  110. package/dist/cjs/modules/excel/xlsx/xlsx.js +2 -2
  111. package/dist/cjs/modules/stream/streams.browser.js +850 -697
  112. package/dist/cjs/modules/stream/streams.js +140 -58
  113. package/dist/esm/index.browser.js +12 -0
  114. package/dist/esm/modules/archive/{compress.base.js → compression/compress.base.js} +2 -1
  115. package/dist/{browser/modules/archive → esm/modules/archive/compression}/compress.browser.js +18 -19
  116. package/dist/esm/modules/archive/{compress.js → compression/compress.js} +1 -1
  117. package/dist/esm/modules/archive/{crc32.js → compression/crc32.js} +1 -1
  118. package/dist/esm/modules/archive/{deflate-fallback.js → compression/deflate-fallback.js} +1 -1
  119. package/dist/esm/modules/archive/{streaming-compress.browser.js → compression/streaming-compress.browser.js} +3 -3
  120. package/dist/esm/modules/archive/{streaming-compress.js → compression/streaming-compress.js} +2 -2
  121. package/dist/esm/modules/archive/defaults.js +6 -3
  122. package/dist/esm/modules/archive/index.base.js +3 -6
  123. package/dist/esm/modules/archive/index.browser.js +3 -7
  124. package/dist/esm/modules/archive/index.js +3 -5
  125. package/dist/esm/modules/archive/internal/byte-queue.js +407 -0
  126. package/dist/esm/modules/archive/io/archive-sink.js +77 -0
  127. package/dist/esm/modules/archive/io/archive-source.js +107 -0
  128. package/dist/esm/modules/archive/unzip/index.js +164 -0
  129. package/dist/esm/modules/archive/unzip/stream.base.js +1022 -0
  130. package/dist/esm/modules/archive/{parse.browser.js → unzip/stream.browser.js} +376 -110
  131. package/dist/{browser/modules/archive/parse.js → esm/modules/archive/unzip/stream.js} +7 -6
  132. package/dist/{browser/modules/archive → esm/modules/archive/unzip}/zip-parser.js +38 -24
  133. package/dist/esm/modules/archive/utils/async-queue.js +103 -0
  134. package/dist/esm/modules/archive/utils/bytes.js +16 -16
  135. package/dist/esm/modules/archive/utils/compressibility.js +57 -0
  136. package/dist/esm/modules/archive/utils/parse-buffer.js +21 -23
  137. package/dist/esm/modules/archive/utils/pattern-scanner.js +27 -0
  138. package/dist/esm/modules/archive/utils/timestamps.js +62 -1
  139. package/dist/esm/modules/archive/utils/zip-extra-fields.js +26 -14
  140. package/dist/esm/modules/archive/zip/index.js +157 -0
  141. package/dist/esm/modules/archive/{streaming-zip.js → zip/stream.js} +192 -48
  142. package/dist/esm/modules/archive/zip/zip-bytes.js +239 -0
  143. package/dist/{browser/modules/archive → esm/modules/archive/zip}/zip-entry-metadata.js +3 -3
  144. package/dist/esm/modules/archive/zip-spec/zip-records.js +126 -0
  145. package/dist/esm/modules/excel/form-control.js +54 -16
  146. package/dist/esm/modules/excel/stream/workbook-reader.browser.js +1 -1
  147. package/dist/esm/modules/excel/stream/workbook-writer.browser.js +1 -1
  148. package/dist/esm/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  149. package/dist/esm/modules/excel/xlsx/xlsx.browser.js +3 -6
  150. package/dist/esm/modules/excel/xlsx/xlsx.js +1 -1
  151. package/dist/esm/modules/stream/streams.browser.js +850 -697
  152. package/dist/esm/modules/stream/streams.js +140 -58
  153. package/dist/iife/THIRD_PARTY_NOTICES.md +81 -0
  154. package/dist/iife/excelts.iife.js +4777 -2863
  155. package/dist/iife/excelts.iife.js.map +1 -1
  156. package/dist/iife/excelts.iife.min.js +103 -31
  157. package/dist/types/index.browser.d.ts +1 -0
  158. package/dist/{browser/modules/archive → types/modules/archive/compression}/compress.base.d.ts +1 -0
  159. package/dist/{browser/modules/archive → types/modules/archive/compression}/compress.browser.d.ts +10 -8
  160. package/dist/types/modules/archive/{streaming-compress.browser.d.ts → compression/streaming-compress.browser.d.ts} +1 -1
  161. package/dist/types/modules/archive/defaults.d.ts +1 -0
  162. package/dist/types/modules/archive/index.base.d.ts +4 -4
  163. package/dist/types/modules/archive/index.browser.d.ts +3 -4
  164. package/dist/types/modules/archive/index.d.ts +3 -4
  165. package/dist/types/modules/archive/internal/byte-queue.d.ts +33 -0
  166. package/dist/types/modules/archive/io/archive-sink.d.ts +9 -0
  167. package/dist/types/modules/archive/io/archive-source.d.ts +8 -0
  168. package/dist/types/modules/archive/unzip/index.d.ts +40 -0
  169. package/dist/types/modules/archive/{parse.base.d.ts → unzip/stream.base.d.ts} +60 -5
  170. package/dist/types/modules/archive/{parse.browser.d.ts → unzip/stream.browser.d.ts} +2 -2
  171. package/dist/types/modules/archive/{parse.d.ts → unzip/stream.d.ts} +3 -3
  172. package/dist/{browser/modules/archive → types/modules/archive/unzip}/zip-parser.d.ts +1 -1
  173. package/dist/types/modules/archive/utils/async-queue.d.ts +7 -0
  174. package/dist/types/modules/archive/utils/compressibility.d.ts +10 -0
  175. package/dist/types/modules/archive/utils/pattern-scanner.d.ts +21 -0
  176. package/dist/types/modules/archive/utils/zip-extra-fields.d.ts +1 -1
  177. package/dist/types/modules/archive/zip/index.d.ts +42 -0
  178. package/dist/types/modules/archive/{streaming-zip.d.ts → zip/stream.d.ts} +29 -6
  179. package/dist/types/modules/archive/zip/zip-bytes.d.ts +73 -0
  180. package/dist/types/modules/archive/{zip-entry-metadata.d.ts → zip/zip-entry-metadata.d.ts} +1 -1
  181. package/dist/types/modules/archive/{zip-records.d.ts → zip-spec/zip-records.d.ts} +20 -0
  182. package/dist/types/modules/excel/form-control.d.ts +2 -0
  183. package/dist/types/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
  184. package/dist/types/modules/stream/streams.browser.d.ts +28 -28
  185. package/package.json +10 -6
  186. package/dist/browser/modules/archive/byte-queue.d.ts +0 -18
  187. package/dist/browser/modules/archive/byte-queue.js +0 -125
  188. package/dist/browser/modules/archive/parse.base.js +0 -610
  189. package/dist/browser/modules/archive/utils/zip-extra.d.ts +0 -18
  190. package/dist/browser/modules/archive/utils/zip-extra.js +0 -68
  191. package/dist/browser/modules/archive/zip-builder.d.ts +0 -117
  192. package/dist/browser/modules/archive/zip-builder.js +0 -292
  193. package/dist/browser/modules/archive/zip-constants.d.ts +0 -18
  194. package/dist/browser/modules/archive/zip-constants.js +0 -23
  195. package/dist/browser/modules/archive/zip-records.js +0 -84
  196. package/dist/cjs/modules/archive/byte-queue.js +0 -129
  197. package/dist/cjs/modules/archive/parse.base.js +0 -632
  198. package/dist/cjs/modules/archive/utils/zip-extra.js +0 -74
  199. package/dist/cjs/modules/archive/zip-builder.js +0 -297
  200. package/dist/cjs/modules/archive/zip-constants.js +0 -26
  201. package/dist/cjs/modules/archive/zip-records.js +0 -90
  202. package/dist/esm/modules/archive/byte-queue.js +0 -125
  203. package/dist/esm/modules/archive/parse.base.js +0 -610
  204. package/dist/esm/modules/archive/utils/zip-extra.js +0 -68
  205. package/dist/esm/modules/archive/zip-builder.js +0 -292
  206. package/dist/esm/modules/archive/zip-constants.js +0 -23
  207. package/dist/esm/modules/archive/zip-records.js +0 -84
  208. package/dist/types/modules/archive/byte-queue.d.ts +0 -18
  209. package/dist/types/modules/archive/utils/zip-extra.d.ts +0 -18
  210. package/dist/types/modules/archive/zip-builder.d.ts +0 -117
  211. package/dist/types/modules/archive/zip-constants.d.ts +0 -18
  212. /package/dist/browser/modules/archive/{crc32.base.d.ts → compression/crc32.base.d.ts} +0 -0
  213. /package/dist/browser/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  214. /package/dist/browser/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  215. /package/dist/browser/modules/archive/{deflate-fallback.d.ts → compression/deflate-fallback.d.ts} +0 -0
  216. /package/dist/browser/modules/archive/{streaming-compress.base.d.ts → compression/streaming-compress.base.d.ts} +0 -0
  217. /package/dist/browser/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  218. /package/dist/browser/modules/archive/{extract.js → unzip/extract.js} +0 -0
  219. /package/dist/browser/modules/archive/{zip-entry-metadata.d.ts → zip/zip-entry-metadata.d.ts} +0 -0
  220. /package/dist/browser/modules/archive/{zip-entry-info.d.ts → zip-spec/zip-entry-info.d.ts} +0 -0
  221. /package/dist/browser/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  222. /package/dist/cjs/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  223. /package/dist/cjs/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  224. /package/dist/cjs/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  225. /package/dist/cjs/modules/archive/{extract.js → unzip/extract.js} +0 -0
  226. /package/dist/cjs/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  227. /package/dist/esm/modules/archive/{crc32.base.js → compression/crc32.base.js} +0 -0
  228. /package/dist/esm/modules/archive/{crc32.browser.js → compression/crc32.browser.js} +0 -0
  229. /package/dist/esm/modules/archive/{streaming-compress.base.js → compression/streaming-compress.base.js} +0 -0
  230. /package/dist/esm/modules/archive/{extract.js → unzip/extract.js} +0 -0
  231. /package/dist/esm/modules/archive/{zip-entry-info.js → zip-spec/zip-entry-info.js} +0 -0
  232. /package/dist/{LICENSE → iife/LICENSE} +0 -0
  233. /package/dist/types/modules/archive/{compress.d.ts → compression/compress.d.ts} +0 -0
  234. /package/dist/types/modules/archive/{crc32.base.d.ts → compression/crc32.base.d.ts} +0 -0
  235. /package/dist/types/modules/archive/{crc32.browser.d.ts → compression/crc32.browser.d.ts} +0 -0
  236. /package/dist/types/modules/archive/{crc32.d.ts → compression/crc32.d.ts} +0 -0
  237. /package/dist/types/modules/archive/{deflate-fallback.d.ts → compression/deflate-fallback.d.ts} +0 -0
  238. /package/dist/types/modules/archive/{streaming-compress.base.d.ts → compression/streaming-compress.base.d.ts} +0 -0
  239. /package/dist/types/modules/archive/{streaming-compress.d.ts → compression/streaming-compress.d.ts} +0 -0
  240. /package/dist/types/modules/archive/{extract.d.ts → unzip/extract.d.ts} +0 -0
  241. /package/dist/types/modules/archive/{zip-entry-info.d.ts → zip-spec/zip-entry-info.d.ts} +0 -0
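
The hunk below is the largest single addition in this release: the new shared unzip parser core, modules/archive/unzip/stream.base.js (entries 36, 89, and 129 above), shown here in its ESM-syntax build (+1,022 lines).
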
@@ -0,0 +1,1022 @@
+ import { parseDosDateTimeUTC, resolveZipLastModifiedDateFromUnixSeconds } from "../utils/timestamps.js";
+ import { Duplex, PassThrough, Transform, concatUint8Arrays, pipeline, finished } from "../../stream/index.js";
+ import { parseTyped as parseBuffer } from "../utils/parse-buffer.js";
+ import { ByteQueue } from "../internal/byte-queue.js";
+ import { indexOfUint8ArrayPattern } from "../utils/bytes.js";
+ import { PatternScanner } from "../utils/pattern-scanner.js";
+ import { readUint32LE, writeUint32LE } from "../utils/binary.js";
+ import { parseZipExtraFields } from "../utils/zip-extra-fields.js";
+ import { CENTRAL_DIR_HEADER_SIG, DATA_DESCRIPTOR_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG, ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG, ZIP64_END_OF_CENTRAL_DIR_SIG } from "../zip-spec/zip-records.js";
+ export const DATA_DESCRIPTOR_SIGNATURE_BYTES = writeUint32LE(DATA_DESCRIPTOR_SIG);
+ const DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK = 256 * 1024;
+ // Shared parseBuffer() formats
+ export const CRX_HEADER_FORMAT = [
+     ["version", 4],
+     ["pubKeyLength", 4],
+     ["signatureLength", 4]
+ ];
+ export const LOCAL_FILE_HEADER_FORMAT = [
+     ["versionsNeededToExtract", 2],
+     ["flags", 2],
+     ["compressionMethod", 2],
+     ["lastModifiedTime", 2],
+     ["lastModifiedDate", 2],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4],
+     ["fileNameLength", 2],
+     ["extraFieldLength", 2]
+ ];
+ export const DATA_DESCRIPTOR_FORMAT = [
+     ["dataDescriptorSignature", 4],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4]
+ ];
+ export const CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = [
+     ["versionMadeBy", 2],
+     ["versionsNeededToExtract", 2],
+     ["flags", 2],
+     ["compressionMethod", 2],
+     ["lastModifiedTime", 2],
+     ["lastModifiedDate", 2],
+     ["crc32", 4],
+     ["compressedSize", 4],
+     ["uncompressedSize", 4],
+     ["fileNameLength", 2],
+     ["extraFieldLength", 2],
+     ["fileCommentLength", 2],
+     ["diskNumber", 2],
+     ["internalFileAttributes", 2],
+     ["externalFileAttributes", 4],
+     ["offsetToLocalFileHeader", 4]
+ ];
+ export const END_OF_CENTRAL_DIRECTORY_FORMAT = [
+     ["diskNumber", 2],
+     ["diskStart", 2],
+     ["numberOfRecordsOnDisk", 2],
+     ["numberOfRecords", 2],
+     ["sizeOfCentralDirectory", 4],
+     ["offsetToStartOfCentralDirectory", 4],
+     ["commentLength", 2]
+ ];
+ const textDecoder = new TextDecoder();
+ const textEncoder = new TextEncoder();
+ export function decodeZipEntryPath(pathBuffer) {
+     return textDecoder.decode(pathBuffer);
+ }
+ export function isZipUnicodeFlag(flags) {
+     return ((flags || 0) & 0x800) !== 0;
+ }
+ export function isZipDirectoryPath(path) {
+     if (path.length === 0) {
+         return false;
+     }
+     const last = path.charCodeAt(path.length - 1);
+     return last === 47 || last === 92;
+ }
+ export function getZipEntryType(path, uncompressedSize) {
+     return uncompressedSize === 0 && isZipDirectoryPath(path) ? "Directory" : "File";
+ }
+ export function buildZipEntryProps(path, pathBuffer, flags) {
+     return {
+         path,
+         pathBuffer,
+         flags: {
+             isUnicode: isZipUnicodeFlag(flags)
+         }
+     };
+ }
+ export function resolveZipEntryLastModifiedDateTime(vars, extraFields) {
+     const dosDate = vars.lastModifiedDate || 0;
+     const dosTime = vars.lastModifiedTime || 0;
+     const dosDateTime = parseDosDateTimeUTC(dosDate, dosTime);
+     const unixSecondsMtime = extraFields.mtimeUnixSeconds;
+     if (unixSecondsMtime === undefined) {
+         return dosDateTime;
+     }
+     return resolveZipLastModifiedDateFromUnixSeconds(dosDate, dosTime, unixSecondsMtime);
+ }
+ export const parseExtraField = parseZipExtraFields;
+ export function hasDataDescriptorFlag(flags) {
+     return ((flags || 0) & 0x08) !== 0;
+ }
+ export function isFileSizeKnown(flags, compressedSize) {
+     return !hasDataDescriptorFlag(flags) || (compressedSize || 0) > 0;
+ }
+ export function autodrain(stream) {
+     const draining = stream.pipe(new Transform({
+         transform(_chunk, _encoding, callback) {
+             callback();
+         }
+     }));
+     draining.promise = () => new Promise((resolve, reject) => {
+         draining.on("finish", resolve);
+         draining.on("error", reject);
+     });
+     return draining;
+ }
+ /**
+  * Collects all data from a readable stream into a single Uint8Array.
+  */
+ export function bufferStream(entry) {
+     return new Promise((resolve, reject) => {
+         const chunks = [];
+         const stream = new Transform({
+             transform(d, _encoding, callback) {
+                 chunks.push(d);
+                 callback();
+             }
+         });
+         stream.on("finish", () => {
+             resolve(chunks.length === 1 ? chunks[0] : concatUint8Arrays(chunks));
+         });
+         stream.on("error", reject);
+         entry.on("error", reject).pipe(stream);
+     });
+ }
+ const STR_FUNCTION = "function";
+ export class PullStream extends Duplex {
+     get buffer() {
+         return this._queue.view();
+     }
+     set buffer(value) {
+         this._queue.reset(value);
+     }
+     constructor() {
+         super({ decodeStrings: false, objectMode: true });
+         this._queue = new ByteQueue();
+         this.finished = false;
+         this.on("finish", () => {
+             this.finished = true;
+             this.emit("chunk", false);
+         });
+     }
+     _write(chunk, _encoding, callback) {
+         const data = typeof chunk === "string" ? textEncoder.encode(chunk) : chunk;
+         this._queue.append(data);
+         this.cb = callback;
+         this.emit("chunk");
+     }
+     _read() { }
+     _maybeReleaseWriteCallback() {
+         if (typeof this.cb === STR_FUNCTION) {
+             const callback = this.cb;
+             this.cb = undefined;
+             callback();
+         }
+     }
+     /**
+      * The `eof` parameter is interpreted as `file_length` if the type is number
+      * otherwise (i.e. Uint8Array) it is interpreted as a pattern signaling end of stream
+      */
+     stream(eof, includeEof) {
+         const p = new PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+         let done = false;
+         let waitingDrain = false;
+         const eofIsNumber = typeof eof === "number";
+         let remainingBytes = eofIsNumber ? eof : 0;
+         const pattern = eofIsNumber ? undefined : eof;
+         const patternLen = pattern ? pattern.length : 0;
+         const minTailBytes = eofIsNumber ? 0 : patternLen;
+         const scanner = eofIsNumber ? undefined : new PatternScanner(pattern);
+         const cb = () => {
+             this._maybeReleaseWriteCallback();
+         };
+         const pull = () => {
+             if (done || waitingDrain) {
+                 return;
+             }
+             while (true) {
+                 const available = this._queue.length;
+                 if (!available) {
+                     break;
+                 }
+                 let packet;
+                 if (eofIsNumber) {
+                     const toRead = Math.min(remainingBytes, available);
+                     if (toRead > 0) {
+                         packet = this._queue.read(toRead);
+                         remainingBytes -= toRead;
+                     }
+                     done = done || remainingBytes === 0;
+                 }
+                 else {
+                     const bufLen = this._queue.length;
+                     const match = scanner.find(this._queue);
+                     if (match !== -1) {
+                         // store signature match byte offset to allow us to reference
+                         // this for zip64 offset
+                         this.match = match;
+                         const toRead = includeEof ? match + patternLen : match;
+                         if (toRead > 0) {
+                             packet = this._queue.read(toRead);
+                             scanner.onConsume(toRead);
+                         }
+                         done = true;
+                     }
+                     else {
+                         // No match yet. Avoid rescanning bytes that can't start a match.
+                         scanner.onNoMatch(bufLen);
+                         const len = bufLen - patternLen;
+                         if (len <= 0) {
+                             // Keep enough bytes to detect a split signature.
+                             if (this._queue.length === 0 ||
+                                 (minTailBytes && this._queue.length <= minTailBytes)) {
+                                 cb();
+                             }
+                         }
+                         else {
+                             packet = this._queue.read(len);
+                             scanner.onConsume(len);
+                         }
+                     }
+                 }
+                 if (!packet) {
+                     break;
+                 }
+                 const ok = p.write(packet);
+                 // If we drained the internal buffer (or kept only a minimal tail), allow upstream to continue.
+                 if (this._queue.length === 0 || (minTailBytes && this._queue.length <= minTailBytes)) {
+                     cb();
+                 }
+                 if (!ok) {
+                     waitingDrain = true;
+                     p.once("drain", () => {
+                         waitingDrain = false;
+                         pull();
+                     });
+                     return;
+                 }
+                 if (done) {
+                     cb();
+                     this.removeListener("chunk", pull);
+                     p.end();
+                     return;
+                 }
+             }
+             if (!done) {
+                 if (this.finished) {
+                     this.removeListener("chunk", pull);
+                     cb();
+                     p.destroy(new Error("FILE_ENDED"));
+                 }
+                 return;
+             }
+             this.removeListener("chunk", pull);
+             cb();
+             p.end();
+         };
+         this.on("chunk", pull);
+         pull();
+         return p;
+     }
+     pull(eof, includeEof) {
+         if (eof === 0) {
+             return Promise.resolve(new Uint8Array(0));
+         }
+         // If we already have the required data in buffer
+         // we can resolve the request immediately
+         if (typeof eof === "number" && this._queue.length >= eof) {
+             const data = this._queue.read(eof);
+             // Allow the upstream writer to continue once the consumer makes progress.
+             // Waiting for a full drain can deadlock when the producer must call `end()`
+             // but is blocked behind a deferred write callback.
+             this._maybeReleaseWriteCallback();
+             return Promise.resolve(data);
+         }
+         // Otherwise we wait for more data and fulfill directly from the internal queue.
+         // This avoids constructing intermediate streams for small pulls (hot path).
+         const chunks = [];
+         let pullStreamRejectHandler;
+         // Pattern scanning state (only used when eof is a pattern)
+         const eofIsNumber = typeof eof === "number";
+         const pattern = eofIsNumber ? undefined : eof;
+         const patternLen = pattern ? pattern.length : 0;
+         const scanner = eofIsNumber ? undefined : new PatternScanner(pattern);
+         return new Promise((resolve, reject) => {
+             let settled = false;
+             pullStreamRejectHandler = (e) => {
+                 this.__emittedError = e;
+                 cleanup();
+                 reject(e);
+             };
+             if (this.finished) {
+                 reject(new Error("FILE_ENDED"));
+                 return;
+             }
+             const cleanup = () => {
+                 this.removeListener("chunk", onChunk);
+                 this.removeListener("finish", onFinish);
+                 this.removeListener("error", pullStreamRejectHandler);
+             };
+             const finalize = () => {
+                 cleanup();
+                 settled = true;
+                 if (chunks.length === 0) {
+                     resolve(new Uint8Array(0));
+                     return;
+                 }
+                 resolve(chunks.length === 1 ? chunks[0] : concatUint8Arrays(chunks));
+             };
+             const onFinish = () => {
+                 if (settled) {
+                     return;
+                 }
+                 // Try one last time to drain anything already buffered.
+                 onChunk();
+                 if (!settled) {
+                     cleanup();
+                     reject(new Error("FILE_ENDED"));
+                 }
+             };
+             const onChunk = () => {
+                 if (typeof eof === "number") {
+                     const available = this._queue.length;
+                     if (available <= 0) {
+                         return;
+                     }
+                     const toRead = Math.min(eof, available);
+                     if (toRead > 0) {
+                         chunks.push(this._queue.read(toRead));
+                         eof -= toRead;
+                     }
+                     // Allow upstream to continue as soon as we consume bytes.
+                     // This avoids deadlocks when the last upstream chunk is waiting on its
+                     // callback and the parser needs an EOF signal after draining buffered data.
+                     this._maybeReleaseWriteCallback();
+                     if (eof === 0) {
+                         finalize();
+                     }
+                     return;
+                 }
+                 // eof is a pattern
+                 while (this._queue.length > 0) {
+                     const bufLen = this._queue.length;
+                     const match = scanner.find(this._queue);
+                     if (match !== -1) {
+                         // store signature match byte offset to allow us to reference
+                         // this for zip64 offset
+                         this.match = match;
+                         const toRead = includeEof ? match + patternLen : match;
+                         if (toRead > 0) {
+                             chunks.push(this._queue.read(toRead));
+                             scanner.onConsume(toRead);
+                         }
+                         if (this._queue.length === 0 || (patternLen && this._queue.length <= patternLen)) {
+                             this._maybeReleaseWriteCallback();
+                         }
+                         finalize();
+                         return;
+                     }
+                     // No match yet. Avoid rescanning bytes that can't start a match.
+                     scanner.onNoMatch(bufLen);
+                     const safeLen = bufLen - patternLen;
+                     if (safeLen <= 0) {
+                         // Keep enough bytes to detect a split signature.
+                         this._maybeReleaseWriteCallback();
+                         return;
+                     }
+                     chunks.push(this._queue.read(safeLen));
+                     scanner.onConsume(safeLen);
+                     if (this._queue.length === 0 || (patternLen && this._queue.length <= patternLen)) {
+                         this._maybeReleaseWriteCallback();
+                         return;
+                     }
+                 }
+             };
+             this.once("error", pullStreamRejectHandler);
+             this.on("chunk", onChunk);
+             this.once("finish", onFinish);
+             // Attempt immediate fulfillment from any already-buffered data.
+             onChunk();
+         });
+     }
+     pullUntil(pattern, includeEof) {
+         return this.pull(pattern, includeEof);
+     }
+ }
+ export async function readCrxHeader(pull) {
+     const data = await pull(12);
+     const header = data.length >= 12 ? parseCrxHeaderFast(data) : parseBuffer(data, CRX_HEADER_FORMAT);
+     const pubKeyLength = header.pubKeyLength || 0;
+     const signatureLength = header.signatureLength || 0;
+     const keyAndSig = await pull(pubKeyLength + signatureLength);
+     header.publicKey = keyAndSig.subarray(0, pubKeyLength);
+     header.signature = keyAndSig.subarray(pubKeyLength);
+     return header;
+ }
+ export async function readLocalFileHeader(pull) {
+     const data = await pull(26);
+     const vars = data.length >= 26
+         ? parseLocalFileHeaderVarsFast(data)
+         : parseBuffer(data, LOCAL_FILE_HEADER_FORMAT);
+     const fileNameBuffer = await pull(vars.fileNameLength || 0);
+     const extraFieldData = await pull(vars.extraFieldLength || 0);
+     return { vars, fileNameBuffer, extraFieldData };
+ }
+ export async function readDataDescriptor(pull) {
+     const data = await pull(16);
+     return data.length >= 16
+         ? parseDataDescriptorVarsFast(data)
+         : parseBuffer(data, DATA_DESCRIPTOR_FORMAT);
+ }
+ export async function consumeCentralDirectoryFileHeader(pull) {
+     const data = await pull(42);
+     const vars = parseBuffer(data, CENTRAL_DIRECTORY_FILE_HEADER_FORMAT);
+     await pull(vars.fileNameLength || 0);
+     await pull(vars.extraFieldLength || 0);
+     await pull(vars.fileCommentLength || 0);
+ }
+ export async function consumeEndOfCentralDirectoryRecord(pull) {
+     const data = await pull(18);
+     const vars = parseBuffer(data, END_OF_CENTRAL_DIRECTORY_FORMAT);
+     await pull(vars.commentLength || 0);
+ }
+ // =============================================================================
+ // Validated Data Descriptor Scan (shared by Node + Browser)
+ // =============================================================================
+ function isValidZipRecordSignature(sig) {
+     switch (sig) {
+         case LOCAL_FILE_HEADER_SIG:
+         case CENTRAL_DIR_HEADER_SIG:
+         case END_OF_CENTRAL_DIR_SIG:
+         case ZIP64_END_OF_CENTRAL_DIR_SIG:
+         case ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG:
+             return true;
+         default:
+             return false;
+     }
+ }
+ function readUint32LEFromBytes(view, offset) {
+     return ((view[offset] |
+         0 |
+         ((view[offset + 1] | 0) << 8) |
+         ((view[offset + 2] | 0) << 16) |
+         ((view[offset + 3] | 0) << 24)) >>>
+         0);
+ }
+ function readUint16LEFromBytes(view, offset) {
+     return (view[offset] | ((view[offset + 1] | 0) << 8)) >>> 0;
+ }
+ function parseCrxHeaderFast(data) {
+     return {
+         version: readUint32LEFromBytes(data, 0),
+         pubKeyLength: readUint32LEFromBytes(data, 4),
+         signatureLength: readUint32LEFromBytes(data, 8)
+     };
+ }
+ function parseLocalFileHeaderVarsFast(data) {
+     return {
+         versionsNeededToExtract: readUint16LEFromBytes(data, 0),
+         flags: readUint16LEFromBytes(data, 2),
+         compressionMethod: readUint16LEFromBytes(data, 4),
+         lastModifiedTime: readUint16LEFromBytes(data, 6),
+         lastModifiedDate: readUint16LEFromBytes(data, 8),
+         crc32: readUint32LEFromBytes(data, 10),
+         compressedSize: readUint32LEFromBytes(data, 14),
+         uncompressedSize: readUint32LEFromBytes(data, 18),
+         fileNameLength: readUint16LEFromBytes(data, 22),
+         extraFieldLength: readUint16LEFromBytes(data, 24)
+     };
+ }
+ function parseDataDescriptorVarsFast(data) {
+     return {
+         dataDescriptorSignature: readUint32LEFromBytes(data, 0),
+         crc32: readUint32LEFromBytes(data, 4),
+         compressedSize: readUint32LEFromBytes(data, 8),
+         uncompressedSize: readUint32LEFromBytes(data, 12)
+     };
+ }
+ function indexOf4BytesPattern(buffer, pattern, startIndex) {
+     if (pattern.length !== 4) {
+         return indexOfUint8ArrayPattern(buffer, pattern, startIndex);
+     }
+     const b0 = pattern[0];
+     const b1 = pattern[1];
+     const b2 = pattern[2];
+     const b3 = pattern[3];
+     const bufLen = buffer.length;
+     let start = startIndex | 0;
+     if (start < 0) {
+         start = 0;
+     }
+     if (start > bufLen - 4) {
+         return -1;
+     }
+     const last = bufLen - 4;
+     let i = buffer.indexOf(b0, start);
+     while (i !== -1 && i <= last) {
+         if (buffer[i + 1] === b1 && buffer[i + 2] === b2 && buffer[i + 3] === b3) {
+             return i;
+         }
+         i = buffer.indexOf(b0, i + 1);
+     }
+     return -1;
+ }
+ function initScanResult(out) {
+     if (out) {
+         return out;
+     }
+     return { foundIndex: -1, nextSearchFrom: 0 };
+ }
+ /**
+  * Scan for a validated DATA_DESCRIPTOR record boundary.
+  *
+  * Scanning for the 4-byte signature alone is unsafe because it can appear inside
+  * compressed data. We validate a candidate by requiring:
+  * - the next 4 bytes after the 16-byte descriptor form a known ZIP record signature, and
+  * - the descriptor's compressedSize matches the number of compressed bytes emitted so far.
+  */
+ export function scanValidatedDataDescriptor(view, dataDescriptorSignature, bytesEmitted, startIndex = 0, out) {
+     const result = initScanResult(out);
+     const viewLen = view.length;
+     let searchFrom = startIndex | 0;
+     if (searchFrom < 0) {
+         searchFrom = 0;
+     }
+     if (searchFrom > viewLen) {
+         searchFrom = viewLen;
+     }
+     // To avoid missing a signature split across chunk boundaries, we may need
+     // to re-check the last (sigLen - 1) bytes on the next scan.
+     const sigLen = dataDescriptorSignature.length | 0;
+     const overlap = sigLen > 0 ? sigLen - 1 : 0;
+     const viewLimit = Math.max(0, viewLen - overlap);
+     while (searchFrom < viewLen) {
+         const match = indexOf4BytesPattern(view, dataDescriptorSignature, searchFrom);
+         if (match === -1) {
+             result.foundIndex = -1;
+             result.nextSearchFrom = Math.max(searchFrom, viewLimit);
+             return result;
+         }
+         const idx = match;
+         // Need 16 bytes for descriptor + 4 bytes for next record signature.
+         const nextSigOffset = idx + 16;
+         if (nextSigOffset + 4 <= viewLen) {
+             const nextSig = readUint32LEFromBytes(view, nextSigOffset);
+             const descriptorCompressedSize = readUint32LEFromBytes(view, idx + 8);
+             const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
+             if (isValidZipRecordSignature(nextSig) &&
+                 descriptorCompressedSize === expectedCompressedSize) {
+                 result.foundIndex = idx;
+                 result.nextSearchFrom = idx;
+                 return result;
+             }
+             searchFrom = idx + 1;
+             continue;
+         }
+         // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
+         result.foundIndex = -1;
+         result.nextSearchFrom = idx;
+         return result;
+     }
+     result.foundIndex = -1;
+     result.nextSearchFrom = Math.max(searchFrom, viewLimit);
+     return result;
+ }
+ /**
+  * Stream compressed file data until we reach a validated DATA_DESCRIPTOR boundary.
+  *
+  * This encapsulates the shared logic used by both Node and browser parsers.
+  */
+ export function streamUntilValidatedDataDescriptor(options) {
+     const { source, dataDescriptorSignature } = options;
+     const keepTailBytes = options.keepTailBytes ?? 20;
+     const errorMessage = options.errorMessage ?? "FILE_ENDED: Data descriptor not found";
+     const output = new PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+     let done = false;
+     let waitingDrain = false;
+     // Total number of compressed bytes already emitted for this entry.
+     let bytesEmitted = 0;
+     const scanner = new PatternScanner(dataDescriptorSignature);
+     let unsubscribe;
+     const cleanup = () => {
+         if (unsubscribe) {
+             unsubscribe();
+             unsubscribe = undefined;
+         }
+     };
+     const pull = () => {
+         if (done) {
+             return;
+         }
+         if (waitingDrain) {
+             return;
+         }
+         let available = source.getLength();
+         if (available === 0) {
+             // If we have no buffered data, ensure upstream isn't stuck behind a
+             // deferred write callback.
+             source.maybeReleaseWriteCallback?.();
+         }
+         while (available > 0) {
+             // Try to find and validate a descriptor candidate.
+             while (true) {
+                 const idx = scanner.find(source);
+                 if (idx === -1) {
+                     break;
+                 }
+                 // Need 16 bytes for descriptor + 4 bytes for next record signature.
+                 const nextSigOffset = idx + 16;
+                 if (nextSigOffset + 4 <= available) {
+                     const nextSig = source.peekUint32LE(nextSigOffset);
+                     const descriptorCompressedSize = source.peekUint32LE(idx + 8);
+                     const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
+                     if (nextSig !== null &&
+                         descriptorCompressedSize !== null &&
+                         isValidZipRecordSignature(nextSig) &&
+                         descriptorCompressedSize === expectedCompressedSize) {
+                         if (idx > 0) {
+                             if (source.peekChunks && source.discard) {
+                                 const parts = source.peekChunks(idx);
+                                 let written = 0;
+                                 for (const part of parts) {
+                                     const ok = output.write(part);
+                                     written += part.length;
+                                     if (!ok) {
+                                         waitingDrain = true;
+                                         output.once("drain", () => {
+                                             waitingDrain = false;
+                                             pull();
+                                         });
+                                         break;
+                                     }
+                                 }
+                                 if (written > 0) {
+                                     source.discard(written);
+                                     bytesEmitted += written;
+                                     available -= written;
+                                     scanner.onConsume(written);
+                                 }
+                                 if (waitingDrain) {
+                                     return;
+                                 }
+                             }
+                             else {
+                                 const ok = output.write(source.read(idx));
+                                 bytesEmitted += idx;
+                                 available -= idx;
+                                 scanner.onConsume(idx);
+                                 if (!ok) {
+                                     waitingDrain = true;
+                                     output.once("drain", () => {
+                                         waitingDrain = false;
+                                         pull();
+                                     });
+                                     return;
+                                 }
+                             }
+                         }
+                         done = true;
+                         source.maybeReleaseWriteCallback?.();
+                         cleanup();
+                         output.end();
+                         return;
+                     }
+                     scanner.searchFrom = idx + 1;
+                     continue;
+                 }
+                 // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
+                 scanner.searchFrom = idx;
+                 break;
+             }
+             // No validated match yet.
+             scanner.onNoMatch(available);
+             // Flush most of the buffered data but keep a tail so a potential signature
+             // split across chunks can still be detected/validated.
+             const flushLen = Math.max(0, available - keepTailBytes);
+             if (flushLen > 0) {
+                 if (source.peekChunks && source.discard) {
+                     const parts = source.peekChunks(flushLen);
+                     let written = 0;
+                     for (const part of parts) {
+                         const ok = output.write(part);
+                         written += part.length;
+                         if (!ok) {
+                             waitingDrain = true;
+                             output.once("drain", () => {
+                                 waitingDrain = false;
+                                 pull();
+                             });
+                             break;
+                         }
+                     }
+                     if (written > 0) {
+                         source.discard(written);
+                         bytesEmitted += written;
+                         available -= written;
+                         scanner.onConsume(written);
+                     }
+                     if (available <= keepTailBytes) {
+                         source.maybeReleaseWriteCallback?.();
+                     }
+                     return;
+                 }
+                 const ok = output.write(source.read(flushLen));
+                 bytesEmitted += flushLen;
+                 available -= flushLen;
+                 scanner.onConsume(flushLen);
+                 if (available <= keepTailBytes) {
+                     source.maybeReleaseWriteCallback?.();
+                 }
+                 if (!ok) {
+                     waitingDrain = true;
+                     output.once("drain", () => {
+                         waitingDrain = false;
+                         pull();
+                     });
+                 }
+                 return;
+             }
+             // Need more data.
+             // IMPORTANT: If we keep a tail and cannot flush anything yet, we must still
+             // release upstream write callbacks; otherwise the producer can deadlock waiting
+             // for backpressure while we wait for more bytes to arrive.
+             source.maybeReleaseWriteCallback?.();
+             break;
+         }
+         if (!done && source.isFinished()) {
+             done = true;
+             cleanup();
+             output.destroy(new Error(errorMessage));
+         }
+     };
+     unsubscribe = source.onDataAvailable(pull);
+     queueMicrotask(pull);
+     return output;
+ }
+ /**
+  * Default threshold for small file optimization (5MB).
+  */
+ export const DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
+ const endDirectorySignature = writeUint32LE(END_OF_CENTRAL_DIR_SIG);
+ export async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
+     const thresholdBytes = opts.thresholdBytes ?? DEFAULT_PARSE_THRESHOLD_BYTES;
+     while (true) {
+         const sigBytes = await io.pull(4);
+         if (sigBytes.length === 0) {
+             emitter.emitClose();
+             return;
+         }
+         const signature = readUint32LE(sigBytes, 0);
+         if (signature === 0x34327243) {
+             state.crxHeader = await readCrxHeader(async (length) => io.pull(length));
+             emitter.emitCrxHeader(state.crxHeader);
+             continue;
+         }
+         if (signature === LOCAL_FILE_HEADER_SIG) {
+             await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
+             continue;
+         }
+         if (signature === CENTRAL_DIR_HEADER_SIG) {
+             state.reachedCD = true;
+             await consumeCentralDirectoryFileHeader(async (length) => io.pull(length));
+             continue;
+         }
+         if (signature === END_OF_CENTRAL_DIR_SIG) {
+             await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
+             io.setDone();
+             emitter.emitClose();
+             return;
+         }
+         if (state.reachedCD) {
+             // We are in central directory trailing data; resync by scanning for EOCD signature.
+             // consumeEndOfCentralDirectoryRecord expects the EOCD signature to be consumed, so includeEof=true.
+             const includeEof = true;
+             await io.pullUntil(endDirectorySignature, includeEof);
+             await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
+             io.setDone();
+             emitter.emitClose();
+             return;
+         }
+         emitter.emitError(new Error("invalid signature: 0x" + signature.toString(16)));
+         emitter.emitClose();
+         return;
+     }
+ }
+ async function pumpKnownCompressedSizeToEntry(io, inflater, entry, compressedSize) {
+     // Keep chunks reasonably large to reduce per-await overhead.
+     const CHUNK_SIZE = 256 * 1024;
+     let remaining = compressedSize;
+     let err = null;
+     const onError = (e) => {
+         err = e;
+     };
+     inflater.once("error", onError);
+     entry.once("error", onError);
+     let skipping = false;
+     const waitForDrainOrSkipSignal = async () => {
+         await new Promise(resolve => {
+             const anyInflater = inflater;
+             const cleanup = () => {
+                 try {
+                     anyInflater?.removeListener?.("drain", onDrain);
+                 }
+                 catch {
+                     // ignore
+                 }
+                 try {
+                     entry.removeListener("__autodrain", onAutodrain);
+                 }
+                 catch {
+                     // ignore
+                 }
+                 try {
+                     entry.removeListener("close", onClose);
+                 }
+                 catch {
+                     // ignore
+                 }
+             };
+             const onDrain = () => {
+                 cleanup();
+                 resolve();
+             };
+             const onAutodrain = () => {
+                 cleanup();
+                 resolve();
+             };
+             const onClose = () => {
+                 cleanup();
+                 resolve();
+             };
+             if (typeof anyInflater?.once === "function") {
+                 anyInflater.once("drain", onDrain);
+             }
+             entry.once("__autodrain", onAutodrain);
+             entry.once("close", onClose);
+         });
+     };
+     const switchToSkip = async () => {
+         if (skipping) {
+             return;
+         }
+         skipping = true;
+         // Stop forwarding decompressed output. We only need to advance the ZIP cursor.
+         try {
+             const anyInflater = inflater;
+             if (typeof anyInflater.unpipe === "function") {
+                 anyInflater.unpipe(entry);
+             }
+         }
+         catch {
+             // ignore
+         }
+         // End the entry as early as possible so downstream drain resolves quickly.
+         try {
+             if (!entry.writableEnded && !entry.destroyed) {
+                 entry.end();
+             }
+         }
+         catch {
+             // ignore
+         }
+         // Stop the inflater to avoid work/backpressure.
+         try {
+             const anyInflater = inflater;
+             if (typeof anyInflater.destroy === "function") {
+                 anyInflater.destroy();
+             }
+         }
+         catch {
+             // ignore
+         }
+     };
+     try {
+         // Pipe decompressed output into the entry stream.
+         inflater.pipe(entry);
+         while (remaining > 0) {
+             if (err) {
+                 throw err;
+             }
+             // If downstream decides to autodrain mid-entry (common when a consumer bails out
+             // early due to a limit), stop inflating and just skip the remaining compressed bytes.
+             if (!skipping && (entry.__autodraining || entry.destroyed)) {
+                 await switchToSkip();
+             }
+             const toPull = Math.min(CHUNK_SIZE, remaining);
+             const chunk = await io.pull(toPull);
+             if (chunk.length !== toPull) {
+                 throw new Error("FILE_ENDED");
+             }
+             remaining -= chunk.length;
+             if (!skipping) {
+                 const ok = inflater.write(chunk);
+                 if (!ok) {
+                     await waitForDrainOrSkipSignal();
+                 }
+             }
+         }
+         if (!skipping) {
+             inflater.end();
+         }
+         // Wait for all writes to complete (not for consumption).
+         await finished(entry, { readable: false });
+     }
+     finally {
+         inflater.removeListener("error", onError);
+         entry.removeListener("error", onError);
+     }
+ }
+ async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
+     const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
+     const vars = headerVars;
+     if (state.crxHeader) {
+         vars.crxHeader = state.crxHeader;
+     }
+     const fileName = decodeZipEntryPath(fileNameBuffer);
+     const entry = new PassThrough({
+         highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK
+     });
+     let autodraining = false;
+     entry.autodrain = function () {
+         autodraining = true;
+         entry.__autodraining = true;
+         // Signal producers that downstream has switched to drain mode.
+         // This helps avoid deadlocks if the producer is waiting on backpressure.
+         entry.emit("__autodrain");
+         return autodrain(entry);
+     };
+     entry.buffer = function () {
+         return bufferStream(entry);
+     };
+     entry.path = fileName;
+     entry.props = buildZipEntryProps(fileName, fileNameBuffer, vars.flags);
+     entry.type = getZipEntryType(fileName, vars.uncompressedSize || 0);
+     if (opts.verbose) {
+         if (entry.type === "Directory") {
+             console.log(" creating:", fileName);
+         }
+         else if (entry.type === "File") {
+             if (vars.compressionMethod === 0) {
+                 console.log(" extracting:", fileName);
+             }
+             else {
+                 console.log(" inflating:", fileName);
+             }
+         }
+     }
+     const extra = parseExtraField(extraFieldData, vars);
+     vars.lastModifiedDateTime = resolveZipEntryLastModifiedDateTime(vars, extra);
+     entry.vars = vars;
+     entry.extraFields = extra;
+     entry.__autodraining = autodraining;
+     const fileSizeKnown = isFileSizeKnown(vars.flags, vars.compressedSize);
+     if (fileSizeKnown) {
+         entry.size = vars.uncompressedSize || 0;
+     }
+     if (opts.forceStream) {
+         emitter.pushEntry(entry);
+     }
+     else {
+         emitter.emitEntry(entry);
+         emitter.pushEntryIfPiped(entry);
+     }
+     if (opts.verbose) {
+         console.log({
+             filename: fileName,
+             vars: vars,
+             extraFields: entry.extraFields
+         });
+     }
+     // Small file optimization: use sync decompression if:
+     // 1. Entry sizes are trusted (no data descriptor)
+     // 2. File size is known and below threshold
+     // 3. inflateRawSync is provided
+     // 4. File needs decompression (compressionMethod != 0)
+     // 5. Not autodraining
+     //
+     // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
+     // This prevents materializing large highly-compressible files in memory,
+     // which can cause massive peak RSS and negate streaming backpressure.
+     const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
+     const compressedSize = vars.compressedSize || 0;
+     const uncompressedSize = vars.uncompressedSize || 0;
+     const useSmallFileOptimization = sizesTrusted &&
+         fileSizeKnown &&
+         inflateRawSync &&
+         vars.compressionMethod !== 0 &&
+         !autodraining &&
+         compressedSize <= thresholdBytes &&
+         uncompressedSize <= thresholdBytes;
+     if (useSmallFileOptimization) {
+         // Read compressed data directly and decompress synchronously
+         const compressedData = await io.pull(compressedSize);
+         const decompressedData = inflateRawSync(compressedData);
+         entry.end(decompressedData);
+         // Wait for entry stream write to complete (not for read/consume)
+         await finished(entry, { readable: false });
+         return;
+     }
+     const inflater = vars.compressionMethod && !autodraining
+         ? inflateFactory()
+         : new PassThrough({ highWaterMark: DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK });
+     if (fileSizeKnown) {
+         await pumpKnownCompressedSizeToEntry(io, inflater, entry, vars.compressedSize || 0);
+         return;
+     }
+     await pipeline(io.streamUntilDataDescriptor(), inflater, entry);
+     const dd = await readDataDescriptor(async (l) => io.pull(l));
+     entry.size = dd.uncompressedSize || 0;
+ }