@ismail-elkorchi/bytefold 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (314)
  1. package/LICENSE +21 -0
  2. package/README.md +48 -0
  3. package/SPEC.md +285 -0
  4. package/dist/abort.d.ts +3 -0
  5. package/dist/abort.d.ts.map +1 -0
  6. package/dist/abort.js +33 -0
  7. package/dist/abort.js.map +1 -0
  8. package/dist/archive/errors.d.ts +34 -0
  9. package/dist/archive/errors.d.ts.map +1 -0
  10. package/dist/archive/errors.js +45 -0
  11. package/dist/archive/errors.js.map +1 -0
  12. package/dist/archive/httpArchiveErrors.d.ts +2 -0
  13. package/dist/archive/httpArchiveErrors.d.ts.map +1 -0
  14. package/dist/archive/httpArchiveErrors.js +25 -0
  15. package/dist/archive/httpArchiveErrors.js.map +1 -0
  16. package/dist/archive/index.d.ts +47 -0
  17. package/dist/archive/index.d.ts.map +1 -0
  18. package/dist/archive/index.js +1490 -0
  19. package/dist/archive/index.js.map +1 -0
  20. package/dist/archive/types.d.ts +91 -0
  21. package/dist/archive/types.d.ts.map +1 -0
  22. package/dist/archive/types.js +2 -0
  23. package/dist/archive/types.js.map +1 -0
  24. package/dist/archive/xzPreflight.d.ts +13 -0
  25. package/dist/archive/xzPreflight.d.ts.map +1 -0
  26. package/dist/archive/xzPreflight.js +44 -0
  27. package/dist/archive/xzPreflight.js.map +1 -0
  28. package/dist/archive/zipPreflight.d.ts +18 -0
  29. package/dist/archive/zipPreflight.d.ts.map +1 -0
  30. package/dist/archive/zipPreflight.js +50 -0
  31. package/dist/archive/zipPreflight.js.map +1 -0
  32. package/dist/binary.d.ts +12 -0
  33. package/dist/binary.d.ts.map +1 -0
  34. package/dist/binary.js +59 -0
  35. package/dist/binary.js.map +1 -0
  36. package/dist/bun/index.d.ts +19 -0
  37. package/dist/bun/index.d.ts.map +1 -0
  38. package/dist/bun/index.js +427 -0
  39. package/dist/bun/index.js.map +1 -0
  40. package/dist/compress/errors.d.ts +30 -0
  41. package/dist/compress/errors.d.ts.map +1 -0
  42. package/dist/compress/errors.js +40 -0
  43. package/dist/compress/errors.js.map +1 -0
  44. package/dist/compress/index.d.ts +12 -0
  45. package/dist/compress/index.d.ts.map +1 -0
  46. package/dist/compress/index.js +339 -0
  47. package/dist/compress/index.js.map +1 -0
  48. package/dist/compress/types.d.ts +41 -0
  49. package/dist/compress/types.d.ts.map +1 -0
  50. package/dist/compress/types.js +2 -0
  51. package/dist/compress/types.js.map +1 -0
  52. package/dist/compression/bzip2.d.ts +9 -0
  53. package/dist/compression/bzip2.d.ts.map +1 -0
  54. package/dist/compression/bzip2.js +546 -0
  55. package/dist/compression/bzip2.js.map +1 -0
  56. package/dist/compression/codecs.d.ts +6 -0
  57. package/dist/compression/codecs.d.ts.map +1 -0
  58. package/dist/compression/codecs.js +82 -0
  59. package/dist/compression/codecs.js.map +1 -0
  60. package/dist/compression/deflate64.d.ts +3 -0
  61. package/dist/compression/deflate64.d.ts.map +1 -0
  62. package/dist/compression/deflate64.js +549 -0
  63. package/dist/compression/deflate64.js.map +1 -0
  64. package/dist/compression/node-backend.d.ts +9 -0
  65. package/dist/compression/node-backend.d.ts.map +1 -0
  66. package/dist/compression/node-backend.js +103 -0
  67. package/dist/compression/node-backend.js.map +1 -0
  68. package/dist/compression/registry.d.ts +10 -0
  69. package/dist/compression/registry.d.ts.map +1 -0
  70. package/dist/compression/registry.js +30 -0
  71. package/dist/compression/registry.js.map +1 -0
  72. package/dist/compression/streams.d.ts +31 -0
  73. package/dist/compression/streams.d.ts.map +1 -0
  74. package/dist/compression/streams.js +147 -0
  75. package/dist/compression/streams.js.map +1 -0
  76. package/dist/compression/types.d.ts +19 -0
  77. package/dist/compression/types.d.ts.map +1 -0
  78. package/dist/compression/types.js +2 -0
  79. package/dist/compression/types.js.map +1 -0
  80. package/dist/compression/xz.d.ts +21 -0
  81. package/dist/compression/xz.d.ts.map +1 -0
  82. package/dist/compression/xz.js +1455 -0
  83. package/dist/compression/xz.js.map +1 -0
  84. package/dist/compression/xzFilters.d.ts +14 -0
  85. package/dist/compression/xzFilters.d.ts.map +1 -0
  86. package/dist/compression/xzFilters.js +736 -0
  87. package/dist/compression/xzFilters.js.map +1 -0
  88. package/dist/compression/xzIndexPreflight.d.ts +20 -0
  89. package/dist/compression/xzIndexPreflight.d.ts.map +1 -0
  90. package/dist/compression/xzIndexPreflight.js +371 -0
  91. package/dist/compression/xzIndexPreflight.js.map +1 -0
  92. package/dist/compression/xzScan.d.ts +15 -0
  93. package/dist/compression/xzScan.d.ts.map +1 -0
  94. package/dist/compression/xzScan.js +310 -0
  95. package/dist/compression/xzScan.js.map +1 -0
  96. package/dist/cp437.d.ts +2 -0
  97. package/dist/cp437.d.ts.map +1 -0
  98. package/dist/cp437.js +31 -0
  99. package/dist/cp437.js.map +1 -0
  100. package/dist/crc32.d.ts +7 -0
  101. package/dist/crc32.d.ts.map +1 -0
  102. package/dist/crc32.js +37 -0
  103. package/dist/crc32.js.map +1 -0
  104. package/dist/crc64.d.ts +6 -0
  105. package/dist/crc64.d.ts.map +1 -0
  106. package/dist/crc64.js +32 -0
  107. package/dist/crc64.js.map +1 -0
  108. package/dist/crypto/ctr.d.ts +11 -0
  109. package/dist/crypto/ctr.d.ts.map +1 -0
  110. package/dist/crypto/ctr.js +56 -0
  111. package/dist/crypto/ctr.js.map +1 -0
  112. package/dist/crypto/sha256.d.ts +16 -0
  113. package/dist/crypto/sha256.d.ts.map +1 -0
  114. package/dist/crypto/sha256.js +152 -0
  115. package/dist/crypto/sha256.js.map +1 -0
  116. package/dist/crypto/winzip-aes.d.ts +17 -0
  117. package/dist/crypto/winzip-aes.d.ts.map +1 -0
  118. package/dist/crypto/winzip-aes.js +98 -0
  119. package/dist/crypto/winzip-aes.js.map +1 -0
  120. package/dist/crypto/zipcrypto.d.ts +23 -0
  121. package/dist/crypto/zipcrypto.d.ts.map +1 -0
  122. package/dist/crypto/zipcrypto.js +99 -0
  123. package/dist/crypto/zipcrypto.js.map +1 -0
  124. package/dist/deno/index.d.ts +19 -0
  125. package/dist/deno/index.d.ts.map +1 -0
  126. package/dist/deno/index.js +422 -0
  127. package/dist/deno/index.js.map +1 -0
  128. package/dist/dosTime.d.ts +7 -0
  129. package/dist/dosTime.d.ts.map +1 -0
  130. package/dist/dosTime.js +21 -0
  131. package/dist/dosTime.js.map +1 -0
  132. package/dist/errorContext.d.ts +2 -0
  133. package/dist/errorContext.d.ts.map +1 -0
  134. package/dist/errorContext.js +24 -0
  135. package/dist/errorContext.js.map +1 -0
  136. package/dist/errors.d.ts +46 -0
  137. package/dist/errors.d.ts.map +1 -0
  138. package/dist/errors.js +51 -0
  139. package/dist/errors.js.map +1 -0
  140. package/dist/extraFields.d.ts +29 -0
  141. package/dist/extraFields.d.ts.map +1 -0
  142. package/dist/extraFields.js +201 -0
  143. package/dist/extraFields.js.map +1 -0
  144. package/dist/generated/unicodeCaseFolding.d.ts +4 -0
  145. package/dist/generated/unicodeCaseFolding.d.ts.map +1 -0
  146. package/dist/generated/unicodeCaseFolding.js +1594 -0
  147. package/dist/generated/unicodeCaseFolding.js.map +1 -0
  148. package/dist/http/errors.d.ts +26 -0
  149. package/dist/http/errors.d.ts.map +1 -0
  150. package/dist/http/errors.js +33 -0
  151. package/dist/http/errors.js.map +1 -0
  152. package/dist/index.d.ts +10 -0
  153. package/dist/index.d.ts.map +1 -0
  154. package/dist/index.js +7 -0
  155. package/dist/index.js.map +1 -0
  156. package/dist/limits.d.ts +22 -0
  157. package/dist/limits.d.ts.map +1 -0
  158. package/dist/limits.js +39 -0
  159. package/dist/limits.js.map +1 -0
  160. package/dist/node/index.d.ts +13 -0
  161. package/dist/node/index.d.ts.map +1 -0
  162. package/dist/node/index.js +448 -0
  163. package/dist/node/index.js.map +1 -0
  164. package/dist/node/zip/RandomAccess.d.ts +12 -0
  165. package/dist/node/zip/RandomAccess.d.ts.map +1 -0
  166. package/dist/node/zip/RandomAccess.js +38 -0
  167. package/dist/node/zip/RandomAccess.js.map +1 -0
  168. package/dist/node/zip/Sink.d.ts +17 -0
  169. package/dist/node/zip/Sink.d.ts.map +1 -0
  170. package/dist/node/zip/Sink.js +45 -0
  171. package/dist/node/zip/Sink.js.map +1 -0
  172. package/dist/node/zip/ZipReader.d.ts +51 -0
  173. package/dist/node/zip/ZipReader.d.ts.map +1 -0
  174. package/dist/node/zip/ZipReader.js +1540 -0
  175. package/dist/node/zip/ZipReader.js.map +1 -0
  176. package/dist/node/zip/ZipWriter.d.ts +21 -0
  177. package/dist/node/zip/ZipWriter.d.ts.map +1 -0
  178. package/dist/node/zip/ZipWriter.js +196 -0
  179. package/dist/node/zip/ZipWriter.js.map +1 -0
  180. package/dist/node/zip/entryStream.d.ts +22 -0
  181. package/dist/node/zip/entryStream.d.ts.map +1 -0
  182. package/dist/node/zip/entryStream.js +241 -0
  183. package/dist/node/zip/entryStream.js.map +1 -0
  184. package/dist/node/zip/entryWriter.d.ts +54 -0
  185. package/dist/node/zip/entryWriter.d.ts.map +1 -0
  186. package/dist/node/zip/entryWriter.js +512 -0
  187. package/dist/node/zip/entryWriter.js.map +1 -0
  188. package/dist/node/zip/index.d.ts +8 -0
  189. package/dist/node/zip/index.d.ts.map +1 -0
  190. package/dist/node/zip/index.js +5 -0
  191. package/dist/node/zip/index.js.map +1 -0
  192. package/dist/reader/RandomAccess.d.ts +55 -0
  193. package/dist/reader/RandomAccess.d.ts.map +1 -0
  194. package/dist/reader/RandomAccess.js +528 -0
  195. package/dist/reader/RandomAccess.js.map +1 -0
  196. package/dist/reader/ZipReader.d.ts +89 -0
  197. package/dist/reader/ZipReader.d.ts.map +1 -0
  198. package/dist/reader/ZipReader.js +1359 -0
  199. package/dist/reader/ZipReader.js.map +1 -0
  200. package/dist/reader/centralDirectory.d.ts +40 -0
  201. package/dist/reader/centralDirectory.d.ts.map +1 -0
  202. package/dist/reader/centralDirectory.js +311 -0
  203. package/dist/reader/centralDirectory.js.map +1 -0
  204. package/dist/reader/entryStream.d.ts +22 -0
  205. package/dist/reader/entryStream.d.ts.map +1 -0
  206. package/dist/reader/entryStream.js +122 -0
  207. package/dist/reader/entryStream.js.map +1 -0
  208. package/dist/reader/eocd.d.ts +22 -0
  209. package/dist/reader/eocd.d.ts.map +1 -0
  210. package/dist/reader/eocd.js +184 -0
  211. package/dist/reader/eocd.js.map +1 -0
  212. package/dist/reader/httpZipErrors.d.ts +4 -0
  213. package/dist/reader/httpZipErrors.d.ts.map +1 -0
  214. package/dist/reader/httpZipErrors.js +48 -0
  215. package/dist/reader/httpZipErrors.js.map +1 -0
  216. package/dist/reader/localHeader.d.ts +15 -0
  217. package/dist/reader/localHeader.d.ts.map +1 -0
  218. package/dist/reader/localHeader.js +37 -0
  219. package/dist/reader/localHeader.js.map +1 -0
  220. package/dist/reportSchema.d.ts +3 -0
  221. package/dist/reportSchema.d.ts.map +1 -0
  222. package/dist/reportSchema.js +3 -0
  223. package/dist/reportSchema.js.map +1 -0
  224. package/dist/streams/adapters.d.ts +10 -0
  225. package/dist/streams/adapters.d.ts.map +1 -0
  226. package/dist/streams/adapters.js +54 -0
  227. package/dist/streams/adapters.js.map +1 -0
  228. package/dist/streams/buffer.d.ts +5 -0
  229. package/dist/streams/buffer.d.ts.map +1 -0
  230. package/dist/streams/buffer.js +44 -0
  231. package/dist/streams/buffer.js.map +1 -0
  232. package/dist/streams/crcTransform.d.ts +15 -0
  233. package/dist/streams/crcTransform.d.ts.map +1 -0
  234. package/dist/streams/crcTransform.js +30 -0
  235. package/dist/streams/crcTransform.js.map +1 -0
  236. package/dist/streams/emit.d.ts +7 -0
  237. package/dist/streams/emit.d.ts.map +1 -0
  238. package/dist/streams/emit.js +13 -0
  239. package/dist/streams/emit.js.map +1 -0
  240. package/dist/streams/limits.d.ts +16 -0
  241. package/dist/streams/limits.d.ts.map +1 -0
  242. package/dist/streams/limits.js +39 -0
  243. package/dist/streams/limits.js.map +1 -0
  244. package/dist/streams/measure.d.ts +5 -0
  245. package/dist/streams/measure.d.ts.map +1 -0
  246. package/dist/streams/measure.js +9 -0
  247. package/dist/streams/measure.js.map +1 -0
  248. package/dist/streams/progress.d.ts +8 -0
  249. package/dist/streams/progress.d.ts.map +1 -0
  250. package/dist/streams/progress.js +69 -0
  251. package/dist/streams/progress.js.map +1 -0
  252. package/dist/streams/web.d.ts +5 -0
  253. package/dist/streams/web.d.ts.map +1 -0
  254. package/dist/streams/web.js +33 -0
  255. package/dist/streams/web.js.map +1 -0
  256. package/dist/tar/TarReader.d.ts +41 -0
  257. package/dist/tar/TarReader.d.ts.map +1 -0
  258. package/dist/tar/TarReader.js +930 -0
  259. package/dist/tar/TarReader.js.map +1 -0
  260. package/dist/tar/TarWriter.d.ts +25 -0
  261. package/dist/tar/TarWriter.d.ts.map +1 -0
  262. package/dist/tar/TarWriter.js +307 -0
  263. package/dist/tar/TarWriter.js.map +1 -0
  264. package/dist/tar/index.d.ts +4 -0
  265. package/dist/tar/index.d.ts.map +1 -0
  266. package/dist/tar/index.js +3 -0
  267. package/dist/tar/index.js.map +1 -0
  268. package/dist/tar/types.d.ts +67 -0
  269. package/dist/tar/types.d.ts.map +1 -0
  270. package/dist/tar/types.js +2 -0
  271. package/dist/tar/types.js.map +1 -0
  272. package/dist/text/caseFold.d.ts +7 -0
  273. package/dist/text/caseFold.d.ts.map +1 -0
  274. package/dist/text/caseFold.js +45 -0
  275. package/dist/text/caseFold.js.map +1 -0
  276. package/dist/types.d.ts +190 -0
  277. package/dist/types.d.ts.map +1 -0
  278. package/dist/types.js +2 -0
  279. package/dist/types.js.map +1 -0
  280. package/dist/web/index.d.ts +11 -0
  281. package/dist/web/index.d.ts.map +1 -0
  282. package/dist/web/index.js +95 -0
  283. package/dist/web/index.js.map +1 -0
  284. package/dist/writer/Sink.d.ts +21 -0
  285. package/dist/writer/Sink.d.ts.map +1 -0
  286. package/dist/writer/Sink.js +24 -0
  287. package/dist/writer/Sink.js.map +1 -0
  288. package/dist/writer/ZipWriter.d.ts +27 -0
  289. package/dist/writer/ZipWriter.d.ts.map +1 -0
  290. package/dist/writer/ZipWriter.js +153 -0
  291. package/dist/writer/ZipWriter.js.map +1 -0
  292. package/dist/writer/centralDirectoryWriter.d.ts +8 -0
  293. package/dist/writer/centralDirectoryWriter.d.ts.map +1 -0
  294. package/dist/writer/centralDirectoryWriter.js +77 -0
  295. package/dist/writer/centralDirectoryWriter.js.map +1 -0
  296. package/dist/writer/entryWriter.d.ts +54 -0
  297. package/dist/writer/entryWriter.d.ts.map +1 -0
  298. package/dist/writer/entryWriter.js +327 -0
  299. package/dist/writer/entryWriter.js.map +1 -0
  300. package/dist/writer/finalize.d.ts +10 -0
  301. package/dist/writer/finalize.d.ts.map +1 -0
  302. package/dist/writer/finalize.js +56 -0
  303. package/dist/writer/finalize.js.map +1 -0
  304. package/dist/zip/index.d.ts +8 -0
  305. package/dist/zip/index.d.ts.map +1 -0
  306. package/dist/zip/index.js +5 -0
  307. package/dist/zip/index.js.map +1 -0
  308. package/jsr.json +41 -0
  309. package/package.json +117 -0
  310. package/schemas/audit-report.schema.json +38 -0
  311. package/schemas/capabilities-report.schema.json +25 -0
  312. package/schemas/detection-report.schema.json +23 -0
  313. package/schemas/error.schema.json +22 -0
  314. package/schemas/normalize-report.schema.json +47 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Ismail El Korchi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,48 @@
1
+ ---
2
+ role: overview
3
+ audience: users
4
+ source_of_truth: README.md
5
+ update_triggers:
6
+ - public API changes
7
+ - supported formats or codecs
8
+ ---
9
+
10
+ # bytefold
11
+
12
+ Multi-format archive reader/writer for Node 24+, Deno, Bun, and Web (Browser). ESM-only, TypeScript strict, no runtime dependencies (tests: `test/repo-invariants.test.ts`).
13
+
14
+ ## Install
15
+
16
+ ```sh
17
+ npm install @ismail-elkorchi/bytefold
18
+ ```
19
+
20
+ ## Quickstart (auto-detect)
21
+
22
+ ```js
23
+ import { openArchive } from '@ismail-elkorchi/bytefold';
24
+
25
+ const reader = await openArchive(bytesOrStream, { profile: 'agent' });
26
+ const report = await reader.audit({ profile: 'agent' });
27
+ console.log(JSON.stringify(report));
28
+
29
+ await reader.assertSafe({ profile: 'agent' });
30
+ for await (const entry of reader.entries()) {
31
+ if (entry.isDirectory) continue;
32
+ const data = await new Response(await entry.open()).arrayBuffer();
33
+ console.log(entry.name, data.byteLength);
34
+ }
35
+ ```
36
+
37
+ Support matrix: see `SPEC.md` (Support matrix section).
38
+
39
+ Web runtime entrypoint: `@ismail-elkorchi/bytefold/web` (HTTPS URL input only; full-fetch by design; no seekable HTTP range sessions in web adapter).
40
+
41
+ ## Verification
42
+ `npm run check`
43
+
44
+ ## Docs
45
+ - `SPEC.md` (invariants, API entrypoints, error/report model)
46
+ - `ARCHITECTURE.md` (module map and data flow)
47
+ - `SECURITY.md` (threat model and reporting)
48
+ - `CONTRIBUTING.md`, `CHANGELOG.md`
package/SPEC.md ADDED
@@ -0,0 +1,285 @@
1
+ ---
2
+ role: spec
3
+ audience: maintainers, agents, users
4
+ source_of_truth: SPEC.md
5
+ update_triggers:
6
+ - public API changes
7
+ - new invariants or error codes
8
+ - new formats, codecs, or filters
9
+ ---
10
+
11
+ # SPEC
12
+
13
+ ## Domain model (conceptual)
14
+ - Archive: a container of entries (files, directories, links) that can be opened, audited, normalized, and extracted.
15
+ - Compression: byte-level transforms for entry payloads or whole archives (e.g., deflate, zstd, xz).
16
+ - Audit: validation pass that reports issues without mutating data.
17
+ - Normalize: deterministic rewrite that fixes or flags issues according to a profile.
18
+
19
+ ## Public API entrypoints
20
+ Snapshot enforced by `test/export-surface.test.ts` and `test/support-matrix.test.ts`.
21
+ ### npm (package.json exports)
22
+ - `@ismail-elkorchi/bytefold`
23
+ - `@ismail-elkorchi/bytefold/archive`
24
+ - `@ismail-elkorchi/bytefold/compress`
25
+ - `@ismail-elkorchi/bytefold/zip`
26
+ - `@ismail-elkorchi/bytefold/tar`
27
+ - `@ismail-elkorchi/bytefold/node`
28
+ - `@ismail-elkorchi/bytefold/node/zip`
29
+ - `@ismail-elkorchi/bytefold/deno`
30
+ - `@ismail-elkorchi/bytefold/bun`
31
+ - `@ismail-elkorchi/bytefold/web`
32
+
33
+ ### jsr (jsr.json exports)
34
+ - `@ismail-elkorchi/bytefold`
35
+ - `@ismail-elkorchi/bytefold/archive`
36
+ - `@ismail-elkorchi/bytefold/compress`
37
+ - `@ismail-elkorchi/bytefold/zip`
38
+ - `@ismail-elkorchi/bytefold/tar`
39
+ - `@ismail-elkorchi/bytefold/deno`
40
+ - `@ismail-elkorchi/bytefold/bun`
41
+ - `@ismail-elkorchi/bytefold/web`
42
+
43
+ ## Invariants (test-linked)
44
+ 1. Runtime dependencies count is zero; package is ESM-only and requires Node >= 24. (tests: `test/repo-invariants.test.ts`)
45
+ 2. TypeScript strict mode remains enabled. (tests: `test/repo-invariants.test.ts`)
46
+ 3. Default entrypoints do not import `node:*` at module evaluation. (tests: `test/repo-invariants.test.ts`)
47
+ 4. Emitted `Uint8Array` chunks are immutable after enqueue, including under chunking adversary inputs. (tests: `test/xz-aliasing.test.ts`, `test/deflate64-aliasing.test.ts`, `test/streaming-invariance.test.ts`)
48
+ 5. Streaming decompression is invariant to input chunking for gzip, bzip2, and xz (including BCJ). (tests: `test/streaming-invariance.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
49
+ 6. Mutation harness applies deterministic byte-level mutations across bytes/stream/file/url boundaries for zip/tar/gz/bz2/xz inputs; failures surface typed errors with schema-valid `toJSON()`, successes return schema-valid audits. (tests: `test/mutation-harness.test.ts`)
50
+ 7. Third-party ZIP and TAR fixtures open, list, and audit cleanly with provenance + size bounds. (tests: `test/zip-tar-thirdparty.test.ts`, `test/third-party-provenance.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
51
+ 8. Seekable ZIP preflight enforces EOCD/central-directory ceilings before full buffering, rejects multi-disk archives, and stays bounded in HTTP range requests/bytes. (tests: `test/zip-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
52
+ 9. Seekable ZIP over HTTP Range is used for list + single-entry extract and stays bounded without full downloads when Range is supported (tests assert total bytes ≤ ceil(size/16) and request count ≤ 1 + ceil((size/16)/64KiB) + 2). (tests: `test/zip-url-seekable-budget.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
53
+ 10. HTTP Range sessions pin validators; If-Range is sent only with strong ETags (never weak ETag/Last-Modified), 200 responses to ranged requests with If-Range are treated as resource changed, and Content-Encoding responses are rejected. Snapshot policy require-strong-etag fails without a strong validator, while best-effort proceeds without If-Range. (specs: `specs/http/rfc9110-if-range.md`, `specs/http/rfc9110-accept-encoding.md`; tests: `test/zip-url-seekable-budget.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
54
+ 11. HTTP header-only range failures fail fast by aborting request bodies before payload consumption; slow-body adversarial servers remain bounded (`<= 4096` bytes served) for range-unsupported and content-encoding rejection paths. (tests: `test/zip-url-seekable-budget.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
55
+ 12. HTTP 206 bodies must match the requested range length exactly; truncated or overrun bodies fail with typed bad-response errors. (tests: `test/zip-url-seekable-budget.test.ts`, `test/bun.smoke.ts`, `test/deno.smoke.ts`)
56
+ 13. Gzip header options (FEXTRA/FNAME/FCOMMENT) are parsed; FNAME yields the entry name even when extra fields are present, consistently across runtimes. (tests: `test/gzip-header-options.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
57
+ 14. Gzip FHCRC headers are validated when present; mismatches throw typed errors. (tests: `test/gzip-fhcrc.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
58
+ 15. Decompression output ceilings enforce `maxTotalDecompressedBytes` for gzip/deflate/brotli/zstd and fail with `COMPRESSION_RESOURCE_LIMIT` without emitting beyond the limit. (tests: `test/compression-resource-limits.test.ts`)
59
+ 16. XZ Index VLI parsing is chunk-boundary safe, including multi-byte uncompressed sizes. (tests: `test/xz-vli-boundaries.test.ts`, `test/streaming-invariance.test.ts`)
60
+ 17. XZ streaming decode supports padding + concatenation and validates checks (none/CRC32/CRC64/SHA-256). (tests: `test/xz-utils-conformance.test.ts`, `test/xz-bcj-filters.test.ts`)
61
+ 18. XZ supports filters LZMA2, Delta, BCJ x86/PowerPC/IA64/ARM/ARM-Thumb/SPARC/ARM64/RISC-V; unsupported filters/checks throw typed errors. (tests: `test/xz-utils-conformance.test.ts`, `test/xz-bcj-filters.test.ts`, `test/xz-thirdparty.test.ts`)
62
+ 19. Mixed XZ filter chains (Delta -> BCJ -> LZMA2) decode correctly with filters applied in reverse order. (tests: `test/xz-mixed-filters.test.ts`, `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
63
+ 20. XZ BCJ filters enforce property sizing/alignment and cannot be the last filter. Size-changing filters cannot be non-last. (tests: `test/xz-bcj-filters.test.ts`)
64
+ 21. XZ BCJ start offset properties are stream-relative and reset at each concatenated stream. Properties are honored per block. (tests: `test/xz-bcj-filters.test.ts`, `test/xz-concat-bcj.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
65
+ 22. XZ corruption yields typed errors and extraction is atomic for corrupted streams. (tests: `test/xz.test.ts`, `test/xz-utils-conformance.test.ts`)
66
+ 23. XZ resource ceilings are enforced (buffer + dictionary + Index limits). (tests: `test/xz-utils-conformance.test.ts`, `test/xz-index-limits.test.ts`)
67
+ 24. XZ preflight scanning bounds Index parsing without per-record allocations. (tests: `test/xz-index-limits.test.ts`)
68
+ 25. Seekable XZ preflight enforces Index limits before full buffering. (tests: `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
69
+ 26. Seekable XZ preflight enforces dictionary limits before full buffering (bounded block-header scan). (tests: `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
70
+ 27. Seekable XZ preflight HTTP failures map to specific `ARCHIVE_HTTP_*` codes with `context.httpCode` preserving the originating HTTP failure class. (tests: `test/xz-http-error-mapping.test.ts`, `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
71
+ 28. Seekable XZ preflight success path is bounded in HTTP range requests/bytes and reports incomplete scans when block header limits are exceeded without blocking decode. (tests: `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
72
+ 29. Report + error JSON schemas are versioned and validated. (tests: `test/schema-contracts.test.ts`, `test/json-safety.test.ts`)
73
+ 30. Public export surface matches the manifest snapshot. (tests: `test/export-surface.test.ts`)
74
+ 31. TypeScript declaration surface for every npm/jsr public entrypoint matches the committed snapshot manifest; intentional surface breaks in ALPHA require snapshot updates and explicit CHANGELOG entries, and V1+ will introduce stricter migration discipline. (tests: `test/type-surface.test.ts`)
75
+ 32. npm pack payload obeys allowlist/denylist policy: runtime artifacts plus contract metadata (`SPEC.md`) and JSON schemas (`schemas/*.json`) only; repo-internal indexes (`docs/REPO_INDEX.*`) and internal sources are excluded. (tests: `test/packaging-contract.test.ts`, `scripts/verify-pack.mjs`)
76
+ 33. Normalize safe mode is deterministic and lossless mode preserves bytes where documented. (zip/tar idempotent.) (tests: `test/normalize.test.ts`, `test/audit-normalize-proof.test.ts`)
77
+ 34. `extractAll` blocks path traversal. Deterministic ZIP/TAR adversarial corpora also surface traversal as audit failures (`ZIP_PATH_TRAVERSAL` / `TAR_PATH_TRAVERSAL`) and fail `assertSafe` under the agent profile. (tests: `test/zip.test.ts`, `test/security-audit-simulation.test.ts`)
78
+ 35. `openArchive` auto-detects documented formats; tar.br requires an explicit hint. (tests: `test/archive.test.ts`, `test/bzip2.test.ts`, `test/xz.test.ts`, `test/tar-xz.test.ts`)
79
+ 36. Context index artifacts are deterministic and bounded: `npm run context:index` produces `docs/REPO_INDEX.md` (<= 250 KiB) plus `docs/REPO_INDEX.md.sha256` with stable sorting and no timestamps. (tests: `test/context-tools.test.ts`)
80
+ 37. Error JSON `context` never shadows top-level keys (`schemaVersion`, `name`, `code`, `message`, `hint`, `context`, plus top-level optionals such as `entryName`, `method`, `offset`, `algorithm`). (tests: `test/error-contracts.test.ts`, `test/error-json-ambiguity.test.ts`, `test/schema-contracts.test.ts`)
81
+ 38. Profile/limits precedence is deterministic across readers and decompressor setup: profile selects defaults, explicit `limits` override only provided fields, and explicit decompressor scalar limits override `limits` for matching knobs. (tests: `test/option-precedence.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
82
+ 39. openArchive(...) accepts Blob/File inputs (inputKind: "blob"); ZIP on Blob is seekable via random access (slice reads) while non-ZIP Blob inputs are bounded by input limits. (tests: `test/archive.test.ts`, `test/web-adapter.test.ts`, `test/schema-contracts.test.ts`)
83
+ 40. Web adapter URL inputs (@ismail-elkorchi/bytefold/web) are HTTPS-only and always full-fetch bytes (no HTTP range sessions); the fetch path enforces input-size limits and preserves inputKind: "url". (tests: `test/web-adapter.test.ts`, `test/security-audit-simulation.test.ts`)
84
+ 41. Browser-facing entrypoint stays web-bundle safe: `npm run web:check` bundles `web/mod.ts` for `platform=browser`, rejects `node:*` imports, and is deterministic across runs. (tests: `test/web-check.test.ts`, `test/repo-invariants.test.ts`)
85
+ 42. Web entrypoint writer roundtrips are contract-backed: ZIP (store-only) and TAR archives written to Web `WritableStream` sinks can be reopened from Blob via `openArchive(...)`, preserve entry names/bytes, and remain safe under `audit` + deterministic `normalizeToWritable`. (tests: `test/web-writer-roundtrip.test.ts`)
86
+ 43. TAR octal parsing uses null-terminated UTF-8 decoding without regex backtracking and preserves legacy truncation semantics on representative + adversarial long inputs. (tests: `test/null-terminated-utf8.test.ts`, `test/archive.test.ts`, `test/tar-xz.test.ts`)
87
+ 44. XZ fixture expectations avoid committed ELF binary outputs by pinning deterministic digest/size assertions for BCJ payload verification. (tests: `test/xz-utils-conformance.test.ts`, `test/xz-thirdparty.test.ts`)
88
+ 45. Web adapter URL full-fetch overflow paths are fail-fast and bounded: `maxInputBytes` over-limit responses reject with `RangeError`, cancel slow response streams before full transfer, and never use HTTP Range requests. (tests: `test/web-adapter.test.ts`)
89
+ 46. Browser smoke verifies web entrypoint behavior in real Chromium/Firefox/WebKit: Blob ZIP roundtrip, ZIP/TAR writer roundtrip, and URL `maxInputBytes` abort remains bounded without HTTP Range requests. (tests: `test/browser/web-entrypoint.pw.ts`)
90
+ 47. Security-sensitive + third-party fixture bytes are pinned in `test/fixtures/security-fixture-hashes.json`; `npm run check` fails on missing, unexpected, or changed hashes with explicit diff output. (tests: `test/fixture-hash-manifest.test.ts`, command: `npm run fixtures:hashes:check`)
91
+ 48. `npm run format:check` ignores Playwright artifact directories (`test-results/`, `playwright-report/`) so browser smoke artifacts cannot cause false formatting failures in `npm run check`. (tests: `test/format-script.test.ts`)
92
+ 49. Web adapter URL inputs reject non-HTTPS schemes (including `http:`) with typed `ARCHIVE_UNSUPPORTED_FEATURE` before any fetch attempt. (tests: `test/security-audit-simulation.test.ts`)
93
+ 50. Zip64 boundary parsing is deterministic around 32-bit/64-bit limits: EOCD sentinel combinations requiring Zip64 must provide valid locator/record structures, malformed Zip64 extra fields reject with typed `ZIP_BAD_ZIP64`, and >4GiB central-directory offsets are never truncated to 32-bit values. (tests: `test/zip64-boundary.test.ts`)
94
+ 51. Deterministic property tests harden parser boundaries: TAR octal/NUL/space numeric fields, ZIP EOCD comment-length mutations, gzip optional header sections, and web URL `maxInputBytes` abort paths are fuzzed with fixed seeds and bounded runs. (tests: `test/fuzz-property-boundaries.test.ts`)
95
+ 52. Unicode Trojan Source directionality controls are blocked by repository scanning: tracked text files must not contain bidi override/isolation control code points, and violations fail `npm run check` with file/line/codepoint diagnostics. (tests: `test/unicode-safety-check.test.ts`)
96
+ 53. ZIP writer forced-ZIP64 mode is structurally explicit and typed on corruption: emitted archives include ZIP64 EOCD + locator + ZIP64 central-directory extra fields even for small payloads, remain readable by the ZIP reader, and malformed ZIP64 locator paths reject with typed `ZIP_BAD_ZIP64` (never untyped `RangeError`). (tests: `test/zip64-writer-structural.test.ts`)
97
+ 54. Changelog release-truth guard is deterministic: `## Unreleased` must cover core post-release themes (web URL hardening, security simulation corpus, browser smoke scope, zip64 proofs, fixture-hash enforcement, property tests, unicode guard), and missing themes fail `npm run check`. (tests: `test/changelog-unreleased-coverage.test.ts`)
98
+
99
+ ## Gzip support details
100
+ - Header CRC (FHCRC) is validated per RFC 1952 (`https://www.rfc-editor.org/rfc/rfc1952`). (tests: `test/gzip-fhcrc.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
101
+
102
+ ## XZ support details
103
+ - Checks supported: none (0x00), CRC32 (0x01), CRC64 (0x04), SHA-256 (0x0A). (tests: `test/xz-utils-conformance.test.ts`, `test/xz-bcj-filters.test.ts`, `test/xz-thirdparty.test.ts`)
104
+ - Filters supported: LZMA2 (0x21), Delta (0x03), BCJ x86 (0x04), PowerPC (0x05), IA64 (0x06), ARM (0x07), ARM-Thumb (0x08), SPARC (0x09), ARM64 (0x0A), RISC-V (0x0B). (tests: `test/xz-utils-conformance.test.ts`, `test/xz-bcj-filters.test.ts`, `test/xz-thirdparty.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
105
+ - BCJ filter properties are size 0 or 4 bytes with alignment enforced; BCJ filters cannot be last. (tests: `test/xz-bcj-filters.test.ts`)
106
+ - Mixed chains (Delta -> BCJ -> LZMA2) are supported; non-LZMA2 filters apply in reverse order during decode. (tests: `test/xz-mixed-filters.test.ts`)
107
+ - BCJ start offsets are interpreted per stream (reset at concatenation) and applied as provided in filter properties. (tests: `test/xz-bcj-filters.test.ts`, `test/xz-concat-bcj.test.ts`)
108
+
109
+ ## XZ Filter IDs — Sources of Truth
110
+ - Filter IDs 0x04..0x09 are grounded in `specs/xz-file-format.txt` (Filter IDs table).
111
+ - Filter IDs 0x0A (ARM64) and 0x0B (RISC-V) are grounded in `specs/xz-filter-ids.md`.
112
+
113
+ ## Support matrix (format × operation × runtime)
114
+ Legend: ✅ supported · ❌ unsupported (error code in cell) · ⚠️ explicit hint required · 🟦 capability-gated (throws `COMPRESSION_UNSUPPORTED_ALGORITHM` when missing).
115
+
116
+ ### Node (>=24)
117
+ | Format | Detect | List | Audit | Extract | Normalize | Write |
118
+ | --- | --- | --- | --- | --- | --- | --- |
119
+ | zip | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
120
+ | tar | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
121
+ | tgz / tar.gz | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
122
+ | gz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
123
+ | tar.bz2 | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
124
+ | bz2 | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
125
+ | tar.xz | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
126
+ | xz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
127
+ | tar.zst | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
128
+ | zst | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
129
+ | tar.br | ⚠️ (format: `tar.br` or filename) | ✅ | ✅ | ✅ | ✅ | ✅ |
130
+ | br | ⚠️ (format: `br` or filename) | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
131
+
132
+ ### Deno
133
+ | Format | Detect | List | Audit | Extract | Normalize | Write |
134
+ | --- | --- | --- | --- | --- | --- | --- |
135
+ | zip | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
136
+ | tar | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
137
+ | tgz / tar.gz | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
138
+ | gz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
139
+ | tar.bz2 | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
140
+ | bz2 | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
141
+ | tar.xz | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
142
+ | xz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
143
+ | tar.zst | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) |
144
+ | zst | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) |
145
+ | tar.br | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) |
146
+ | br | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) | ❌ (`COMPRESSION_UNSUPPORTED_ALGORITHM`) |
147
+
148
+ ### Bun
149
+ | Format | Detect | List | Audit | Extract | Normalize | Write |
150
+ | --- | --- | --- | --- | --- | --- | --- |
151
+ | zip | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
152
+ | tar | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
153
+ | tgz / tar.gz | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
154
+ | gz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
155
+ | tar.bz2 | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
156
+ | bz2 | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
157
+ | tar.xz | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
158
+ | xz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
159
+ | tar.zst | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
160
+ | zst | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
161
+ | tar.br | ⚠️ (format: `tar.br` or filename) | ✅ | ✅ | ✅ | ✅ | ✅ |
162
+ | br | ⚠️ (format: `br` or filename) | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
163
+
164
+ ### Web (Browser)
165
+ | Format | Detect | List | Audit | Extract | Normalize | Write |
166
+ | --- | --- | --- | --- | --- | --- | --- |
167
+ | zip | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
168
+ | tar | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
169
+ | tgz / tar.gz | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
170
+ | gz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ✅ |
171
+ | tar.bz2 | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
172
+ | bz2 | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
173
+ | tar.xz | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
174
+ | xz | ✅ | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | ❌ (`ARCHIVE_UNSUPPORTED_FORMAT`) |
175
+ | tar.zst | 🟦 | 🟦 | 🟦 | 🟦 | 🟦 | 🟦 |
176
+ | zst | 🟦 | 🟦 | 🟦 | 🟦 | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | 🟦 |
177
+ | tar.br | ⚠️ (format: `tar.br` or filename; 🟦 on runtimes without brotli streams) | ✅ | ✅ | ✅ | ✅ | 🟦 |
178
+ | br | ⚠️ (format: `br` or filename; 🟦 on runtimes without brotli streams) | ✅ | ✅ | ✅ | ❌ (`ARCHIVE_UNSUPPORTED_FEATURE`) | 🟦 |
179
+
180
+ Matrix proofs: `test/archive.test.ts`, `test/bun.smoke.ts`, `test/deno.smoke.ts`, `test/xz.test.ts`, `test/bzip2.test.ts`, `test/tar-xz.test.ts`, `test/single-file-formats.test.ts`, `test/archive-writer-proof.test.ts`, `test/audit-normalize-proof.test.ts`, `test/support-matrix-behavior.test.ts`, `test/support-matrix.test.ts`, `test/web-adapter.test.ts`, `test/web-writer-roundtrip.test.ts`.
181
+ Write proofs: `test/archive-writer-proof.test.ts`, `test/archive.test.ts`, `test/bun.smoke.ts`, `test/deno.smoke.ts`, `test/web-writer-roundtrip.test.ts`.
182
+ Write-negative proofs: `test/archive-writer-proof.test.ts`, `test/deno.smoke.ts`.
183
+
184
+ ```json support-matrix
185
+ {
186
+ "formats": [
187
+ "zip",
188
+ "tar",
189
+ "tgz",
190
+ "tar.gz",
191
+ "gz",
192
+ "bz2",
193
+ "tar.bz2",
194
+ "zst",
195
+ "tar.zst",
196
+ "br",
197
+ "tar.br",
198
+ "xz",
199
+ "tar.xz"
200
+ ],
201
+ "operations": ["detect", "list", "audit", "extract", "normalize", "write"],
202
+ "runtimes": ["node", "deno", "bun", "web"]
203
+ }
204
+ ```
205
+
206
+ ## Web runtime notes
207
+ - Entry point: `@ismail-elkorchi/bytefold/web` / `./web`.
208
+ - Supported input kinds in the web adapter: `Uint8Array`, `ArrayBuffer`, `ReadableStream<Uint8Array>`, `Blob`/`File`, and HTTPS URL.
209
+ - URL behavior in the web adapter: only HTTPS URLs are accepted; responses are always fully fetched before archive detection/opening, and by design no seekable HTTP Range session is attempted. `maxInputBytes` is enforced both from `Content-Length` and during streaming reads, with over-limit chunked/slow responses canceled before full transfer. (tests: `test/web-adapter.test.ts`, `test/security-audit-simulation.test.ts`)
210
+ - ZIP on Blob uses seekable random access (`blob.slice(start, end).arrayBuffer()`), so listing/extracting ZIP from Blob stays bounded by seek budget and avoids full Blob buffering. (tests: `test/web-adapter.test.ts`)
211
+ - Web write roundtrip contract: ZIP (store-only mode) and TAR can be created through the web entrypoint into pure Web `WritableStream` sinks, wrapped in Blob, and reopened with matching entry names/bytes plus safe audit/normalize behavior. (tests: `test/web-writer-roundtrip.test.ts`)
212
+ - Browser credibility contract: Chromium smoke executes the web entrypoint in a real browser and proves Blob ZIP roundtrip, ZIP/TAR writer roundtrip, and URL `maxInputBytes` adversarial abort behavior (bounded transfer + no `Range` requests). (tests: `test/browser/web-entrypoint.pw.ts`)
213
+ - XZ seekable preflight over Blob is not implemented in this iteration; Blob XZ paths use bounded full-buffer input handling and existing decode-time resource ceilings. (tests: `test/web-adapter.test.ts`, `test/resource-ceilings.test.ts`)
214
+ - Compression capability reporting for web runtime:
215
+ - `getCompressionCapabilities()` probes `CompressionStream` and `DecompressionStream` constructor acceptance independently for algorithm strings `gzip`, `deflate`, `deflate-raw`, `brotli`, and `zstd`;
216
+ - each algorithm reports `compress` and `decompress` truthfully per constructor acceptance; unsupported modes surface `COMPRESSION_UNSUPPORTED_ALGORITHM` when requested;
217
+ - pure-JS decode support remains for `bzip2` and `xz`;
218
+ - when either web compression constructor is missing, `notes` includes an explicit missing-constructor message. (tests: `test/compress-runtime-web.test.ts`, `test/support-matrix-behavior.test.ts`, `test/schema-contracts.test.ts`)
219
+ - Runtime detection for capabilities uses `runtime: "web"` when Bun/Deno/Node markers are absent and either web compression global exists (`CompressionStream` or `DecompressionStream`). (tests: `test/compress-runtime-web.test.ts`, `test/schema-contracts.test.ts`)
220
+
221
+ ## Single-file compressed formats: entry naming
222
+ Naming is deterministic and sanitized to a single path segment. (tests: `test/single-file-formats.test.ts`)
223
+ - Inputs with a filename hint (file path or URL): use the final path segment, strip the compression extension, and return `name` or `name.tar` for `.tar.*` variants.
224
+ - Inputs without a filename hint (bytes/streams): default to `data`, except gzip may use a header FNAME if present.
225
+ - Gzip header options: FEXTRA and FCOMMENT fields are skipped per RFC1952 and do not block FNAME parsing. (tests: `test/gzip-header-options.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
226
+ - Gzip FNAME: used only after sanitization (basename only; NUL, empty, `.`/`..` rejected). If rejected or missing, fall back to filename or `data`.
227
+
228
+ ## Normalization determinism
229
+ - Deterministic normalization (`isDeterministic: true`) emits stable bytes for zip and tar; normalizing an already normalized archive yields byte-identical output. (tests: `test/audit-normalize-proof.test.ts`)
230
+ - For tar-wrapped compressed formats, normalization produces a deterministic tar stream and preserves entry names + contents. (tests: `test/audit-normalize-proof.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
231
+
232
+ ## Ambiguity policy (normalize safe mode)
233
+ - Collision key pipeline: path normalization (slashes + dot segments) → `normalize('NFC')` → full Unicode case folding (from `specs/unicode/CaseFolding-17.0.0.txt`, statuses C+F, T excluded) → `normalize('NFC')`. (tests: `test/unicode-collision.test.ts`, `test/casefold-collision.test.ts`)
234
+ - Duplicate paths: error (tar → `ARCHIVE_NAME_COLLISION` + `TAR_DUPLICATE_ENTRY`, zip → `ZIP_NAME_COLLISION` + `ZIP_DUPLICATE_ENTRY`). (tests: `test/ambiguous-fixtures.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
235
+ - Case-fold collisions: error (tar → `ARCHIVE_NAME_COLLISION` + `TAR_CASE_COLLISION`, zip → `ZIP_NAME_COLLISION` + `ZIP_CASE_COLLISION`). (tests: `test/ambiguous-fixtures.test.ts`, `test/casefold-collision.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
236
+ - Unicode normalization collisions (NFC): error (tar → `ARCHIVE_NAME_COLLISION` + `TAR_UNICODE_COLLISION`, zip → `ZIP_NAME_COLLISION` + `ZIP_UNICODE_COLLISION`). (tests: `test/unicode-collision.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
237
+ - Directory/file kind conflict (`dir` vs `dir/`): allowed; entries remain distinct after normalization. (tests: `test/ambiguous-fixtures.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
238
+ - Symlinks: rejected in normalize safe mode (tar → `ARCHIVE_UNSUPPORTED_FEATURE` + `TAR_SYMLINK_PRESENT`, zip → `ZIP_SYMLINK_DISALLOWED`). (tests: `test/ambiguous-fixtures.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`, `test/error-contracts.test.ts`)
239
+ - Hardlinks (tar `link` entries): rejected in normalize safe mode (`ARCHIVE_UNSUPPORTED_FEATURE` + `TAR_UNSUPPORTED_ENTRY`). (tests: `test/ambiguous-fixtures.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
240
+ - Path normalization: backslashes are normalized to `/`, `.` segments and repeated slashes are removed; absolute paths, drive-letter prefixes, and `..` segments are rejected with path traversal errors. (tests: `test/ambiguous-fixtures.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
241
+
242
+ ## Security model (name collisions)
243
+ - Unicode normalization collisions are rejected in audit/normalize/extract because filesystem normalization differs across platforms, and accepting them can cause nondeterministic overwrites. (tests: `test/unicode-collision.test.ts`)
244
+
245
+ ## Concatenation semantics (gzip, bzip2)
246
+ - Gzip concatenated members decode sequentially across Node, Deno, and Bun. (tests: `test/single-file-formats.test.ts`, `test/bun.smoke.ts`, `test/deno.smoke.ts`)
247
+ - Bzip2 concatenated streams decode sequentially across runtimes. (tests: `test/single-file-formats.test.ts`, `test/bun.smoke.ts`, `test/deno.smoke.ts`)
248
+
249
+ ## Resource ceilings
250
+ - Defaults (single source: `src/limits.ts`): `maxXzDictionaryBytes = 64 MiB` (agent profile: 32 MiB), `maxXzBufferedBytes = 1 MiB`, `maxXzIndexRecords = 1,000,000` (agent profile: 200,000), `maxXzIndexBytes = 64 MiB` (agent profile: 16 MiB), `maxXzPreflightBlockHeaders = 1024` (agent profile: 256), `maxZipCentralDirectoryBytes = 64 MiB` (agent profile: 16 MiB), `maxZipCommentBytes = 65,535` (agent profile: 16,384), `maxZipEocdSearchBytes = 65,558`, `maxBzip2BlockSize = 9`. (tests: `test/resource-ceilings.test.ts`, `test/resource-defaults.test.ts`, `test/xz-index-limits.test.ts`, `test/xz-seekable-preflight.test.ts`, `test/zip-seekable-preflight.test.ts`)
251
+ - `maxXzPreflightBlockHeaders` bounds seekable dictionary preflight; `0` disables block-header scanning and yields `COMPRESSION_RESOURCE_PREFLIGHT_INCOMPLETE` (info). (tests: `test/xz-seekable-preflight.test.ts`)
252
+ - Rationale: XZ Index fields are encoded as VLIs up to 63 bits (`specs/xz-file-format.txt`), so record counts and index sizes can be arbitrarily large; the defaults cap scan time/space while allowing typical archives. (tests: `test/xz-index-limits.test.ts`)
253
+ - XZ Index VLI decoding is streaming-safe even when VLI bytes split across chunks. (tests: `test/xz-vli-boundaries.test.ts`)
254
+ - Overrides: ceilings are configurable via `limits` in `openArchive(...)`, `ArchiveReader.audit(...)`, and `ArchiveReader.normalizeToWritable(...)`, and via `limits` in `createDecompressor(...)`. (tests: `test/resource-ceilings.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
255
+ - Precedence rules (profile vs limits):
256
+ - `openArchive(...)`: `profile` chooses reader defaults (`strict` mode + default limits), then `limits` overrides only the specified fields; unspecified fields stay on profile defaults. (tests: `test/option-precedence.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
257
+ - `ZipReader` / `TarReader` construction: same rule as `openArchive` because the constructors resolve `profile` defaults first, then merge explicit `limits` field-by-field, and `isStrict` (if set) overrides profile strictness. (tests: `test/option-precedence.test.ts`)
258
+ - `createDecompressor(...)`: explicit scalar knobs (`maxOutputBytes`, `maxCompressionRatio`, `maxDictionaryBytes`, `maxBufferedInputBytes`) take precedence over their `limits` counterparts; remaining values come from `limits`; `profile` still controls behavior independently (for example, unsupported XZ checks in strict vs compat). (tests: `test/option-precedence.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`, `test/xz-utils-conformance.test.ts`)
259
+ - `createArchiveWriter(...)`: no `profile`/`limits` API exists for writer creation; precedence is not applicable. (tests: `test/archive.test.ts`, `test/archive-writer-proof.test.ts`)
260
+ - Audit preflight: bzip2 block size and xz dictionary size are checked from headers and reported as `COMPRESSION_RESOURCE_LIMIT` without full decompression. (tests: `test/resource-ceilings.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`, `test/xz-seekable-preflight.test.ts`)
261
+ - `maxTotalDecompressedBytes` enforces output ceilings for gzip/deflate/brotli/zstd with `COMPRESSION_RESOURCE_LIMIT` and no output beyond the limit. (tests: `test/compression-resource-limits.test.ts`)
262
+
263
+ ## Concatenation and resource ceilings
264
+ - XZ: preflight scans concatenated streams using stream headers + Index records (no payload decompression), applies Index ceilings across the concatenation, and scans Block Headers to enforce dictionary limits up to `maxXzPreflightBlockHeaders` per stream. If block count exceeds the limit, audit emits `COMPRESSION_RESOURCE_PREFLIGHT_INCOMPLETE` (info) with `requiredBlockHeaders` + `limitBlockHeaders`, and dictionary limits remain enforced during decode. (tests: `test/resource-ceilings.test.ts`, `test/xz-index-limits.test.ts`, `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
265
+ - XZ preflight scanning is O(1) memory with respect to Index record count (no per-record arrays). (tests: `test/xz-index-limits.test.ts`)
266
+ - Bzip2: preflight only inspects the first member; audit emits `COMPRESSION_RESOURCE_PREFLIGHT_INCOMPLETE` when bzip2 limits are in effect, and concatenated members are enforced during decode. (tests: `test/resource-ceilings.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
267
+ - Seekable XZ preflight: for file paths or HTTP Range URLs, index + dictionary limits run before full buffering; HTTP failures map to `ARCHIVE_HTTP_*` codes with preserved `context.httpCode`. (tests: `test/xz-http-error-mapping.test.ts`, `test/xz-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
268
+ - Seekable ZIP preflight: for file paths or HTTP Range URLs, EOCD/central-directory limits run before full buffering; Range is required for HTTP preflight; multi-disk archives are rejected. (tests: `test/zip-seekable-preflight.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
269
+ - HTTP Range random access: validators (ETag/Last-Modified) are pinned, `If-Range` is used only with strong ETags, header-only failures abort before body consumption, content codings are rejected, and 206 body length must exactly match the requested range. (specs: `specs/http/rfc9110-if-range.md`, `specs/http/rfc9110-accept-encoding.md`; tests: `test/zip-url-seekable-budget.test.ts`, `test/deno.smoke.ts`, `test/bun.smoke.ts`)
270
+
271
+ ## Error model
272
+ - Stable error classes: `ZipError`, `CompressionError`, `ArchiveError`.
273
+ - Stable error codes are defined in `src/errors.ts`, `src/compress/errors.ts`, and `src/archive/errors.ts`.
274
+ - Error JSON includes `schemaVersion: "1"` plus `hint` and `context`, and serializes as plain objects with string/number fields only.
275
+ - Error JSON context policy: `context` MUST NOT duplicate any top-level key name. Keys that are top-level in an error payload (`schemaVersion`, `name`, `code`, `message`, `hint`, `context`, plus optional top-level fields like `entryName`, `method`, `offset`, `algorithm`) are stripped from `context` during serialization so machine consumers have one canonical location per fact. (tests: `test/error-json-ambiguity.test.ts`, `test/error-contracts.test.ts`)
276
+ - JSON schema: `schemas/error.schema.json`. (tests: `test/schema-contracts.test.ts`, `test/error-contracts.test.ts`)
277
+
278
+ ## Report model
279
+ - Reports are JSON-safe objects with `schemaVersion: "1"` and primitive fields only.
280
+ - `ArchiveDetectionReport`, `ArchiveAuditReport`, `ArchiveNormalizeReport`, and `CompressionCapabilities` use numbers/strings/arrays only; no `bigint`.
281
+ - Any report `toJSON()` implementation MUST return JSON-safe data.
282
+ - JSON schemas: `schemas/detection-report.schema.json`, `schemas/audit-report.schema.json`, `schemas/normalize-report.schema.json`, `schemas/capabilities-report.schema.json`. (tests: `test/schema-contracts.test.ts`)
283
+
284
+ ## Schema validation
285
+ Supported JSON Schema subset (enforced by `test/schema-contracts.test.ts` and `test/schema-validator.ts`): `type`, `required`, `properties`, `enum`, `items`, `additionalProperties`.
@@ -0,0 +1,3 @@
1
/**
 * Throw the signal's abort reason if it has already been aborted; no-op when
 * the signal is absent or still live. (The implementation throws an Error
 * reason as-is and falls back to a DOMException AbortError when no reason
 * was supplied.)
 */
export declare function throwIfAborted(signal?: AbortSignal | null): void;
/**
 * Merge any number of optional signals into a single signal that aborts with
 * the first input's abort reason. Returns `undefined` when no usable signal
 * is provided, and the input itself when exactly one is provided.
 */
export declare function mergeSignals(...signals: Array<AbortSignal | null | undefined>): AbortSignal | undefined;
3
+ //# sourceMappingURL=abort.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"abort.d.ts","sourceRoot":"","sources":["../src/abort.ts"],"names":[],"mappings":"AAAA,wBAAgB,cAAc,CAAC,MAAM,CAAC,EAAE,WAAW,GAAG,IAAI,GAAG,IAAI,CAQhE;AAED,wBAAgB,YAAY,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,WAAW,GAAG,IAAI,GAAG,SAAS,CAAC,GAAG,WAAW,GAAG,SAAS,CAsBvG"}
package/dist/abort.js ADDED
@@ -0,0 +1,33 @@
1
/**
 * Throw the abort reason of `signal` if it has already been aborted.
 *
 * No-op when `signal` is null/undefined or not yet aborted. An Error reason
 * is rethrown as-is; any other non-nullish reason is thrown directly, and a
 * missing reason falls back to a standard DOMException AbortError.
 */
export function throwIfAborted(signal) {
    if (!signal?.aborted) {
        return;
    }
    const { reason } = signal;
    if (reason instanceof Error) {
        throw reason;
    }
    throw reason ?? new DOMException('The operation was aborted', 'AbortError');
}
12
/**
 * Merge multiple AbortSignals into one signal that aborts when any input aborts.
 *
 * @param signals - Signals to merge; null/undefined entries are ignored.
 * @returns `undefined` when no usable signal was given, the signal itself when
 *   exactly one was given, otherwise a combined signal carrying the first
 *   abort reason.
 *
 * Uses native `AbortSignal.any` when available. The manual fallback fixes two
 * defects of the previous version: listeners were never removed from the
 * signals that did not fire (a leak on long-lived signals), and when an
 * already-aborted signal appeared after live ones, listeners had already been
 * attached to the earlier signals and were never cleaned up.
 */
export function mergeSignals(...signals) {
    const active = signals.filter((signal) => !!signal);
    if (active.length === 0)
        return undefined;
    if (active.length === 1)
        return active[0];
    if (typeof AbortSignal.any === 'function') {
        return AbortSignal.any(active);
    }
    const controller = new AbortController();
    // Fast path: mirror an already-aborted input without attaching any
    // listeners at all.
    for (const signal of active) {
        if (signal.aborted) {
            controller.abort(signal.reason);
            return controller.signal;
        }
    }
    for (const signal of active) {
        // Passing `signal: controller.signal` detaches this listener
        // automatically as soon as the merged signal aborts, so signals that
        // never fire do not accumulate leaked listeners.
        signal.addEventListener('abort', () => {
            controller.abort(signal.reason);
        }, { once: true, signal: controller.signal });
    }
    return controller.signal;
}
33
+ //# sourceMappingURL=abort.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"abort.js","sourceRoot":"","sources":["../src/abort.ts"],"names":[],"mappings":"AAAA,MAAM,UAAU,cAAc,CAAC,MAA2B;IACxD,IAAI,CAAC,MAAM;QAAE,OAAO;IACpB,IAAI,CAAC,MAAM,CAAC,OAAO;QAAE,OAAO;IAC5B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;IAC7B,IAAI,MAAM,YAAY,KAAK,EAAE,CAAC;QAC5B,MAAM,MAAM,CAAC;IACf,CAAC;IACD,MAAM,MAAM,IAAI,IAAI,YAAY,CAAC,2BAA2B,EAAE,YAAY,CAAC,CAAC;AAC9E,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,GAAG,OAA8C;IAC5E,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,EAAyB,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;IAC3E,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,SAAS,CAAC;IAC1C,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,MAAM,CAAC,CAAC,CAAC,CAAC;IAC1C,IAAI,OAAO,WAAW,CAAC,GAAG,KAAK,UAAU,EAAE,CAAC;QAC1C,OAAO,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IACjC,CAAC;IACD,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;IACzC,KAAK,MAAM,MAAM,IAAI,MAAM,EAAE,CAAC;QAC5B,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;YACnB,UAAU,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAChC,MAAM;QACR,CAAC;QACD,MAAM,CAAC,gBAAgB,CACrB,OAAO,EACP,GAAG,EAAE;YACH,UAAU,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAClC,CAAC,EACD,EAAE,IAAI,EAAE,IAAI,EAAE,CACf,CAAC;IACJ,CAAC;IACD,OAAO,UAAU,CAAC,MAAM,CAAC;AAC3B,CAAC"}
@@ -0,0 +1,34 @@
1
/** Stable archive error codes (format, safety, limit, and HTTP range-access failures). */
export type ArchiveErrorCode = 'ARCHIVE_UNSUPPORTED_FORMAT' | 'ARCHIVE_TRUNCATED' | 'ARCHIVE_BAD_HEADER' | 'ARCHIVE_NAME_COLLISION' | 'ARCHIVE_PATH_TRAVERSAL' | 'ARCHIVE_LIMIT_EXCEEDED' | 'ARCHIVE_UNSUPPORTED_FEATURE' | 'ARCHIVE_HTTP_RANGE_UNSUPPORTED' | 'ARCHIVE_HTTP_RANGE_INVALID' | 'ARCHIVE_HTTP_RESOURCE_CHANGED' | 'ARCHIVE_HTTP_CONTENT_ENCODING' | 'ARCHIVE_HTTP_STRONG_ETAG_REQUIRED' | 'ARCHIVE_HTTP_BAD_RESPONSE' | 'ARCHIVE_HTTP_SIZE_UNKNOWN' | 'ARCHIVE_AUDIT_FAILED';
/** Error thrown for archive-level failures and safety violations. */
export declare class ArchiveError extends Error {
    /** Machine-readable error code. */
    readonly code: ArchiveErrorCode;
    /** Entry name related to the error, if available. */
    readonly entryName?: string | undefined;
    /** Offset (in bytes) related to the error, if available. Kept as bigint here; serialized as a string. */
    readonly offset?: bigint | undefined;
    /** Underlying cause, if available. */
    readonly cause?: unknown;
    /** Additional context for serialization; string values only so the JSON payload stays flat. */
    readonly context?: Record<string, string> | undefined;
    /** Create an ArchiveError with a stable code. */
    constructor(code: ArchiveErrorCode, message: string, options?: {
        entryName?: string | undefined;
        offset?: bigint | undefined;
        context?: Record<string, string> | undefined;
        cause?: unknown;
    });
    /**
     * JSON-safe serialization with schemaVersion "1".
     * `offset` is emitted as a decimal string because bigint is not JSON-safe,
     * and `hint` mirrors `message` in the implementation.
     */
    toJSON(): {
        schemaVersion: string;
        name: string;
        code: ArchiveErrorCode;
        message: string;
        hint: string;
        context: Record<string, string>;
        entryName?: string;
        offset?: string;
    };
}
34
+ //# sourceMappingURL=errors.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../../src/archive/errors.ts"],"names":[],"mappings":"AAGA,kCAAkC;AAClC,MAAM,MAAM,gBAAgB,GACxB,4BAA4B,GAC5B,mBAAmB,GACnB,oBAAoB,GACpB,wBAAwB,GACxB,wBAAwB,GACxB,wBAAwB,GACxB,6BAA6B,GAC7B,gCAAgC,GAChC,4BAA4B,GAC5B,+BAA+B,GAC/B,+BAA+B,GAC/B,mCAAmC,GACnC,2BAA2B,GAC3B,2BAA2B,GAC3B,sBAAsB,CAAC;AAE3B,qEAAqE;AACrE,qBAAa,YAAa,SAAQ,KAAK;IACrC,mCAAmC;IACnC,QAAQ,CAAC,IAAI,EAAE,gBAAgB,CAAC;IAChC,qDAAqD;IACrD,QAAQ,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACxC,4DAA4D;IAC5D,QAAQ,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACrC,sCAAsC;IACtC,SAAkB,KAAK,CAAC,EAAE,OAAO,CAAC;IAClC,4CAA4C;IAC5C,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,SAAS,CAAC;IAEtD,iDAAiD;gBAE/C,IAAI,EAAE,gBAAgB,EACtB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE;QACR,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QAC/B,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;QAC5B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,SAAS,CAAC;QAC7C,KAAK,CAAC,EAAE,OAAO,CAAC;KACjB;IAWH,sDAAsD;IACtD,MAAM,IAAI;QACR,aAAa,EAAE,MAAM,CAAC;QACtB,IAAI,EAAE,MAAM,CAAC;QACb,IAAI,EAAE,gBAAgB,CAAC;QACvB,OAAO,EAAE,MAAM,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAChC,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,MAAM,CAAC,EAAE,MAAM,CAAC;KACjB;CAgBF"}
@@ -0,0 +1,45 @@
1
+ import { sanitizeErrorContext } from '../errorContext.js';
2
+ import { BYTEFOLD_REPORT_SCHEMA_VERSION } from '../reportSchema.js';
3
/**
 * Error thrown for archive-level failures and safety violations.
 *
 * Carries a stable machine-readable `code` plus optional `entryName`,
 * `offset` (bigint), `context` (string map), and `cause`. `toJSON()` emits a
 * flat JSON-safe payload with `schemaVersion` "1".
 */
export class ArchiveError extends Error {
    /** Machine-readable error code. */
    code;
    /** Entry name related to the error, if available. */
    entryName;
    /** Offset (in bytes) related to the error, if available. */
    offset;
    /** Underlying cause, if available. */
    cause;
    /** Additional context for serialization. */
    context;
    /** Create an ArchiveError with a stable code. */
    constructor(code, message, options) {
        const causeOptions = options?.cause ? { cause: options.cause } : undefined;
        super(message, causeOptions);
        this.name = 'ArchiveError';
        this.code = code;
        this.entryName = options?.entryName;
        this.offset = options?.offset;
        this.context = options?.context;
        this.cause = options?.cause;
    }
    /** JSON-safe serialization with schemaVersion "1". */
    toJSON() {
        // Keys that appear at top level of the payload must be stripped from
        // `context` so every fact has exactly one canonical location.
        const shadowedKeys = [
            this.entryName !== undefined ? 'entryName' : null,
            this.offset !== undefined ? 'offset' : null
        ].filter((key) => key !== null);
        const context = sanitizeErrorContext(this.context, shadowedKeys);
        const payload = {
            schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
            name: this.name,
            code: this.code,
            message: this.message,
            hint: this.message,
            context
        };
        if (this.entryName !== undefined) {
            payload.entryName = this.entryName;
        }
        if (this.offset !== undefined) {
            // bigint is not JSON-safe; serialize as a decimal string.
            payload.offset = this.offset.toString();
        }
        return payload;
    }
}
45
+ //# sourceMappingURL=errors.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"errors.js","sourceRoot":"","sources":["../../src/archive/errors.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAC1D,OAAO,EAAE,8BAA8B,EAAE,MAAM,oBAAoB,CAAC;AAoBpE,qEAAqE;AACrE,MAAM,OAAO,YAAa,SAAQ,KAAK;IACrC,mCAAmC;IAC1B,IAAI,CAAmB;IAChC,qDAAqD;IAC5C,SAAS,CAAsB;IACxC,4DAA4D;IACnD,MAAM,CAAsB;IACrC,sCAAsC;IACpB,KAAK,CAAW;IAClC,4CAA4C;IACnC,OAAO,CAAsC;IAEtD,iDAAiD;IACjD,YACE,IAAsB,EACtB,OAAe,EACf,OAKC;QAED,KAAK,CAAC,OAAO,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;QACtE,IAAI,CAAC,IAAI,GAAG,cAAc,CAAC;QAC3B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,SAAS,GAAG,OAAO,EAAE,SAAS,CAAC;QACpC,IAAI,CAAC,MAAM,GAAG,OAAO,EAAE,MAAM,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,OAAO,EAAE,OAAO,CAAC;QAChC,IAAI,CAAC,KAAK,GAAG,OAAO,EAAE,KAAK,CAAC;IAC9B,CAAC;IAED,sDAAsD;IACtD,MAAM;QAUJ,MAAM,kBAAkB,GAAa,EAAE,CAAC;QACxC,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS;YAAE,kBAAkB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACvE,IAAI,IAAI,CAAC,MAAM,KAAK,SAAS;YAAE,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACjE,MAAM,OAAO,GAAG,oBAAoB,CAAC,IAAI,CAAC,OAAO,EAAE,kBAAkB,CAAC,CAAC;QACvE,OAAO;YACL,aAAa,EAAE,8BAA8B;YAC7C,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,IAAI,EAAE,IAAI,CAAC,OAAO;YAClB,OAAO;YACP,GAAG,CAAC,IAAI,CAAC,SAAS,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YACtE,GAAG,CAAC,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SACzE,CAAC;IACJ,CAAC;CACF"}
@@ -0,0 +1,2 @@
1
/**
 * Map an HttpError to a typed ArchiveError (the original HTTP error code is
 * preserved under `context.httpCode`); any non-HttpError input is returned
 * unchanged.
 */
export declare function mapHttpErrorToArchiveError(err: unknown, context?: Record<string, string>): unknown;
2
+ //# sourceMappingURL=httpArchiveErrors.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"httpArchiveErrors.d.ts","sourceRoot":"","sources":["../../src/archive/httpArchiveErrors.ts"],"names":[],"mappings":"AAaA,wBAAgB,0BAA0B,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAWlG"}
@@ -0,0 +1,25 @@
1
+ import { HttpError } from '../http/errors.js';
2
+ import { ArchiveError } from './errors.js';
3
/**
 * Translation table from HttpError codes to the ARCHIVE_HTTP_* codes carried
 * by ArchiveError. Declared with a null prototype so an unexpected `err.code`
 * (e.g. "toString") can never match an inherited Object.prototype member and
 * bypass the generic-fallback path below; frozen so the shared mapping cannot
 * be mutated at runtime.
 */
const HTTP_ERROR_TO_ARCHIVE_ERROR = Object.freeze({
  __proto__: null,
  HTTP_RANGE_UNSUPPORTED: 'ARCHIVE_HTTP_RANGE_UNSUPPORTED',
  HTTP_RANGE_INVALID: 'ARCHIVE_HTTP_RANGE_INVALID',
  HTTP_RESOURCE_CHANGED: 'ARCHIVE_HTTP_RESOURCE_CHANGED',
  HTTP_CONTENT_ENCODING: 'ARCHIVE_HTTP_CONTENT_ENCODING',
  HTTP_STRONG_ETAG_REQUIRED: 'ARCHIVE_HTTP_STRONG_ETAG_REQUIRED',
  HTTP_BAD_RESPONSE: 'ARCHIVE_HTTP_BAD_RESPONSE',
  HTTP_SIZE_UNKNOWN: 'ARCHIVE_HTTP_SIZE_UNKNOWN'
});
/**
 * Wraps an HttpError in an ArchiveError that carries the equivalent
 * ARCHIVE_HTTP_* code; any non-HttpError value is returned unchanged.
 *
 * @param {unknown} err - Value raised by the HTTP layer.
 * @param {Record<string, string>} [context] - Extra context merged into the
 *   resulting ArchiveError. Keys also present on `err.context` are overridden
 *   by the error's own context (spread order below).
 * @returns {unknown} A new ArchiveError (with `cause: err`) when `err` is an
 *   HttpError, otherwise `err` itself.
 */
export function mapHttpErrorToArchiveError(err, context) {
  if (!(err instanceof HttpError))
    return err;
  // Unrecognized HTTP codes degrade to the generic "bad response" code.
  const mappedCode = HTTP_ERROR_TO_ARCHIVE_ERROR[err.code] ?? 'ARCHIVE_HTTP_BAD_RESPONSE';
  return new ArchiveError(mappedCode, err.message, {
    context: {
      ...(context ?? {}),
      ...(err.context ?? {}),
      httpCode: err.code
    },
    cause: err
  });
}
25
+ //# sourceMappingURL=httpArchiveErrors.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"httpArchiveErrors.js","sourceRoot":"","sources":["../../src/archive/httpArchiveErrors.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAsB,MAAM,mBAAmB,CAAC;AAClE,OAAO,EAAE,YAAY,EAAyB,MAAM,aAAa,CAAC;AAElE,MAAM,2BAA2B,GAA4C;IAC3E,sBAAsB,EAAE,gCAAgC;IACxD,kBAAkB,EAAE,4BAA4B;IAChD,qBAAqB,EAAE,+BAA+B;IACtD,qBAAqB,EAAE,+BAA+B;IACtD,yBAAyB,EAAE,mCAAmC;IAC9D,iBAAiB,EAAE,2BAA2B;IAC9C,iBAAiB,EAAE,2BAA2B;CAC/C,CAAC;AAEF,MAAM,UAAU,0BAA0B,CAAC,GAAY,EAAE,OAAgC;IACvF,IAAI,CAAC,CAAC,GAAG,YAAY,SAAS,CAAC;QAAE,OAAO,GAAG,CAAC;IAC5C,MAAM,UAAU,GAAG,2BAA2B,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,2BAA2B,CAAC;IACxF,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,GAAG,CAAC,OAAO,EAAE;QAC/C,OAAO,EAAE;YACP,GAAG,CAAC,OAAO,IAAI,EAAE,CAAC;YAClB,GAAG,CAAC,GAAG,CAAC,OAAO,IAAI,EAAE,CAAC;YACtB,QAAQ,EAAE,GAAG,CAAC,IAAI;SACnB;QACD,KAAK,EAAE,GAAG;KACX,CAAC,CAAC;AACL,CAAC"}
@@ -0,0 +1,47 @@
1
+ import type { ArchiveAuditReport, ArchiveDetectionReport, ArchiveEntry, ArchiveFormat, ArchiveLimits, ArchiveNormalizeReport, ArchiveOpenOptions, ArchiveProfile } from './types.js';
2
+ import type { ZipWriterOptions } from '../types.js';
3
+ import type { TarWriterOptions } from '../tar/types.js';
4
+ /** Unified archive reader API returned by openArchive(). */
5
+ export type ArchiveReader = {
6
+ format: ArchiveFormat;
7
+ detection?: ArchiveDetectionReport;
8
+ entries(): AsyncGenerator<ArchiveEntry>;
9
+ audit(options?: ArchiveAuditOptions): Promise<ArchiveAuditReport>;
10
+ assertSafe(options?: ArchiveAuditOptions): Promise<void>;
11
+ normalizeToWritable?(writable: WritableStream<Uint8Array>, options?: ArchiveNormalizeOptions): Promise<ArchiveNormalizeReport>;
12
+ };
13
+ /** Unified archive writer API for ZIP/TAR and layered formats. */
14
+ export type ArchiveWriter = {
15
+ format: ArchiveFormat;
16
+ add(name: string, source?: Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>, options?: unknown): Promise<void>;
17
+ close(): Promise<void>;
18
+ };
19
+ /** Options for creating archive writers. */
20
+ export type ArchiveWriterOptions = {
21
+ zip?: ZipWriterOptions;
22
+ tar?: TarWriterOptions;
23
+ compression?: {
24
+ level?: number;
25
+ quality?: number;
26
+ };
27
+ };
28
+ /** Options for auditing archives opened via openArchive(). */
29
+ export type ArchiveAuditOptions = {
30
+ profile?: ArchiveProfile;
31
+ isStrict?: boolean;
32
+ limits?: ArchiveLimits;
33
+ signal?: AbortSignal;
34
+ };
35
+ /** Options for normalization via openArchive(). */
36
+ export type ArchiveNormalizeOptions = {
37
+ isDeterministic?: boolean;
38
+ limits?: ArchiveLimits;
39
+ signal?: AbortSignal;
40
+ };
41
+ /** Inputs accepted by openArchive(). */
42
+ export type ArchiveInput = Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob;
43
+ /** Open an archive with auto-detection (or a forced format). */
44
+ export declare function openArchive(input: ArchiveInput, options?: ArchiveOpenOptions): Promise<ArchiveReader>;
45
+ /** Create an archive writer for a specific format. */
46
+ export declare function createArchiveWriter(format: ArchiveFormat, writable: WritableStream<Uint8Array>, options?: ArchiveWriterOptions): ArchiveWriter;
47
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/archive/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,kBAAkB,EAClB,sBAAsB,EACtB,YAAY,EACZ,aAAa,EAGb,aAAa,EACb,sBAAsB,EACtB,kBAAkB,EAClB,cAAc,EACf,MAAM,YAAY,CAAC;AAcpB,OAAO,KAAK,EAKV,gBAAgB,EACjB,MAAM,aAAa,CAAC;AAGrB,OAAO,KAAK,EAA0D,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AAOhH,4DAA4D;AAC5D,MAAM,MAAM,aAAa,GAAG;IAC1B,MAAM,EAAE,aAAa,CAAC;IACtB,SAAS,CAAC,EAAE,sBAAsB,CAAC;IACnC,OAAO,IAAI,cAAc,CAAC,YAAY,CAAC,CAAC;IACxC,KAAK,CAAC,OAAO,CAAC,EAAE,mBAAmB,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;IAClE,UAAU,CAAC,OAAO,CAAC,EAAE,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACzD,mBAAmB,CAAC,CAClB,QAAQ,EAAE,cAAc,CAAC,UAAU,CAAC,EACpC,OAAO,CAAC,EAAE,uBAAuB,GAChC,OAAO,CAAC,sBAAsB,CAAC,CAAC;CACpC,CAAC;AAEF,kEAAkE;AAClE,MAAM,MAAM,aAAa,GAAG;IAC1B,MAAM,EAAE,aAAa,CAAC;IACtB,GAAG,CACD,IAAI,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,UAAU,GAAG,WAAW,GAAG,cAAc,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,EAC1F,OAAO,CAAC,EAAE,OAAO,GAChB,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACxB,CAAC;AAEF,4CAA4C;AAC5C,MAAM,MAAM,oBAAoB,GAAG;IACjC,GAAG,CAAC,EAAE,gBAAgB,CAAC;IACvB,GAAG,CAAC,EAAE,gBAAgB,CAAC;IACvB,WAAW,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CACpD,CAAC;AAyBF,8DAA8D;AAC9D,MAAM,MAAM,mBAAmB,GAAG;IAChC,OAAO,CAAC,EAAE,cAAc,CAAC;IACzB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,aAAa,CAAC;IACvB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB,CAAC;AAEF,mDAAmD;AACnD,MAAM,MAAM,uBAAuB,GAAG;IACpC,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,MAAM,CAAC,EAAE,aAAa,CAAC;IACvB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB,CAAC;AAEF,wCAAwC;AACxC,MAAM,MAAM,YAAY,GAAG,UAAU,GAAG,WAAW,GAAG,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC;AAExF,gEAAgE;AAChE,wBAAsB,WAAW,CAAC,KAAK,EAAE,YAAY,EAAE,OAAO,CAAC,EAAE,kBAAkB,GAAG,OAAO,CAAC,aAAa,CAAC,CAqD3G;AAwFD,sDAAsD;AACtD,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,aAAa,EACrB,QAAQ,EAAE,cAAc,CAAC,UAAU,CAAC,EACpC,OAAO,CAAC,EAAE,oBAAoB,GAC7B,aAAa,CA0Df"}