@ismail-elkorchi/bytefold 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (314)
  1. package/LICENSE +21 -0
  2. package/README.md +48 -0
  3. package/SPEC.md +285 -0
  4. package/dist/abort.d.ts +3 -0
  5. package/dist/abort.d.ts.map +1 -0
  6. package/dist/abort.js +33 -0
  7. package/dist/abort.js.map +1 -0
  8. package/dist/archive/errors.d.ts +34 -0
  9. package/dist/archive/errors.d.ts.map +1 -0
  10. package/dist/archive/errors.js +45 -0
  11. package/dist/archive/errors.js.map +1 -0
  12. package/dist/archive/httpArchiveErrors.d.ts +2 -0
  13. package/dist/archive/httpArchiveErrors.d.ts.map +1 -0
  14. package/dist/archive/httpArchiveErrors.js +25 -0
  15. package/dist/archive/httpArchiveErrors.js.map +1 -0
  16. package/dist/archive/index.d.ts +47 -0
  17. package/dist/archive/index.d.ts.map +1 -0
  18. package/dist/archive/index.js +1490 -0
  19. package/dist/archive/index.js.map +1 -0
  20. package/dist/archive/types.d.ts +91 -0
  21. package/dist/archive/types.d.ts.map +1 -0
  22. package/dist/archive/types.js +2 -0
  23. package/dist/archive/types.js.map +1 -0
  24. package/dist/archive/xzPreflight.d.ts +13 -0
  25. package/dist/archive/xzPreflight.d.ts.map +1 -0
  26. package/dist/archive/xzPreflight.js +44 -0
  27. package/dist/archive/xzPreflight.js.map +1 -0
  28. package/dist/archive/zipPreflight.d.ts +18 -0
  29. package/dist/archive/zipPreflight.d.ts.map +1 -0
  30. package/dist/archive/zipPreflight.js +50 -0
  31. package/dist/archive/zipPreflight.js.map +1 -0
  32. package/dist/binary.d.ts +12 -0
  33. package/dist/binary.d.ts.map +1 -0
  34. package/dist/binary.js +59 -0
  35. package/dist/binary.js.map +1 -0
  36. package/dist/bun/index.d.ts +19 -0
  37. package/dist/bun/index.d.ts.map +1 -0
  38. package/dist/bun/index.js +427 -0
  39. package/dist/bun/index.js.map +1 -0
  40. package/dist/compress/errors.d.ts +30 -0
  41. package/dist/compress/errors.d.ts.map +1 -0
  42. package/dist/compress/errors.js +40 -0
  43. package/dist/compress/errors.js.map +1 -0
  44. package/dist/compress/index.d.ts +12 -0
  45. package/dist/compress/index.d.ts.map +1 -0
  46. package/dist/compress/index.js +339 -0
  47. package/dist/compress/index.js.map +1 -0
  48. package/dist/compress/types.d.ts +41 -0
  49. package/dist/compress/types.d.ts.map +1 -0
  50. package/dist/compress/types.js +2 -0
  51. package/dist/compress/types.js.map +1 -0
  52. package/dist/compression/bzip2.d.ts +9 -0
  53. package/dist/compression/bzip2.d.ts.map +1 -0
  54. package/dist/compression/bzip2.js +546 -0
  55. package/dist/compression/bzip2.js.map +1 -0
  56. package/dist/compression/codecs.d.ts +6 -0
  57. package/dist/compression/codecs.d.ts.map +1 -0
  58. package/dist/compression/codecs.js +82 -0
  59. package/dist/compression/codecs.js.map +1 -0
  60. package/dist/compression/deflate64.d.ts +3 -0
  61. package/dist/compression/deflate64.d.ts.map +1 -0
  62. package/dist/compression/deflate64.js +549 -0
  63. package/dist/compression/deflate64.js.map +1 -0
  64. package/dist/compression/node-backend.d.ts +9 -0
  65. package/dist/compression/node-backend.d.ts.map +1 -0
  66. package/dist/compression/node-backend.js +103 -0
  67. package/dist/compression/node-backend.js.map +1 -0
  68. package/dist/compression/registry.d.ts +10 -0
  69. package/dist/compression/registry.d.ts.map +1 -0
  70. package/dist/compression/registry.js +30 -0
  71. package/dist/compression/registry.js.map +1 -0
  72. package/dist/compression/streams.d.ts +31 -0
  73. package/dist/compression/streams.d.ts.map +1 -0
  74. package/dist/compression/streams.js +147 -0
  75. package/dist/compression/streams.js.map +1 -0
  76. package/dist/compression/types.d.ts +19 -0
  77. package/dist/compression/types.d.ts.map +1 -0
  78. package/dist/compression/types.js +2 -0
  79. package/dist/compression/types.js.map +1 -0
  80. package/dist/compression/xz.d.ts +21 -0
  81. package/dist/compression/xz.d.ts.map +1 -0
  82. package/dist/compression/xz.js +1455 -0
  83. package/dist/compression/xz.js.map +1 -0
  84. package/dist/compression/xzFilters.d.ts +14 -0
  85. package/dist/compression/xzFilters.d.ts.map +1 -0
  86. package/dist/compression/xzFilters.js +736 -0
  87. package/dist/compression/xzFilters.js.map +1 -0
  88. package/dist/compression/xzIndexPreflight.d.ts +20 -0
  89. package/dist/compression/xzIndexPreflight.d.ts.map +1 -0
  90. package/dist/compression/xzIndexPreflight.js +371 -0
  91. package/dist/compression/xzIndexPreflight.js.map +1 -0
  92. package/dist/compression/xzScan.d.ts +15 -0
  93. package/dist/compression/xzScan.d.ts.map +1 -0
  94. package/dist/compression/xzScan.js +310 -0
  95. package/dist/compression/xzScan.js.map +1 -0
  96. package/dist/cp437.d.ts +2 -0
  97. package/dist/cp437.d.ts.map +1 -0
  98. package/dist/cp437.js +31 -0
  99. package/dist/cp437.js.map +1 -0
  100. package/dist/crc32.d.ts +7 -0
  101. package/dist/crc32.d.ts.map +1 -0
  102. package/dist/crc32.js +37 -0
  103. package/dist/crc32.js.map +1 -0
  104. package/dist/crc64.d.ts +6 -0
  105. package/dist/crc64.d.ts.map +1 -0
  106. package/dist/crc64.js +32 -0
  107. package/dist/crc64.js.map +1 -0
  108. package/dist/crypto/ctr.d.ts +11 -0
  109. package/dist/crypto/ctr.d.ts.map +1 -0
  110. package/dist/crypto/ctr.js +56 -0
  111. package/dist/crypto/ctr.js.map +1 -0
  112. package/dist/crypto/sha256.d.ts +16 -0
  113. package/dist/crypto/sha256.d.ts.map +1 -0
  114. package/dist/crypto/sha256.js +152 -0
  115. package/dist/crypto/sha256.js.map +1 -0
  116. package/dist/crypto/winzip-aes.d.ts +17 -0
  117. package/dist/crypto/winzip-aes.d.ts.map +1 -0
  118. package/dist/crypto/winzip-aes.js +98 -0
  119. package/dist/crypto/winzip-aes.js.map +1 -0
  120. package/dist/crypto/zipcrypto.d.ts +23 -0
  121. package/dist/crypto/zipcrypto.d.ts.map +1 -0
  122. package/dist/crypto/zipcrypto.js +99 -0
  123. package/dist/crypto/zipcrypto.js.map +1 -0
  124. package/dist/deno/index.d.ts +19 -0
  125. package/dist/deno/index.d.ts.map +1 -0
  126. package/dist/deno/index.js +422 -0
  127. package/dist/deno/index.js.map +1 -0
  128. package/dist/dosTime.d.ts +7 -0
  129. package/dist/dosTime.d.ts.map +1 -0
  130. package/dist/dosTime.js +21 -0
  131. package/dist/dosTime.js.map +1 -0
  132. package/dist/errorContext.d.ts +2 -0
  133. package/dist/errorContext.d.ts.map +1 -0
  134. package/dist/errorContext.js +24 -0
  135. package/dist/errorContext.js.map +1 -0
  136. package/dist/errors.d.ts +46 -0
  137. package/dist/errors.d.ts.map +1 -0
  138. package/dist/errors.js +51 -0
  139. package/dist/errors.js.map +1 -0
  140. package/dist/extraFields.d.ts +29 -0
  141. package/dist/extraFields.d.ts.map +1 -0
  142. package/dist/extraFields.js +201 -0
  143. package/dist/extraFields.js.map +1 -0
  144. package/dist/generated/unicodeCaseFolding.d.ts +4 -0
  145. package/dist/generated/unicodeCaseFolding.d.ts.map +1 -0
  146. package/dist/generated/unicodeCaseFolding.js +1594 -0
  147. package/dist/generated/unicodeCaseFolding.js.map +1 -0
  148. package/dist/http/errors.d.ts +26 -0
  149. package/dist/http/errors.d.ts.map +1 -0
  150. package/dist/http/errors.js +33 -0
  151. package/dist/http/errors.js.map +1 -0
  152. package/dist/index.d.ts +10 -0
  153. package/dist/index.d.ts.map +1 -0
  154. package/dist/index.js +7 -0
  155. package/dist/index.js.map +1 -0
  156. package/dist/limits.d.ts +22 -0
  157. package/dist/limits.d.ts.map +1 -0
  158. package/dist/limits.js +39 -0
  159. package/dist/limits.js.map +1 -0
  160. package/dist/node/index.d.ts +13 -0
  161. package/dist/node/index.d.ts.map +1 -0
  162. package/dist/node/index.js +448 -0
  163. package/dist/node/index.js.map +1 -0
  164. package/dist/node/zip/RandomAccess.d.ts +12 -0
  165. package/dist/node/zip/RandomAccess.d.ts.map +1 -0
  166. package/dist/node/zip/RandomAccess.js +38 -0
  167. package/dist/node/zip/RandomAccess.js.map +1 -0
  168. package/dist/node/zip/Sink.d.ts +17 -0
  169. package/dist/node/zip/Sink.d.ts.map +1 -0
  170. package/dist/node/zip/Sink.js +45 -0
  171. package/dist/node/zip/Sink.js.map +1 -0
  172. package/dist/node/zip/ZipReader.d.ts +51 -0
  173. package/dist/node/zip/ZipReader.d.ts.map +1 -0
  174. package/dist/node/zip/ZipReader.js +1540 -0
  175. package/dist/node/zip/ZipReader.js.map +1 -0
  176. package/dist/node/zip/ZipWriter.d.ts +21 -0
  177. package/dist/node/zip/ZipWriter.d.ts.map +1 -0
  178. package/dist/node/zip/ZipWriter.js +196 -0
  179. package/dist/node/zip/ZipWriter.js.map +1 -0
  180. package/dist/node/zip/entryStream.d.ts +22 -0
  181. package/dist/node/zip/entryStream.d.ts.map +1 -0
  182. package/dist/node/zip/entryStream.js +241 -0
  183. package/dist/node/zip/entryStream.js.map +1 -0
  184. package/dist/node/zip/entryWriter.d.ts +54 -0
  185. package/dist/node/zip/entryWriter.d.ts.map +1 -0
  186. package/dist/node/zip/entryWriter.js +512 -0
  187. package/dist/node/zip/entryWriter.js.map +1 -0
  188. package/dist/node/zip/index.d.ts +8 -0
  189. package/dist/node/zip/index.d.ts.map +1 -0
  190. package/dist/node/zip/index.js +5 -0
  191. package/dist/node/zip/index.js.map +1 -0
  192. package/dist/reader/RandomAccess.d.ts +55 -0
  193. package/dist/reader/RandomAccess.d.ts.map +1 -0
  194. package/dist/reader/RandomAccess.js +528 -0
  195. package/dist/reader/RandomAccess.js.map +1 -0
  196. package/dist/reader/ZipReader.d.ts +89 -0
  197. package/dist/reader/ZipReader.d.ts.map +1 -0
  198. package/dist/reader/ZipReader.js +1359 -0
  199. package/dist/reader/ZipReader.js.map +1 -0
  200. package/dist/reader/centralDirectory.d.ts +40 -0
  201. package/dist/reader/centralDirectory.d.ts.map +1 -0
  202. package/dist/reader/centralDirectory.js +311 -0
  203. package/dist/reader/centralDirectory.js.map +1 -0
  204. package/dist/reader/entryStream.d.ts +22 -0
  205. package/dist/reader/entryStream.d.ts.map +1 -0
  206. package/dist/reader/entryStream.js +122 -0
  207. package/dist/reader/entryStream.js.map +1 -0
  208. package/dist/reader/eocd.d.ts +22 -0
  209. package/dist/reader/eocd.d.ts.map +1 -0
  210. package/dist/reader/eocd.js +184 -0
  211. package/dist/reader/eocd.js.map +1 -0
  212. package/dist/reader/httpZipErrors.d.ts +4 -0
  213. package/dist/reader/httpZipErrors.d.ts.map +1 -0
  214. package/dist/reader/httpZipErrors.js +48 -0
  215. package/dist/reader/httpZipErrors.js.map +1 -0
  216. package/dist/reader/localHeader.d.ts +15 -0
  217. package/dist/reader/localHeader.d.ts.map +1 -0
  218. package/dist/reader/localHeader.js +37 -0
  219. package/dist/reader/localHeader.js.map +1 -0
  220. package/dist/reportSchema.d.ts +3 -0
  221. package/dist/reportSchema.d.ts.map +1 -0
  222. package/dist/reportSchema.js +3 -0
  223. package/dist/reportSchema.js.map +1 -0
  224. package/dist/streams/adapters.d.ts +10 -0
  225. package/dist/streams/adapters.d.ts.map +1 -0
  226. package/dist/streams/adapters.js +54 -0
  227. package/dist/streams/adapters.js.map +1 -0
  228. package/dist/streams/buffer.d.ts +5 -0
  229. package/dist/streams/buffer.d.ts.map +1 -0
  230. package/dist/streams/buffer.js +44 -0
  231. package/dist/streams/buffer.js.map +1 -0
  232. package/dist/streams/crcTransform.d.ts +15 -0
  233. package/dist/streams/crcTransform.d.ts.map +1 -0
  234. package/dist/streams/crcTransform.js +30 -0
  235. package/dist/streams/crcTransform.js.map +1 -0
  236. package/dist/streams/emit.d.ts +7 -0
  237. package/dist/streams/emit.d.ts.map +1 -0
  238. package/dist/streams/emit.js +13 -0
  239. package/dist/streams/emit.js.map +1 -0
  240. package/dist/streams/limits.d.ts +16 -0
  241. package/dist/streams/limits.d.ts.map +1 -0
  242. package/dist/streams/limits.js +39 -0
  243. package/dist/streams/limits.js.map +1 -0
  244. package/dist/streams/measure.d.ts +5 -0
  245. package/dist/streams/measure.d.ts.map +1 -0
  246. package/dist/streams/measure.js +9 -0
  247. package/dist/streams/measure.js.map +1 -0
  248. package/dist/streams/progress.d.ts +8 -0
  249. package/dist/streams/progress.d.ts.map +1 -0
  250. package/dist/streams/progress.js +69 -0
  251. package/dist/streams/progress.js.map +1 -0
  252. package/dist/streams/web.d.ts +5 -0
  253. package/dist/streams/web.d.ts.map +1 -0
  254. package/dist/streams/web.js +33 -0
  255. package/dist/streams/web.js.map +1 -0
  256. package/dist/tar/TarReader.d.ts +41 -0
  257. package/dist/tar/TarReader.d.ts.map +1 -0
  258. package/dist/tar/TarReader.js +930 -0
  259. package/dist/tar/TarReader.js.map +1 -0
  260. package/dist/tar/TarWriter.d.ts +25 -0
  261. package/dist/tar/TarWriter.d.ts.map +1 -0
  262. package/dist/tar/TarWriter.js +307 -0
  263. package/dist/tar/TarWriter.js.map +1 -0
  264. package/dist/tar/index.d.ts +4 -0
  265. package/dist/tar/index.d.ts.map +1 -0
  266. package/dist/tar/index.js +3 -0
  267. package/dist/tar/index.js.map +1 -0
  268. package/dist/tar/types.d.ts +67 -0
  269. package/dist/tar/types.d.ts.map +1 -0
  270. package/dist/tar/types.js +2 -0
  271. package/dist/tar/types.js.map +1 -0
  272. package/dist/text/caseFold.d.ts +7 -0
  273. package/dist/text/caseFold.d.ts.map +1 -0
  274. package/dist/text/caseFold.js +45 -0
  275. package/dist/text/caseFold.js.map +1 -0
  276. package/dist/types.d.ts +190 -0
  277. package/dist/types.d.ts.map +1 -0
  278. package/dist/types.js +2 -0
  279. package/dist/types.js.map +1 -0
  280. package/dist/web/index.d.ts +11 -0
  281. package/dist/web/index.d.ts.map +1 -0
  282. package/dist/web/index.js +95 -0
  283. package/dist/web/index.js.map +1 -0
  284. package/dist/writer/Sink.d.ts +21 -0
  285. package/dist/writer/Sink.d.ts.map +1 -0
  286. package/dist/writer/Sink.js +24 -0
  287. package/dist/writer/Sink.js.map +1 -0
  288. package/dist/writer/ZipWriter.d.ts +27 -0
  289. package/dist/writer/ZipWriter.d.ts.map +1 -0
  290. package/dist/writer/ZipWriter.js +153 -0
  291. package/dist/writer/ZipWriter.js.map +1 -0
  292. package/dist/writer/centralDirectoryWriter.d.ts +8 -0
  293. package/dist/writer/centralDirectoryWriter.d.ts.map +1 -0
  294. package/dist/writer/centralDirectoryWriter.js +77 -0
  295. package/dist/writer/centralDirectoryWriter.js.map +1 -0
  296. package/dist/writer/entryWriter.d.ts +54 -0
  297. package/dist/writer/entryWriter.d.ts.map +1 -0
  298. package/dist/writer/entryWriter.js +327 -0
  299. package/dist/writer/entryWriter.js.map +1 -0
  300. package/dist/writer/finalize.d.ts +10 -0
  301. package/dist/writer/finalize.d.ts.map +1 -0
  302. package/dist/writer/finalize.js +56 -0
  303. package/dist/writer/finalize.js.map +1 -0
  304. package/dist/zip/index.d.ts +8 -0
  305. package/dist/zip/index.d.ts.map +1 -0
  306. package/dist/zip/index.js +5 -0
  307. package/dist/zip/index.js.map +1 -0
  308. package/jsr.json +41 -0
  309. package/package.json +117 -0
  310. package/schemas/audit-report.schema.json +38 -0
  311. package/schemas/capabilities-report.schema.json +25 -0
  312. package/schemas/detection-report.schema.json +23 -0
  313. package/schemas/error.schema.json +22 -0
  314. package/schemas/normalize-report.schema.json +47 -0
@@ -0,0 +1,930 @@
1
+ import { ArchiveError } from '../archive/errors.js';
2
+ import { normalizePathForCollision, toCollisionKey } from '../text/caseFold.js';
3
+ import { readAllBytes } from '../streams/buffer.js';
4
+ import { readableFromBytes } from '../streams/web.js';
5
+ import { BYTEFOLD_REPORT_SCHEMA_VERSION } from '../reportSchema.js';
6
+ import { AGENT_RESOURCE_LIMITS, DEFAULT_RESOURCE_LIMITS } from '../limits.js';
7
+ import { TarWriter } from './TarWriter.js';
8
+ import { throwIfAborted } from '../abort.js';
9
+ import { decodeNullTerminatedUtf8 } from '../binary.js';
10
// A TAR header or data block is always 512 bytes (ustar/pax framing).
const BLOCK_SIZE = 512;
// Module-local aliases for the shared resource-limit presets from ../limits.js.
const DEFAULT_LIMITS = DEFAULT_RESOURCE_LIMITS;
const AGENT_LIMITS = AGENT_RESOURCE_LIMITS;
// Shared UTF-8 decoder instance; TextDecoder is stateless across decode() calls here.
const TEXT_DECODER = new TextDecoder('utf-8');
14
/**
 * Read TAR archives from bytes, streams, or URLs.
 *
 * The whole archive is held in memory (`this.data`); entries are parsed once
 * by `init()` and cached in `entriesList`. Safety checks (path issues, size
 * limits, duplicate/Unicode/case collisions, symlinks) are reported through
 * `audit()` / `assertSafe()`, and `normalizeToWritable()` re-emits a cleaned
 * archive through TarWriter.
 */
export class TarReader {
    data;           // full archive bytes (Uint8Array)
    profile;        // resolved profile name ('strict' | 'compat' | 'agent')
    strict;         // whether header problems are fatal during parsing
    limits;         // normalized resource limits for this reader
    warningsList = [];  // non-fatal issues collected during parsing
    entriesList = null; // cached parsed entries; null until init() runs
    storeEntries;   // whether entries() may be served from the cache
    signal;         // optional AbortSignal propagated to long operations
    /**
     * @param data    Archive bytes. Not copied — caller must not mutate.
     * @param options Optional reader options (profile, isStrict, limits,
     *                shouldStoreEntries, signal).
     */
    constructor(data, options) {
        this.data = data;
        const resolved = resolveReaderProfile(options);
        this.profile = resolved.profile;
        this.strict = resolved.strict;
        this.limits = resolved.limits;
        this.storeEntries = options?.shouldStoreEntries ?? true;
        this.signal = options?.signal;
    }
    /** Create a reader from in-memory bytes and parse the archive eagerly. */
    static async fromUint8Array(data, options) {
        const reader = new TarReader(data, options);
        await reader.init();
        return reader;
    }
    /**
     * Create a reader from a readable stream. The stream is fully buffered;
     * the read cap is taken from the first defined limit, in priority order:
     * maxInputBytes, then maxTotalDecompressedBytes, then
     * maxTotalUncompressedBytes.
     */
    static async fromStream(stream, options) {
        const readOptions = {};
        if (options?.signal)
            readOptions.signal = options.signal;
        if (options?.limits?.maxInputBytes !== undefined) {
            readOptions.maxBytes = options.limits.maxInputBytes;
        }
        else if (options?.limits?.maxTotalDecompressedBytes !== undefined) {
            readOptions.maxBytes = options.limits.maxTotalDecompressedBytes;
        }
        else if (options?.limits?.maxTotalUncompressedBytes !== undefined) {
            readOptions.maxBytes = options.limits.maxTotalUncompressedBytes;
        }
        const data = await readAllBytes(stream, readOptions);
        return TarReader.fromUint8Array(data, options);
    }
    /**
     * Create a reader from a URL via fetch().
     * @throws ArchiveError('ARCHIVE_BAD_HEADER') on a non-2xx HTTP status.
     * NOTE(review): the whole response body is buffered before the input-size
     * limits are applied — confirm this is acceptable for untrusted URLs.
     */
    static async fromUrl(url, options) {
        const response = await fetch(typeof url === 'string' ? url : url.toString(), {
            signal: options?.signal ?? null
        });
        if (!response.ok) {
            throw new ArchiveError('ARCHIVE_BAD_HEADER', `Unexpected HTTP status ${response.status}`);
        }
        const data = new Uint8Array(await response.arrayBuffer());
        return TarReader.fromUint8Array(data, options);
    }
    /**
     * Return stored entries (requires shouldStoreEntries=true).
     * Returns shallow copies so callers cannot mutate the cache.
     */
    entries() {
        if (!this.storeEntries) {
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Entries are not stored; use iterEntries()');
        }
        if (!this.entriesList)
            return [];
        return this.entriesList.map((entry) => ({ ...entry }));
    }
    /** Return a copy of the non-fatal warnings encountered during parsing. */
    warnings() {
        return [...this.warningsList];
    }
    /** Iterate entries (served from the cache populated by init()). */
    async *iterEntries() {
        if (!this.entriesList)
            return;
        for (const entry of this.entriesList) {
            // Yield shallow copies so consumers cannot mutate cached records.
            yield { ...entry };
        }
    }
    /**
     * Open a ReadableStream over one entry's raw contents.
     * NOTE(review): the record is looked up by (name, size) pair — with
     * duplicate names of equal size this returns the first match; confirm
     * callers pass the exact entry object semantics they expect.
     */
    async open(entry) {
        if (!this.entriesList)
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Entries not loaded');
        const record = this.entriesList.find((item) => item.name === entry.name && item.size === entry.size);
        if (!record) {
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Entry not found');
        }
        const start = record.dataOffset;
        const end = start + Number(record.dataSize);
        // subarray() is a zero-copy view into the archive buffer.
        const slice = this.data.subarray(start, end);
        return readableFromBytes(slice);
    }
    /**
     * Audit the archive and return a report of issues (limits, path problems,
     * duplicate / Unicode-NFC / case-fold name collisions, symlinks).
     */
    async audit(options) {
        const settings = this.resolveAuditSettings(options);
        const issues = [];
        const summary = {
            entries: 0,
            warnings: 0,
            errors: 0
        };
        // Record an issue and keep the severity counters in sync.
        const addIssue = (issue) => {
            issues.push(issue);
            if (issue.severity === 'warning')
                summary.warnings += 1;
            if (issue.severity === 'error')
                summary.errors += 1;
        };
        if (!this.entriesList) {
            addIssue({
                code: 'TAR_PARSE_FAILED',
                severity: 'error',
                message: 'Entries not loaded'
            });
            return finalizeAuditReport(issues, summary);
        }
        // Three collision indexes: exact normalized name, NFC-normalized name,
        // and case-folded key — checked in that order of specificity.
        const seenNames = new Map();
        const seenNfc = new Map();
        const seenCase = new Map();
        let total = 0n;  // running uncompressed byte total (BigInt)
        for (const entry of this.entriesList) {
            summary.entries += 1;
            const pathIssues = entryPathIssues(entry.name);
            for (const issue of pathIssues) {
                addIssue(issue);
            }
            total += entry.size;
            // NOTE(review): once the total exceeds the limit this issue is
            // re-added for every remaining entry — confirm that is intended.
            if (total > settings.limits.maxTotalUncompressedBytes) {
                addIssue({
                    code: 'TAR_LIMIT_EXCEEDED',
                    severity: 'error',
                    message: 'Total uncompressed size exceeds limit'
                });
            }
            if (entry.size > settings.limits.maxUncompressedEntryBytes) {
                addIssue({
                    code: 'TAR_LIMIT_EXCEEDED',
                    severity: 'error',
                    message: 'Entry uncompressed size exceeds limit',
                    entryName: entry.name
                });
            }
            const normalizedName = normalizePathForCollision(entry.name, entry.isDirectory);
            if (normalizedName) {
                const existing = seenNames.get(normalizedName);
                if (existing !== undefined) {
                    // Exact duplicate after path normalization.
                    addIssue({
                        code: 'TAR_DUPLICATE_ENTRY',
                        severity: settings.strict ? 'error' : 'warning',
                        message: `Duplicate entry name: ${existing} vs ${entry.name}`,
                        entryName: entry.name,
                        details: { otherName: existing, key: normalizedName, collisionKind: 'duplicate' }
                    });
                }
                else {
                    const nfcName = normalizedName.normalize('NFC');
                    const caseKey = toCollisionKey(normalizedName, entry.isDirectory);
                    const existingNfc = seenNfc.get(nfcName);
                    if (existingNfc && existingNfc.normalized !== normalizedName) {
                        // Same NFC form but different raw names: Unicode trick.
                        addIssue({
                            code: 'TAR_UNICODE_COLLISION',
                            severity: 'error',
                            message: `Unicode normalization collision: ${existingNfc.original} vs ${entry.name}`,
                            entryName: entry.name,
                            details: { otherName: existingNfc.original, key: nfcName, collisionKind: 'unicode_nfc' }
                        });
                    }
                    else {
                        const existingCase = seenCase.get(caseKey);
                        if (existingCase && existingCase.nfc !== nfcName) {
                            // Names differ only by case (per toCollisionKey).
                            addIssue({
                                code: 'TAR_CASE_COLLISION',
                                severity: settings.strict ? 'error' : 'warning',
                                message: `Case-insensitive name collision: ${existingCase.original} vs ${entry.name}`,
                                entryName: entry.name,
                                details: { otherName: existingCase.original, key: caseKey, collisionKind: 'casefold' }
                            });
                        }
                    }
                    // Register this entry in all three indexes for later entries.
                    seenNames.set(normalizedName, entry.name);
                    seenNfc.set(nfcName, { original: entry.name, normalized: normalizedName });
                    seenCase.set(caseKey, { original: entry.name, nfc: nfcName });
                }
            }
            if (entry.isSymlink && settings.symlinkSeverity !== 'info') {
                addIssue({
                    code: 'TAR_SYMLINK_PRESENT',
                    severity: settings.symlinkSeverity,
                    message: 'Symlink entries are present',
                    entryName: entry.name
                });
            }
        }
        // totalBytes is only reported if it fits in a safe JS number.
        const totalBytes = toSafeNumber(total);
        if (totalBytes !== undefined)
            summary.totalBytes = totalBytes;
        return finalizeAuditReport(issues, summary);
    }
    /**
     * Audit and throw if the archive fails the selected profile.
     * @throws ArchiveError('ARCHIVE_AUDIT_FAILED') when the report is not ok.
     */
    async assertSafe(options) {
        const report = await this.audit(options);
        if (!report.ok) {
            throw new ArchiveError('ARCHIVE_AUDIT_FAILED', 'TAR audit failed');
        }
    }
    /**
     * Re-emit the archive to a writable stream in normalized form and return
     * a report. By default the output is deterministic (epoch mtime, uid/gid
     * zeroed, default modes) and symlinks/hardlinks are errors; the
     * on* options choose between 'drop' and 'error' per hazard class.
     */
    async normalizeToWritable(writable, options) {
        if (!this.entriesList) {
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Entries not loaded');
        }
        const signal = options?.signal ?? this.signal;
        const deterministic = options?.isDeterministic ?? true;
        const onDuplicate = options?.onDuplicate ?? 'error';
        const onCaseCollision = options?.onCaseCollision ?? 'error';
        const onSymlink = options?.onSymlink ?? 'error';
        const onUnsupported = options?.onUnsupported ?? 'error';
        const issues = [];
        const summary = {
            entries: 0,
            outputEntries: 0,
            droppedEntries: 0,
            renamedEntries: 0,  // presumably updated by collectNormalizedEntries via the shared summary — TODO confirm
            warnings: 0,
            errors: 0
        };
        const addIssue = (issue) => {
            issues.push(issue);
            if (issue.severity === 'warning')
                summary.warnings += 1;
            if (issue.severity === 'error')
                summary.errors += 1;
        };
        // Resolve renames/drops for name collisions up front; issues and
        // summary counters are mutated through the callbacks passed in.
        const normalized = collectNormalizedEntries(this.entriesList, {
            deterministic,
            onDuplicate,
            onCaseCollision,
            onSymlink,
            addIssue,
            summary
        });
        // Conditional spreads avoid passing explicit `undefined` options.
        const writerOptions = {
            ...(deterministic ? { isDeterministic: deterministic } : {}),
            ...(signal ? { signal } : {})
        };
        const writer = TarWriter.toWritable(writable, writerOptions);
        for (const item of normalized) {
            throwIfAborted(signal);
            summary.entries += 1;
            if (item.dropped)
                continue;
            const entry = item.entry;
            if (entry.isSymlink) {
                addIssue({
                    code: 'TAR_SYMLINK_PRESENT',
                    severity: 'error',
                    message: 'Symlink entries are not allowed during normalization',
                    entryName: entry.name
                });
                if (onSymlink === 'drop') {
                    summary.droppedEntries += 1;
                    continue;
                }
                throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Symlink entries are not allowed during normalization', {
                    entryName: entry.name
                });
            }
            if (entry.type === 'link') {
                addIssue({
                    code: 'TAR_UNSUPPORTED_ENTRY',
                    severity: 'error',
                    message: 'Hardlink entries are not allowed during normalization',
                    entryName: entry.name
                });
                if (onUnsupported === 'drop') {
                    summary.droppedEntries += 1;
                    continue;
                }
                throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Hardlink entries are not allowed during normalization', {
                    entryName: entry.name
                });
            }
            // Directories carry no payload; files get a zero-copy view.
            const data = entry.isDirectory
                ? new Uint8Array(0)
                : this.data.subarray(entry.dataOffset, entry.dataOffset + Number(entry.dataSize));
            // Deterministic mode pins mtime to the epoch and mode to defaults.
            const mtime = deterministic ? new Date(0) : entry.mtime;
            const mode = deterministic ? defaultMode(entry) : clampMode(entry.mode ?? defaultMode(entry));
            const addOptions = {
                type: entry.type,
                ...(mtime ? { mtime } : {}),
                ...(mode !== undefined ? { mode } : {}),
                ...(entry.uid !== undefined ? { uid: deterministic ? 0 : entry.uid } : {}),
                ...(entry.gid !== undefined ? { gid: deterministic ? 0 : entry.gid } : {}),
                ...(entry.linkName !== undefined ? { linkName: entry.linkName } : {}),
                ...(entry.pax ? { pax: entry.pax } : {})
            };
            try {
                await writer.add(item.normalizedName, data, addOptions);
            }
            catch (err) {
                // Writer rejections are reported, then dropped or rethrown
                // according to the onUnsupported policy.
                addIssue({
                    code: 'TAR_UNSUPPORTED_ENTRY',
                    severity: 'error',
                    message: err.message,
                    entryName: entry.name
                });
                if (onUnsupported === 'drop') {
                    summary.droppedEntries += 1;
                    continue;
                }
                throw err;
            }
            summary.outputEntries += 1;
        }
        await writer.close();
        return finalizeNormalizeReport(issues, summary);
    }
    /**
     * @internal Resolve effective audit settings: when the requested profile
     * matches the reader's own, reuse its strict/limits; otherwise fall back
     * to that profile's defaults. Explicit options always win.
     */
    resolveAuditSettings(options) {
        const profile = options?.profile ?? this.profile;
        const defaults = profile === this.profile ? { strict: this.strict, limits: this.limits } : resolveProfileDefaults(profile);
        const strict = options?.isStrict ?? defaults.strict;
        const limits = normalizeLimits(options?.limits, defaults.limits);
        return {
            profile,
            strict,
            limits,
            // Only the 'agent' profile treats symlinks as fatal.
            symlinkSeverity: profile === 'agent' ? 'error' : 'warning'
        };
    }
    /** @internal Parse the archive once and populate the entry/warning caches. */
    async init() {
        const { entries, warnings } = parseTarEntries(this.data, {
            strict: this.strict,
            limits: this.limits
        });
        this.entriesList = entries;
        this.warningsList.push(...warnings);
    }
}
348
/**
 * Resolve the effective profile, strictness, and limits from reader options.
 * Profiles: 'strict' (default), 'compat' (lenient), 'agent' (tight limits).
 * An explicit isStrict option always overrides the profile default.
 */
function resolveReaderProfile(options) {
    const profile = options?.profile ?? 'strict';
    // Only the 'agent' profile swaps in the tighter agent limit preset.
    const baseLimits = profile === 'agent' ? AGENT_LIMITS : DEFAULT_LIMITS;
    // Every profile except 'compat' is strict by default.
    const strict = options?.isStrict ?? (profile !== 'compat');
    return {
        profile,
        strict,
        limits: normalizeLimits(options?.limits, baseLimits)
    };
}
356
/**
 * Default strictness and limit preset for a named profile.
 * Unknown profile names fall back to strict behavior with default limits.
 */
function resolveProfileDefaults(profile) {
    switch (profile) {
        case 'compat':
            return { strict: false, limits: DEFAULT_LIMITS };
        case 'agent':
            return { strict: true, limits: AGENT_LIMITS };
        default:
            return { strict: true, limits: DEFAULT_LIMITS };
    }
}
363
/** @internal Expose the default limit preset for a profile (test hook). */
export function __getTarDefaultsForProfile(profile) {
    const { limits } = resolveProfileDefaults(profile);
    return limits;
}
367
/**
 * Merge user-supplied limit overrides over a defaults preset, coercing byte
 * counts to bigint and clamping numeric knobs into their valid ranges.
 *
 * The previous implementation repeated the clamp pattern
 * `typeof x === 'number' && Number.isFinite(x) ? Math.max(min, Math.floor(x)) : default`
 * eight times; it is extracted into a local helper here. Behavior is unchanged.
 *
 * @param limits   Partial override object from the caller (may be undefined).
 * @param defaults Fully-populated preset supplying every fallback value.
 * @returns A complete, normalized limits object.
 * @throws RangeError/SyntaxError from BigInt() if a byte limit is not an
 *         integer-valued number, bigint, or numeric string (same as before).
 */
function normalizeLimits(limits, defaults = DEFAULT_LIMITS) {
    // Coerce an optional number|bigint to bigint; undefined passes through.
    const asBigInt = (value) => value === undefined
        ? undefined
        : (typeof value === 'bigint' ? value : BigInt(value));
    // Clamp a numeric override to an integer >= min; non-finite or missing
    // values fall back to the preset.
    const clampInt = (value, min, fallback) => typeof value === 'number' && Number.isFinite(value)
        ? Math.max(min, Math.floor(value))
        : fallback;
    // "Decompressed" and "uncompressed" totals are aliases; the decompressed
    // spelling wins when both are given.
    const maxTotal = asBigInt(limits?.maxTotalDecompressedBytes ?? limits?.maxTotalUncompressedBytes) ??
        defaults.maxTotalUncompressedBytes;
    return {
        maxEntries: limits?.maxEntries ?? defaults.maxEntries,
        maxUncompressedEntryBytes: asBigInt(limits?.maxUncompressedEntryBytes) ?? defaults.maxUncompressedEntryBytes,
        maxTotalUncompressedBytes: maxTotal,
        maxTotalDecompressedBytes: maxTotal,
        maxCompressionRatio: limits?.maxCompressionRatio ?? defaults.maxCompressionRatio,
        maxDictionaryBytes: asBigInt(limits?.maxDictionaryBytes) ?? defaults.maxDictionaryBytes,
        // XZ dictionary limit falls back to the generic dictionary limit.
        maxXzDictionaryBytes: asBigInt(limits?.maxXzDictionaryBytes ?? limits?.maxDictionaryBytes) ?? defaults.maxXzDictionaryBytes,
        maxXzBufferedBytes: clampInt(limits?.maxXzBufferedBytes, 1, defaults.maxXzBufferedBytes),
        maxXzIndexRecords: clampInt(limits?.maxXzIndexRecords, 1, defaults.maxXzIndexRecords),
        maxXzIndexBytes: clampInt(limits?.maxXzIndexBytes, 8, defaults.maxXzIndexBytes),
        maxXzPreflightBlockHeaders: clampInt(limits?.maxXzPreflightBlockHeaders, 0, defaults.maxXzPreflightBlockHeaders),
        maxZipCentralDirectoryBytes: clampInt(limits?.maxZipCentralDirectoryBytes, 0, defaults.maxZipCentralDirectoryBytes),
        maxZipCommentBytes: clampInt(limits?.maxZipCommentBytes, 0, defaults.maxZipCommentBytes),
        // 22 bytes is the minimum meaningful EOCD search window.
        maxZipEocdSearchBytes: clampInt(limits?.maxZipEocdSearchBytes, 22, defaults.maxZipEocdSearchBytes),
        // bzip2 block size is additionally capped at its format maximum of 9.
        maxBzip2BlockSize: typeof limits?.maxBzip2BlockSize === 'number' && Number.isFinite(limits.maxBzip2BlockSize)
            ? Math.max(1, Math.min(9, Math.floor(limits.maxBzip2BlockSize)))
            : defaults.maxBzip2BlockSize,
        maxInputBytes: asBigInt(limits?.maxInputBytes) ?? defaults.maxInputBytes
    };
}
405
/**
 * Coerce a number or bigint to bigint; undefined passes through untouched.
 * Non-integer numbers make BigInt() throw, matching native semantics.
 */
function toBigInt(value) {
    if (value === undefined)
        return undefined;
    if (typeof value === 'bigint')
        return value;
    return BigInt(value);
}
410
/**
 * Parse all entry headers out of an in-memory TAR byte buffer.
 *
 * Walks the archive block-by-block (BLOCK_SIZE = 512 per ustar), validates
 * header checksums, applies PAX extended/global records to the following
 * entries, and enforces the configured entry-count and size limits.
 *
 * @param {Uint8Array} data - The complete TAR archive bytes.
 * @param {{ strict: boolean, limits: { maxEntries: number, maxUncompressedEntryBytes: bigint, maxTotalUncompressedBytes: bigint } }} options
 * @returns {{ entries: object[], warnings: object[] }} Parsed entries plus
 *   non-fatal issues collected in non-strict mode.
 * @throws {ArchiveError} ARCHIVE_BAD_HEADER (strict checksum mismatch),
 *   ARCHIVE_TRUNCATED (entry data runs past end of buffer),
 *   ARCHIVE_LIMIT_EXCEEDED (entry count / per-entry size / total size).
 */
function parseTarEntries(data, options) {
    const entries = [];
    const warnings = [];
    let offset = 0;
    // PAX state: 'g' (global) records apply to every subsequent entry;
    // 'x' (extended) records apply only to the next real entry.
    let globalPax = null;
    let pendingPax = null;
    let entryCount = 0;
    let totalBytes = 0n;
    while (offset + BLOCK_SIZE <= data.length) {
        const header = data.subarray(offset, offset + BLOCK_SIZE);
        // Two consecutive all-zero blocks mark end-of-archive. A single zero
        // block NOT followed by a second one falls through to header parsing.
        if (isZeroBlock(header)) {
            const next = data.subarray(offset + BLOCK_SIZE, offset + BLOCK_SIZE * 2);
            if (next.length === BLOCK_SIZE && isZeroBlock(next)) {
                break;
            }
        }
        // Checksum field occupies header bytes 148-155 (stored in octal).
        const checksumStored = parseOctal(header.subarray(148, 156));
        const checksumActual = computeChecksum(header);
        if (checksumStored !== undefined && Number(checksumStored) !== checksumActual) {
            const issue = {
                code: 'TAR_BAD_HEADER',
                severity: options.strict ? 'error' : 'warning',
                message: 'Header checksum mismatch',
                offset: BigInt(offset).toString()
            };
            if (options.strict) {
                throw new ArchiveError('ARCHIVE_BAD_HEADER', issue.message, { offset: BigInt(offset) });
            }
            warnings.push(issue);
        }
        // Fixed-width ustar header layout:
        // name[100] mode[8] uid[8] gid[8] size[12] mtime[12] chksum[8]
        // typeflag[1] linkname[100] ... prefix[155] at offset 345.
        const name = readString(header, 0, 100);
        const mode = parseNumeric(header.subarray(100, 108));
        const uid = parseNumeric(header.subarray(108, 116));
        const gid = parseNumeric(header.subarray(116, 124));
        let size = parseNumeric(header.subarray(124, 136));
        const mtime = parseNumeric(header.subarray(136, 148));
        // Empty typeflag defaults to '0' (regular file).
        const typeflag = readString(header, 156, 1) || '0';
        const linkName = readString(header, 157, 100);
        const prefix = readString(header, 345, 155);
        let fullName = prefix ? `${prefix}/${name}` : name;
        const blockSize = BigInt(BLOCK_SIZE);
        const dataOffset = offset + BLOCK_SIZE;
        const sizeBytes = size ?? 0n;
        const dataEnd = dataOffset + Number(sizeBytes);
        // Entry data is padded up to a whole number of 512-byte blocks.
        const padded = Number((sizeBytes + blockSize - 1n) / blockSize * blockSize);
        if (dataEnd > data.length) {
            throw new ArchiveError('ARCHIVE_TRUNCATED', 'TAR entry truncated', { offset: BigInt(offset) });
        }
        if (typeflag === 'x' || typeflag === 'g') {
            // PAX header pseudo-entry: absorb its records, emit no entry.
            const paxData = data.subarray(dataOffset, dataOffset + Number(sizeBytes));
            const records = parsePaxRecords(paxData);
            if (typeflag === 'g') {
                // Global records merge into (and reset) the carried state.
                globalPax = { ...(globalPax ?? {}), ...records };
                pendingPax = null;
            }
            else {
                // Extended records layer on top of the global ones.
                pendingPax = { ...(globalPax ?? {}), ...records };
            }
            offset = dataOffset + padded;
            continue;
        }
        const pax = pendingPax ? { ...pendingPax } : globalPax ? { ...globalPax } : undefined;
        pendingPax = null;
        // PAX overrides win over the fixed-width header fields.
        if (pax?.path) {
            fullName = pax.path;
        }
        let resolvedLink = linkName;
        if (pax?.linkpath) {
            resolvedLink = pax.linkpath;
        }
        if (pax?.size) {
            const parsedSize = parsePaxSize(pax.size);
            if (parsedSize !== undefined) {
                size = parsedSize;
            }
        }
        const entryType = typeFromFlag(typeflag);
        // PAX mtime (possibly fractional seconds) takes precedence over the
        // octal header mtime (whole seconds).
        const entryMtime = pax?.mtime ? parseMtime(pax.mtime) : mtime !== undefined ? new Date(Number(mtime) * 1000) : undefined;
        const isDirectory = entryType === 'directory' || fullName.endsWith('/');
        const isSymlink = entryType === 'symlink';
        const entry = {
            name: fullName,
            size: size ?? 0n,
            type: entryType,
            isDirectory,
            isSymlink,
            dataOffset,
            dataSize: size ?? 0n,
            ...(pax ? { pax } : {})
        };
        // Optional fields are attached only when present so JSON output
        // stays compact.
        if (entryMtime)
            entry.mtime = entryMtime;
        if (mode !== undefined)
            entry.mode = Number(mode);
        if (uid !== undefined)
            entry.uid = Number(uid);
        if (gid !== undefined)
            entry.gid = Number(gid);
        if (resolvedLink)
            entry.linkName = resolvedLink;
        entries.push(entry);
        entryCount += 1;
        totalBytes += entry.size;
        // Limits are checked after the entry is counted, so the limit value
        // itself is inclusive (exceeding it by one triggers the throw).
        if (entryCount > options.limits.maxEntries) {
            throw new ArchiveError('ARCHIVE_LIMIT_EXCEEDED', 'Too many TAR entries');
        }
        if (entry.size > options.limits.maxUncompressedEntryBytes) {
            throw new ArchiveError('ARCHIVE_LIMIT_EXCEEDED', 'TAR entry exceeds size limit', { entryName: entry.name });
        }
        if (totalBytes > options.limits.maxTotalUncompressedBytes) {
            throw new ArchiveError('ARCHIVE_LIMIT_EXCEEDED', 'TAR total size exceeds limit');
        }
        offset = dataOffset + padded;
    }
    return { entries, warnings };
}
/**
 * Decode a fixed-width, NUL-terminated byte field as trimmed UTF-8 text.
 *
 * @param {Uint8Array} buffer - Source bytes (typically a TAR header block).
 * @param {number} start - Field offset within the buffer.
 * @param {number} length - Fixed field width in bytes.
 * @returns {string} The decoded text up to the first NUL byte, whitespace-trimmed.
 */
function readString(buffer, start, length) {
    const field = buffer.subarray(start, start + length);
    const nul = field.indexOf(0);
    const bytes = nul === -1 ? field : field.subarray(0, nul);
    return TEXT_DECODER.decode(bytes).trim();
}
/**
 * Parse a TAR numeric header field, dispatching on its encoding.
 *
 * @param {Uint8Array} buffer - Raw field bytes.
 * @returns {bigint | undefined} Parsed value, or `undefined` for an empty
 *   or unparseable field.
 */
function parseNumeric(buffer) {
    if (buffer.length === 0) {
        return undefined;
    }
    // The GNU/STAR base-256 extension is flagged by the high bit of byte 0;
    // otherwise the field is classic ASCII octal.
    const isBase256 = ((buffer[0] ?? 0) & 0x80) !== 0;
    return isBase256 ? parseBase256(buffer) : parseOctal(buffer);
}
/**
 * Parse an ASCII octal TAR field into a BigInt.
 *
 * @param {Uint8Array} buffer - Raw field bytes (NUL/space padded octal digits).
 * @returns {bigint | undefined} Parsed value, or `undefined` when the field
 *   is blank or not valid octal.
 */
function parseOctal(buffer) {
    const text = decodeNullTerminatedUtf8(buffer).trim();
    if (text.length === 0) {
        return undefined;
    }
    const parsed = parseInt(text, 8);
    return Number.isFinite(parsed) ? BigInt(parsed) : undefined;
}
/**
 * Parse a GNU/STAR base-256 (binary big-endian) TAR numeric field.
 *
 * The most significant bit of the field is the encoding marker / sign bit
 * and is masked off; the remaining bits are read as an unsigned big-endian
 * integer. (Negative two's-complement values are NOT decoded as negative —
 * the sign bit is simply cleared, matching the original implementation.)
 *
 * @param {Uint8Array} buffer - Raw field bytes; must be non-empty.
 * @returns {bigint} The decoded non-negative value.
 */
function parseBase256(buffer) {
    let value = 0n;
    for (let i = 0; i < buffer.length; i += 1) {
        value = (value << 8n) | BigInt(buffer[i] & 0xff);
    }
    // Clear the marker/sign bit in the most significant position.
    const mask = (1n << BigInt(buffer.length * 8 - 1)) - 1n;
    return value & mask;
}
/**
 * Convert a PAX mtime string (seconds since epoch, possibly fractional)
 * into a Date.
 *
 * @param {string} value - Decimal seconds, e.g. "1700000000.123".
 * @returns {Date | undefined} The timestamp, or `undefined` if not numeric.
 */
function parseMtime(value) {
    const seconds = Number(value);
    return Number.isFinite(seconds) ? new Date(seconds * 1000) : undefined;
}
/**
 * Parse a PAX `size` record value into a non-negative BigInt byte count.
 *
 * Integer strings are parsed exactly via BigInt; strings containing a
 * decimal point are floored. Negative values clamp to 0n.
 *
 * @param {string} value - The raw PAX record value.
 * @returns {bigint | undefined} Byte count, or `undefined` when blank or
 *   unparseable.
 */
function parsePaxSize(value) {
    const text = value.trim();
    if (text.length === 0) {
        return undefined;
    }
    try {
        if (!text.includes('.')) {
            const exact = BigInt(text);
            return exact < 0n ? 0n : exact;
        }
        const approx = Number(text);
        if (!Number.isFinite(approx)) {
            return undefined;
        }
        return BigInt(Math.max(0, Math.floor(approx)));
    }
    catch {
        // BigInt() throws on malformed input; treat as "no size".
        return undefined;
    }
}
/**
 * Compute the ustar header checksum: the byte sum of the 512-byte header
 * with the checksum field itself (bytes 148-155) counted as ASCII spaces.
 *
 * @param {Uint8Array} header - A TAR header block.
 * @returns {number} The unsigned checksum.
 */
function computeChecksum(header) {
    let sum = 0;
    for (let i = 0; i < header.length; i += 1) {
        const inChecksumField = i >= 148 && i < 156;
        sum += inChecksumField ? 0x20 : header[i];
    }
    return sum;
}
/**
 * Check whether a block consists entirely of zero bytes (TAR end-of-archive
 * marker blocks).
 *
 * @param {Uint8Array} block - The block to inspect.
 * @returns {boolean} True when every byte is 0 (vacuously true when empty).
 */
function isZeroBlock(block) {
    return block.every((byte) => byte === 0);
}
/**
 * Map a TAR header typeflag character to an entry-type name.
 *
 * @param {string} flag - The single-character typeflag field.
 * @returns {string} One of 'file' | 'link' | 'symlink' | 'character' |
 *   'block' | 'directory' | 'fifo', or 'unknown' for anything else.
 */
function typeFromFlag(flag) {
    const byFlag = new Map([
        ['0', 'file'],
        ['\0', 'file'],
        ['1', 'link'],
        ['2', 'symlink'],
        ['3', 'character'],
        ['4', 'block'],
        ['5', 'directory'],
        ['6', 'fifo']
    ]);
    return byFlag.get(flag) ?? 'unknown';
}
/**
 * Parse PAX extended-header records from a data block.
 *
 * Each record has the form "<len> <key>=<value>\n" where <len> is the
 * decimal byte length of the ENTIRE record, including the length digits,
 * the space, and the trailing newline. Malformed records terminate parsing.
 *
 * @param {Uint8Array} buffer - The PAX entry's data bytes.
 * @returns {Record<string, string>} key → value map (trailing newline stripped).
 */
function parsePaxRecords(buffer) {
    const records = {};
    let cursor = 0;
    while (cursor < buffer.length) {
        const space = buffer.indexOf(0x20, cursor);
        if (space === -1) {
            break;
        }
        const lengthText = TEXT_DECODER.decode(buffer.subarray(cursor, space));
        const recordLength = parseInt(lengthText, 10);
        // A non-positive length would stall the loop; bail out instead.
        if (!Number.isFinite(recordLength) || recordLength <= 0) {
            break;
        }
        const body = TEXT_DECODER.decode(buffer.subarray(space + 1, cursor + recordLength));
        const eq = body.indexOf('=');
        if (eq > 0) {
            const key = body.slice(0, eq);
            records[key] = body.slice(eq + 1).replace(/\n$/, '');
        }
        cursor += recordLength;
    }
    return records;
}
/**
 * Audit a TAR entry name for path-safety problems without throwing.
 *
 * Checks, in order: embedded NUL bytes (short-circuits), absolute paths
 * (POSIX or Windows drive-letter), and `..` traversal segments. Backslashes
 * are treated as path separators for the checks.
 *
 * @param {string} entryName - The raw entry name from the archive.
 * @returns {object[]} Zero or more TAR_PATH_TRAVERSAL issues (severity 'error').
 */
function entryPathIssues(entryName) {
    const issues = [];
    if (entryName.includes('\u0000')) {
        issues.push({
            code: 'TAR_PATH_TRAVERSAL',
            severity: 'error',
            message: 'Entry name contains NUL byte',
            entryName
        });
        // A NUL makes further analysis meaningless.
        return issues;
    }
    const unixPath = entryName.replace(/\\/g, '/');
    const isAbsolute = unixPath.startsWith('/') || /^[a-zA-Z]:/.test(unixPath);
    if (isAbsolute) {
        issues.push({
            code: 'TAR_PATH_TRAVERSAL',
            severity: 'error',
            message: 'Absolute paths are not allowed in TAR entries',
            entryName
        });
    }
    const segments = unixPath.split('/').filter((segment) => segment.length > 0);
    if (segments.includes('..')) {
        issues.push({
            code: 'TAR_PATH_TRAVERSAL',
            severity: 'error',
            message: 'Path traversal detected in TAR entry',
            entryName
        });
    }
    return issues;
}
/**
 * Normalize entry names and resolve name collisions across three axes:
 * exact duplicates, Unicode NFC-equivalent names, and case-insensitive
 * matches. Collision policy comes from `params.onDuplicate` and
 * `params.onCaseCollision` ('error' | 'last-wins' | 'rename'); Unicode NFC
 * collisions are always fatal.
 *
 * @param {object[]} entries - Parsed archive entries (order preserved unless
 *   `params.deterministic` is set, which sorts by normalized name).
 * @param {{ addIssue: Function, onDuplicate: string, onCaseCollision: string,
 *   deterministic: boolean, summary: { droppedEntries: number, renamedEntries: number } }} params
 * @returns {{ entry: object, normalizedName: string, dropped: boolean }[]}
 * @throws {ArchiveError} ARCHIVE_NAME_COLLISION under the 'error' policies
 *   or on any Unicode NFC collision (via normalizeEntryName: path issues too).
 */
function collectNormalizedEntries(entries, params) {
    const out = [];
    // nameIndex: normalized name -> index in `out` (for last-wins dropping).
    // caseIndex: case-folded key -> winning entry info.
    // nfcIndex: NFC-normalized name -> winning entry info.
    // originalNames: normalized name -> raw archive name (for messages).
    const nameIndex = new Map();
    const caseIndex = new Map();
    const nfcIndex = new Map();
    const originalNames = new Map();
    for (const entry of entries) {
        const normalizedName = normalizeEntryName(entry.name, entry.isDirectory, params.addIssue);
        let targetName = normalizedName;
        let renamed = false;
        // --- Axis 1: exact duplicate of an already-accepted name.
        const existingIndex = nameIndex.get(targetName);
        if (existingIndex !== undefined) {
            if (params.onDuplicate === 'error') {
                const existingName = originalNames.get(targetName) ?? targetName;
                params.addIssue({
                    code: 'TAR_DUPLICATE_ENTRY',
                    severity: 'error',
                    message: `Duplicate entry name: ${existingName} vs ${entry.name}`,
                    entryName: entry.name,
                    details: { collisionKind: 'duplicate', otherName: existingName, key: targetName }
                });
                throw new ArchiveError('ARCHIVE_NAME_COLLISION', 'Name collision detected (duplicate). Rename entries to avoid collisions.', {
                    entryName: entry.name,
                    context: buildCollisionContext('duplicate', existingName, entry.name, targetName, 'tar')
                });
            }
            if (params.onDuplicate === 'last-wins') {
                // Mark the earlier occurrence dropped; this one replaces it.
                out[existingIndex].dropped = true;
                params.summary.droppedEntries += 1;
            }
            else if (params.onDuplicate === 'rename') {
                targetName = resolveConflictName(targetName, nameIndex, caseIndex);
                renamed = true;
            }
        }
        // --- Axis 2: Unicode NFC-equivalent but byte-different name (always fatal).
        // NOTE(review): this check uses targetName AFTER a possible duplicate
        // rename, but a rename below (case collision) is not re-checked
        // against nfcIndex — confirm this is intended.
        const nfcName = targetName.normalize('NFC');
        const existingNfc = nfcIndex.get(nfcName);
        if (existingNfc && existingNfc.target !== targetName) {
            params.addIssue({
                code: 'TAR_UNICODE_COLLISION',
                severity: 'error',
                message: `Unicode normalization collision: ${existingNfc.original} vs ${entry.name}`,
                entryName: entry.name,
                details: { collisionKind: 'unicode_nfc', otherName: existingNfc.original, key: nfcName }
            });
            throw new ArchiveError('ARCHIVE_NAME_COLLISION', 'Name collision detected (unicode_nfc). Rename entries to avoid collisions.', {
                entryName: entry.name,
                context: buildCollisionContext('unicode_nfc', existingNfc.original, entry.name, nfcName, 'tar')
            });
        }
        // --- Axis 3: case-insensitive collision (policy-controlled).
        const caseKey = toCollisionKey(targetName, entry.isDirectory);
        const existingCase = caseIndex.get(caseKey);
        if (existingCase && existingCase.target !== targetName) {
            if (params.onCaseCollision === 'error') {
                params.addIssue({
                    code: 'TAR_CASE_COLLISION',
                    severity: 'error',
                    message: `Case-insensitive name collision: ${existingCase.original} vs ${entry.name}`,
                    entryName: entry.name,
                    details: { collisionKind: 'casefold', otherName: existingCase.original, key: caseKey }
                });
                throw new ArchiveError('ARCHIVE_NAME_COLLISION', 'Name collision detected (case). Rename entries to avoid collisions.', {
                    entryName: entry.name,
                    context: buildCollisionContext('case', existingCase.original, entry.name, caseKey, 'tar')
                });
            }
            if (params.onCaseCollision === 'last-wins') {
                // Drop the previously-accepted entry that owns this case key.
                const previous = nameIndex.get(existingCase.target);
                if (previous !== undefined) {
                    out[previous].dropped = true;
                    params.summary.droppedEntries += 1;
                }
            }
            else if (params.onCaseCollision === 'rename') {
                targetName = resolveConflictName(targetName, nameIndex, caseIndex);
                renamed = true;
            }
        }
        // Register the (possibly renamed) winner in all indices.
        nameIndex.set(targetName, out.length);
        originalNames.set(targetName, entry.name);
        const finalNfc = targetName.normalize('NFC');
        nfcIndex.set(finalNfc, { original: entry.name, target: targetName });
        caseIndex.set(toCollisionKey(targetName, entry.isDirectory), {
            original: entry.name,
            target: targetName,
            nfc: finalNfc
        });
        if (renamed) {
            params.summary.renamedEntries += 1;
        }
        out.push({
            entry,
            normalizedName: targetName,
            dropped: false
        });
    }
    if (params.deterministic) {
        // Stable byte-order sort by normalized name for reproducible output.
        out.sort((a, b) => (a.normalizedName < b.normalizedName ? -1 : a.normalizedName > b.normalizedName ? 1 : 0));
    }
    return out;
}
/**
 * Normalize a TAR entry name to a safe relative path, or fail loudly.
 *
 * Rejects (records an issue AND throws ARCHIVE_PATH_TRAVERSAL): embedded NUL
 * bytes, absolute paths (POSIX or Windows drive-letter), `..` segments, and
 * names that normalize to nothing. Backslashes become forward slashes,
 * `.` and empty segments are removed, and directories get a trailing slash.
 *
 * @param {string} entryName - Raw name from the archive.
 * @param {boolean} isDirectory - Whether the entry is a directory.
 * @param {Function} addIssue - Issue sink invoked before each throw.
 * @returns {string} The normalized relative path.
 * @throws {ArchiveError} ARCHIVE_PATH_TRAVERSAL on any rejection above.
 */
function normalizeEntryName(entryName, isDirectory, addIssue) {
    // Record the issue, then abort — every rejection path is fatal.
    const reject = (message) => {
        addIssue({
            code: 'TAR_PATH_TRAVERSAL',
            severity: 'error',
            message,
            entryName
        });
        throw new ArchiveError('ARCHIVE_PATH_TRAVERSAL', message, { entryName });
    };
    if (entryName.includes('\u0000')) {
        reject('Entry name contains NUL byte');
    }
    const unixPath = entryName.replace(/\\/g, '/');
    if (unixPath.startsWith('/') || /^[a-zA-Z]:/.test(unixPath)) {
        reject('Absolute paths are not allowed in TAR entries');
    }
    const segments = unixPath.split('/').filter((segment) => segment.length > 0 && segment !== '.');
    if (segments.includes('..')) {
        reject('Path traversal detected in TAR entry');
    }
    let normalized = segments.join('/');
    if (isDirectory && !normalized.endsWith('/')) {
        normalized = normalized.length > 0 ? `${normalized}/` : '';
    }
    if (normalized.length === 0) {
        reject('Entry name resolves to empty path');
    }
    return normalized;
}
/**
 * Build the structured context attached to ARCHIVE_NAME_COLLISION errors.
 *
 * @param {string} collisionType - 'duplicate' | 'unicode_nfc' | 'case'.
 * @param {string} nameA - The previously-accepted entry name.
 * @param {string} nameB - The colliding entry name.
 * @param {string} key - The index key on which the names collided.
 * @param {string} format - Archive format, e.g. 'tar'.
 * @param {string} [collisionKind] - Defaults to the type, except 'case'
 *   which is reported as 'casefold'.
 * @returns {object} The collision context record.
 */
function buildCollisionContext(collisionType, nameA, nameB, key, format, collisionKind = collisionType === 'case' ? 'casefold' : collisionType) {
    return { collisionType, collisionKind, nameA, nameB, key, format };
}
/**
 * Find a non-colliding variant of `name` by appending `~1`, `~2`, …
 * before the file extension (and before a trailing directory slash).
 *
 * @param {string} name - The normalized name that collided.
 * @param {Map} nameIndex - Exact-name index of accepted entries.
 * @param {Map} lowerIndex - Case-folded index of accepted entries.
 * @returns {string} The first `base~N.ext` candidate absent from both indices.
 */
function resolveConflictName(name, nameIndex, lowerIndex) {
    const isDir = name.endsWith('/');
    const path = isDir ? name.slice(0, -1) : name;
    const sep = path.lastIndexOf('/');
    const parent = sep >= 0 ? path.slice(0, sep + 1) : '';
    const leaf = sep >= 0 ? path.slice(sep + 1) : path;
    // A leading dot (hidden file) does not count as an extension separator.
    const dot = leaf.lastIndexOf('.');
    const stem = dot > 0 ? leaf.slice(0, dot) : leaf;
    const suffix = dot > 0 ? leaf.slice(dot) : '';
    for (let attempt = 1; ; attempt += 1) {
        const candidate = `${parent}${stem}~${attempt}${suffix}${isDir ? '/' : ''}`;
        const candidateKey = toCollisionKey(candidate, isDir);
        if (!nameIndex.has(candidate) && !lowerIndex.has(candidateKey)) {
            return candidate;
        }
    }
}
/**
 * Fallback permission bits for entries whose header carried no mode.
 *
 * @param {{ isDirectory: boolean, isSymlink: boolean }} entry
 * @returns {number} 0o755 for directories, 0o777 for symlinks, 0o644 otherwise.
 */
function defaultMode(entry) {
    if (entry.isDirectory) {
        return 0o755;
    }
    return entry.isSymlink ? 0o777 : 0o644;
}
/**
 * Strip a mode value down to its permission bits (rwx for user/group/other),
 * discarding setuid/setgid/sticky and file-type bits.
 *
 * @param {number} mode - Raw mode from the archive header.
 * @returns {number} `mode & 0o777`.
 */
function clampMode(mode) {
    return mode & 0o777;
}
/**
 * Assemble the final audit report: sanitize BigInt-bearing issue details,
 * derive `ok` from the error count, and attach a JSON-safe `toJSON`.
 *
 * NOTE(review): byte-for-byte duplicate of finalizeNormalizeReport —
 * consider sharing one helper.
 *
 * @param {object[]} issues - Collected audit issues.
 * @param {object} summary - Aggregated counters; `errors === 0` means ok.
 * @returns {object} The report, with `toJSON()` producing serializable issues.
 */
function finalizeAuditReport(issues, summary) {
    const sanitizedIssues = issues.map((issue) => issue.details
        ? { ...issue, details: sanitizeDetails(issue.details) }
        : { ...issue });
    const report = {
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        ok: summary.errors === 0,
        summary,
        issues: sanitizedIssues
    };
    // toJSON keeps JSON.stringify safe (no BigInt, no functions in issues).
    report.toJSON = () => ({
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        ok: report.ok,
        summary: report.summary,
        issues: sanitizedIssues.map(issueToJson)
    });
    return report;
}
/**
 * Assemble the final normalize report: sanitize BigInt-bearing issue
 * details, derive `ok` from the error count, and attach a JSON-safe `toJSON`.
 *
 * NOTE(review): byte-for-byte duplicate of finalizeAuditReport —
 * consider sharing one helper.
 *
 * @param {object[]} issues - Collected normalization issues.
 * @param {object} summary - Aggregated counters; `errors === 0` means ok.
 * @returns {object} The report, with `toJSON()` producing serializable issues.
 */
function finalizeNormalizeReport(issues, summary) {
    const sanitizedIssues = issues.map((issue) => issue.details
        ? { ...issue, details: sanitizeDetails(issue.details) }
        : { ...issue });
    const report = {
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        ok: summary.errors === 0,
        summary,
        issues: sanitizedIssues
    };
    // toJSON keeps JSON.stringify safe (no BigInt, no functions in issues).
    report.toJSON = () => ({
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        ok: report.ok,
        summary: report.summary,
        issues: sanitizedIssues.map(issueToJson)
    });
    return report;
}
/**
 * Convert an issue record into a plain JSON-serializable object.
 * Optional fields (entryName, offset, details) are emitted only when
 * present; `offset` (a BigInt) is stringified.
 *
 * @param {object} issue - An issue with code/severity/message plus optionals.
 * @returns {object} JSON-safe issue representation.
 */
function issueToJson(issue) {
    const json = {
        code: issue.code,
        severity: issue.severity,
        message: issue.message
    };
    if (issue.entryName) {
        json.entryName = issue.entryName;
    }
    if (issue.offset !== undefined) {
        json.offset = issue.offset.toString();
    }
    if (issue.details) {
        json.details = sanitizeDetails(issue.details);
    }
    return json;
}
/**
 * Recursively convert a details value into a JSON-serializable shape:
 * BigInts become decimal strings; arrays and plain objects are walked;
 * everything else passes through unchanged.
 *
 * @param {*} value - Arbitrary issue detail payload.
 * @returns {*} JSON-safe equivalent of `value`.
 */
function sanitizeDetails(value) {
    if (typeof value === 'bigint') {
        return value.toString();
    }
    if (Array.isArray(value)) {
        return value.map(sanitizeDetails);
    }
    if (value !== null && typeof value === 'object') {
        return Object.fromEntries(Object.entries(value).map(([key, val]) => [key, sanitizeDetails(val)]));
    }
    return value;
}
/**
 * Convert a BigInt to a Number only when the conversion is exact.
 *
 * Fix: the original checked only the upper bound, so BigInts below
 * `Number.MIN_SAFE_INTEGER` were silently converted to an imprecise
 * Number. Both bounds are now enforced.
 *
 * @param {bigint} value - The value to convert.
 * @returns {number | undefined} The exact Number, or `undefined` when
 *   `value` lies outside the safe-integer range.
 */
function toSafeNumber(value) {
    if (value > BigInt(Number.MAX_SAFE_INTEGER) || value < BigInt(Number.MIN_SAFE_INTEGER)) {
        return undefined;
    }
    return Number(value);
}
+ //# sourceMappingURL=TarReader.js.map