modern-tar 0.7.4 → 0.7.6

This diff shows the changes between package versions that have been publicly released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
@@ -1,9 +1,8 @@
1
- import { i as UnpackOptions, n as TarEntryData, r as TarHeader } from "../types-Te8MHQn3.js";
1
+ import { i as UnpackOptions, n as TarEntryData, r as TarHeader } from "../types-D19dF2SE.js";
2
2
  import { Readable, Writable } from "node:stream";
3
3
  import { Stats } from "node:fs";
4
4
 
5
5
  //#region src/fs/types.d.ts
6
-
7
6
  /**
8
7
  * Filesystem-specific configuration options for packing directories into tar archives.
9
8
  *
package/dist/fs/index.js CHANGED
@@ -1,10 +1,9 @@
1
- import { a as normalizeBody, c as LINK, l as SYMLINK, n as createTarPacker, o as DIRECTORY, r as transformHeader, s as FILE, t as createUnpacker } from "../unpacker-CEuY-276.js";
2
- import * as fs from "node:fs/promises";
1
+ import { a as normalizeBody, c as LINK, l as SYMLINK, n as createTarPacker, o as DIRECTORY, r as transformHeader, s as FILE, t as createUnpacker } from "../unpacker-CPCEF5CT.js";
2
+ import * as fs$1 from "node:fs/promises";
3
3
  import { cpus } from "node:os";
4
4
  import * as path from "node:path";
5
5
  import { Readable, Writable } from "node:stream";
6
- import * as fs$1 from "node:fs";
7
-
6
+ import * as fs from "node:fs";
8
7
  //#region src/fs/cache.ts
9
8
  const createCache = () => {
10
9
  const m = /* @__PURE__ */ new Map();
@@ -19,7 +18,6 @@ const createCache = () => {
19
18
  }
20
19
  };
21
20
  };
22
-
23
21
  //#endregion
24
22
  //#region src/fs/path.ts
25
23
  const unicodeCache = createCache();
@@ -48,16 +46,15 @@ const win32Reserved = {
48
46
  "\"": ""
49
47
  };
50
48
  function normalizeName(name) {
51
- const path$1 = name.replace(/\\/g, "/");
52
- if (path$1.split("/").includes("..") || /^[a-zA-Z]:\.\./.test(path$1)) throw new Error(`${name} points outside extraction directory`);
53
- let relative = path$1;
49
+ const path = name.replace(/\\/g, "/");
50
+ if (path.split("/").includes("..") || /^[a-zA-Z]:\.\./.test(path)) throw new Error(`${name} points outside extraction directory`);
51
+ let relative = path;
54
52
  if (/^[a-zA-Z]:/.test(relative)) relative = relative.replace(/^[a-zA-Z]:[/\\]?/, "");
55
53
  else if (relative.startsWith("/")) relative = relative.replace(/^\/+/, "");
56
54
  if (process.platform === "win32") return relative.replace(/[<>:"|?*]/g, (char) => win32Reserved[char]);
57
55
  return relative;
58
56
  }
59
57
  const normalizeHeaderName = (s) => normalizeUnicode(normalizeName(s.replace(/\/+$/, "")));
60
-
61
58
  //#endregion
62
59
  //#region src/fs/pack.ts
63
60
  const packTarSources = packTar;
@@ -68,7 +65,7 @@ function packTar(sources, options = {}) {
68
65
  const { dereference = false, filter, map, baseDir, concurrency = cpus().length || 8 } = options;
69
66
  const isDir = typeof sources === "string";
70
67
  const directoryPath = isDir ? path.resolve(sources) : null;
71
- const jobs = isDir ? (await fs.readdir(directoryPath, { withFileTypes: true })).map((entry) => ({
68
+ const jobs = isDir ? (await fs$1.readdir(directoryPath, { withFileTypes: true })).map((entry) => ({
72
69
  type: entry.isDirectory() ? DIRECTORY : FILE,
73
70
  source: path.join(directoryPath, entry.name),
74
71
  target: entry.name
@@ -152,56 +149,56 @@ function packTar(sources, options = {}) {
152
149
  const target = normalizeName(job.target);
153
150
  try {
154
151
  if (job.type === "content" || job.type === "stream") {
155
- let body$1;
152
+ let body;
156
153
  let size;
157
- const isDir$1 = target.endsWith("/");
154
+ const isDir = target.endsWith("/");
158
155
  if (job.type === "stream") {
159
- if (typeof job.size !== "number" || !isDir$1 && job.size <= 0 || isDir$1 && job.size !== 0) throw new Error(isDir$1 ? "Streams for directories must have size 0." : "Streams require a positive size.");
156
+ if (!isDir && job.size <= 0 || isDir && job.size !== 0) throw new Error(isDir ? "Streams for directories must have size 0." : "Streams require a positive size.");
160
157
  size = job.size;
161
- body$1 = job.content;
158
+ body = job.content;
162
159
  } else {
163
160
  const content = await normalizeBody(job.content);
164
161
  size = content.length;
165
- body$1 = content;
162
+ body = content;
166
163
  }
167
- const stat$1 = {
168
- size: isDir$1 ? 0 : size,
169
- isFile: () => !isDir$1,
170
- isDirectory: () => isDir$1,
164
+ const stat = {
165
+ size: isDir ? 0 : size,
166
+ isFile: () => !isDir,
167
+ isDirectory: () => isDir,
171
168
  isSymbolicLink: () => false,
172
169
  mode: job.mode,
173
170
  mtime: job.mtime ?? /* @__PURE__ */ new Date(),
174
171
  uid: job.uid ?? 0,
175
172
  gid: job.gid ?? 0
176
173
  };
177
- if (filter && !filter(target, stat$1)) return;
178
- let header$1 = {
174
+ if (filter && !filter(target, stat)) return;
175
+ let header = {
179
176
  name: target,
180
- type: isDir$1 ? DIRECTORY : FILE,
181
- size: isDir$1 ? 0 : size,
182
- mode: stat$1.mode,
183
- mtime: stat$1.mtime,
184
- uid: stat$1.uid,
185
- gid: stat$1.gid,
177
+ type: isDir ? DIRECTORY : FILE,
178
+ size: isDir ? 0 : size,
179
+ mode: stat.mode,
180
+ mtime: stat.mtime,
181
+ uid: stat.uid,
182
+ gid: stat.gid,
186
183
  uname: job.uname,
187
184
  gname: job.gname
188
185
  };
189
- if (map) header$1 = map(header$1);
186
+ if (map) header = map(header);
190
187
  jobResult = {
191
- header: header$1,
192
- body: isDir$1 ? void 0 : body$1
188
+ header,
189
+ body: isDir ? void 0 : body
193
190
  };
194
191
  return;
195
192
  }
196
- let stat = await fs.lstat(job.source, { bigint: true });
193
+ let stat = await fs$1.lstat(job.source, { bigint: true });
197
194
  if (dereference && stat.isSymbolicLink()) {
198
- const linkTarget = await fs.readlink(job.source);
195
+ const linkTarget = await fs$1.readlink(job.source);
199
196
  const resolved = path.resolve(path.dirname(job.source), linkTarget);
200
197
  const resolvedBase = baseDir ?? directoryPath ?? process.cwd();
201
198
  if (!resolved.startsWith(resolvedBase + path.sep) && resolved !== resolvedBase) return;
202
- stat = await fs.stat(job.source, { bigint: true });
199
+ stat = await fs$1.stat(job.source, { bigint: true });
203
200
  }
204
- if (filter && !filter(target, stat)) return;
201
+ if (filter && !filter(job.source, stat)) return;
205
202
  let header = {
206
203
  name: target,
207
204
  size: 0,
@@ -218,7 +215,7 @@ function packTar(sources, options = {}) {
218
215
  header.type = DIRECTORY;
219
216
  header.name = target.endsWith("/") ? target : `${target}/`;
220
217
  try {
221
- for (const d of await fs.readdir(job.source, { withFileTypes: true })) jobs.push({
218
+ for (const d of await fs$1.readdir(job.source, { withFileTypes: true })) jobs.push({
222
219
  type: d.isDirectory() ? DIRECTORY : FILE,
223
220
  source: path.join(job.source, d.name),
224
221
  target: `${header.name}${d.name}`
@@ -226,7 +223,7 @@ function packTar(sources, options = {}) {
226
223
  } catch {}
227
224
  } else if (stat.isSymbolicLink()) {
228
225
  header.type = SYMLINK;
229
- header.linkname = await fs.readlink(job.source);
226
+ header.linkname = await fs$1.readlink(job.source);
230
227
  } else if (stat.isFile()) {
231
228
  header.size = Number(stat.size);
232
229
  if (stat.nlink > 1 && seenInodes.has(stat.ino)) {
@@ -235,9 +232,9 @@ function packTar(sources, options = {}) {
235
232
  header.size = 0;
236
233
  } else {
237
234
  if (stat.nlink > 1) seenInodes.set(stat.ino, target);
238
- if (header.size > 0) if (header.size < 32 * 1024) body = await fs.readFile(job.source);
235
+ if (header.size > 0) if (header.size < 32 * 1024) body = await fs$1.readFile(job.source);
239
236
  else body = {
240
- handle: await fs.open(job.source, "r"),
237
+ handle: await fs$1.open(job.source, "r"),
241
238
  size: header.size
242
239
  };
243
240
  }
@@ -258,7 +255,6 @@ function packTar(sources, options = {}) {
258
255
  })().catch((error) => stream.destroy(error));
259
256
  return stream;
260
257
  }
261
-
262
258
  //#endregion
263
259
  //#region src/fs/concurrency.ts
264
260
  const createOperationQueue = (concurrency) => {
@@ -301,17 +297,17 @@ const createOperationQueue = (concurrency) => {
301
297
  }
302
298
  };
303
299
  };
304
-
305
300
  //#endregion
306
301
  //#region src/fs/file-sink.ts
307
302
  const BATCH_BYTES = 256 * 1024;
303
+ const OPEN_FLAGS = fs.constants.O_WRONLY | fs.constants.O_CREAT | fs.constants.O_TRUNC | (fs.constants.O_NOFOLLOW ?? 0);
308
304
  const STATE_UNOPENED = 0;
309
305
  const STATE_OPENING = 1;
310
306
  const STATE_OPEN = 2;
311
307
  const STATE_CLOSED = 3;
312
308
  const STATE_FAILED = 4;
313
309
  const DRAINED_PROMISE = Promise.resolve();
314
- function createFileSink(path$1, { mode = 438, mtime } = {}) {
310
+ function createFileSink(path, { mode = 438, mtime } = {}) {
315
311
  let state = STATE_UNOPENED;
316
312
  let flushing = false;
317
313
  let fd = null;
@@ -361,7 +357,7 @@ function createFileSink(path$1, { mode = 438, mtime } = {}) {
361
357
  flushing = false;
362
358
  const fdToClose = fd;
363
359
  fd = null;
364
- if (fdToClose !== null) fs$1.ftruncate(fdToClose, 0, () => fs$1.close(fdToClose));
360
+ if (fdToClose !== null) fs.ftruncate(fdToClose, 0, () => fs.close(fdToClose));
365
361
  endReject?.(error);
366
362
  failWaiters(error);
367
363
  };
@@ -372,14 +368,14 @@ function createFileSink(path$1, { mode = 438, mtime } = {}) {
372
368
  }
373
369
  const fdToClose = fd;
374
370
  fd = null;
375
- if (mtime) fs$1.futimes(fdToClose, mtime, mtime, (err) => {
371
+ if (mtime) fs.futimes(fdToClose, mtime, mtime, (err) => {
376
372
  if (err) return fail(err);
377
- fs$1.close(fdToClose, (closeErr) => {
373
+ fs.close(fdToClose, (closeErr) => {
378
374
  if (closeErr) fail(closeErr);
379
375
  else finish();
380
376
  });
381
377
  });
382
- else fs$1.close(fdToClose, (err) => {
378
+ else fs.close(fdToClose, (err) => {
383
379
  if (err) fail(err);
384
380
  else finish();
385
381
  });
@@ -399,16 +395,16 @@ function createFileSink(path$1, { mode = 438, mtime } = {}) {
399
395
  };
400
396
  if (bufs.length === 1) {
401
397
  const buf = bufs[0];
402
- fs$1.write(fd, buf, 0, buf.length, null, onDone);
403
- } else fs$1.writev(fd, bufs, onDone);
398
+ fs.write(fd, buf, 0, buf.length, null, onDone);
399
+ } else fs.writev(fd, bufs, onDone);
404
400
  };
405
401
  const open = () => {
406
402
  if (state !== STATE_UNOPENED) return;
407
403
  state = STATE_OPENING;
408
- fs$1.open(path$1, "w", mode, (err, openFd) => {
404
+ fs.open(path, OPEN_FLAGS, mode, (err, openFd) => {
409
405
  if (err) return fail(err);
410
406
  if (state === STATE_CLOSED || state === STATE_FAILED) {
411
- fs$1.close(openFd);
407
+ fs.close(openFd);
412
408
  return;
413
409
  }
414
410
  fd = openFd;
@@ -460,7 +456,7 @@ function createFileSink(path$1, { mode = 438, mtime } = {}) {
460
456
  if (fd !== null) {
461
457
  const fdToClose = fd;
462
458
  fd = null;
463
- fs$1.close(fdToClose);
459
+ fs.close(fdToClose);
464
460
  }
465
461
  finish();
466
462
  };
@@ -471,31 +467,31 @@ function createFileSink(path$1, { mode = 438, mtime } = {}) {
471
467
  waitDrain
472
468
  };
473
469
  }
474
-
475
470
  //#endregion
476
471
  //#region src/fs/path-cache.ts
477
472
  const ENOENT = "ENOENT";
478
473
  const createPathCache = (destDirPath, options) => {
474
+ const { maxDepth = 1024, dmode } = options;
479
475
  const dirPromises = createCache();
480
- const pathConflicts = createCache();
476
+ const pathConflicts = /* @__PURE__ */ new Map();
481
477
  const deferredLinks = [];
482
478
  const realDirCache = createCache();
483
- const initializeDestDir = async (destDirPath$1) => {
484
- const symbolic = normalizeUnicode(path.resolve(destDirPath$1));
479
+ const initializeDestDir = async (destDirPath) => {
480
+ const symbolic = normalizeUnicode(path.resolve(destDirPath));
485
481
  try {
486
- await fs.mkdir(symbolic, { recursive: true });
482
+ await fs$1.mkdir(symbolic, { recursive: true });
487
483
  } catch (err) {
488
484
  if (err.code === ENOENT) {
489
485
  const parentDir = path.dirname(symbolic);
490
486
  if (parentDir === symbolic) throw err;
491
- await fs.mkdir(parentDir, { recursive: true });
492
- await fs.mkdir(symbolic, { recursive: true });
487
+ await fs$1.mkdir(parentDir, { recursive: true });
488
+ await fs$1.mkdir(symbolic, { recursive: true });
493
489
  } else throw err;
494
490
  }
495
491
  try {
496
492
  return {
497
493
  symbolic,
498
- real: await fs.realpath(symbolic)
494
+ real: await fs$1.realpath(symbolic)
499
495
  };
500
496
  } catch (err) {
501
497
  if (err.code === ENOENT) return {
@@ -515,7 +511,7 @@ const createPathCache = (destDirPath, options) => {
515
511
  }
516
512
  let promise = realDirCache.get(dirPath);
517
513
  if (!promise) {
518
- promise = fs.realpath(dirPath).then((realPath) => {
514
+ promise = fs$1.realpath(dirPath).then((realPath) => {
519
515
  validateBounds(realPath, destDir.real, errorMessage);
520
516
  return realPath;
521
517
  });
@@ -532,11 +528,11 @@ const createPathCache = (destDirPath, options) => {
532
528
  if (dirPath === (await destDirPromise).symbolic) return;
533
529
  await prepareDirectory(path.dirname(dirPath));
534
530
  try {
535
- const stat = await fs.lstat(dirPath);
531
+ const stat = await fs$1.lstat(dirPath);
536
532
  if (stat.isDirectory()) return;
537
533
  if (stat.isSymbolicLink()) try {
538
534
  const realPath = await getRealDir(dirPath, `Symlink "${dirPath}" points outside the extraction directory.`);
539
- if ((await fs.stat(realPath)).isDirectory()) return;
535
+ if ((await fs$1.stat(realPath)).isDirectory()) return;
540
536
  } catch (err) {
541
537
  if (err.code === ENOENT) throw new Error(`Symlink "${dirPath}" points outside the extraction directory.`);
542
538
  throw err;
@@ -544,7 +540,7 @@ const createPathCache = (destDirPath, options) => {
544
540
  throw new Error(`"${dirPath}" is not a valid directory component.`);
545
541
  } catch (err) {
546
542
  if (err.code === ENOENT) {
547
- await fs.mkdir(dirPath, { mode: mode ?? options.dmode });
543
+ await fs$1.mkdir(dirPath, { mode: mode ?? options.dmode });
548
544
  return;
549
545
  }
550
546
  throw err;
@@ -559,7 +555,6 @@ const createPathCache = (destDirPath, options) => {
559
555
  },
560
556
  async preparePath(header) {
561
557
  const { name, linkname, type, mode, mtime } = header;
562
- const { maxDepth = 1024, dmode } = options;
563
558
  const normalizedName = normalizeHeaderName(name);
564
559
  const destDir = await destDirPromise;
565
560
  const outPath = path.join(destDir.symbolic, normalizedName);
@@ -570,7 +565,7 @@ const createPathCache = (destDirPath, options) => {
570
565
  }
571
566
  const prevOp = pathConflicts.get(normalizedName);
572
567
  if (prevOp) {
573
- if (prevOp === DIRECTORY && type !== DIRECTORY || prevOp !== DIRECTORY && type === DIRECTORY) throw new Error(`Path conflict ${type} over existing ${prevOp} at "${name}"`);
568
+ if (prevOp === "directory" && type !== "directory" || prevOp !== "directory" && type === "directory") throw new Error(`Path conflict ${type} over existing ${prevOp} at "${name}"`);
574
569
  return;
575
570
  }
576
571
  const parentDir = path.dirname(outPath);
@@ -579,7 +574,7 @@ const createPathCache = (destDirPath, options) => {
579
574
  pathConflicts.set(normalizedName, DIRECTORY);
580
575
  const safeMode = mode ? mode & 511 : void 0;
581
576
  await prepareDirectory(outPath, dmode ?? safeMode);
582
- if (mtime) await fs.lutimes(outPath, mtime, mtime).catch(() => {});
577
+ if (mtime) await fs$1.lutimes(outPath, mtime, mtime).catch(() => {});
583
578
  return;
584
579
  }
585
580
  case FILE:
@@ -591,8 +586,8 @@ const createPathCache = (destDirPath, options) => {
591
586
  if (!linkname) return;
592
587
  await prepareDirectory(parentDir);
593
588
  validateBounds(path.resolve(parentDir, linkname), destDir.symbolic, `Symlink "${linkname}" points outside the extraction directory.`);
594
- await fs.symlink(linkname, outPath);
595
- if (mtime) await fs.lutimes(outPath, mtime, mtime).catch(() => {});
589
+ await fs$1.symlink(linkname, outPath);
590
+ if (mtime) await fs$1.lutimes(outPath, mtime, mtime).catch(() => {});
596
591
  return;
597
592
  case LINK: {
598
593
  pathConflicts.set(normalizedName, LINK);
@@ -618,7 +613,7 @@ const createPathCache = (destDirPath, options) => {
618
613
  },
619
614
  async applyLinks() {
620
615
  for (const { linkTarget, outPath } of deferredLinks) try {
621
- await fs.link(linkTarget, outPath);
616
+ await fs$1.link(linkTarget, outPath);
622
617
  } catch (err) {
623
618
  if (err.code === ENOENT) throw new Error(`Hardlink target "${linkTarget}" does not exist for link at "${outPath}".`);
624
619
  throw err;
@@ -626,7 +621,6 @@ const createPathCache = (destDirPath, options) => {
626
621
  }
627
622
  };
628
623
  };
629
-
630
624
  //#endregion
631
625
  //#region src/fs/unpack.ts
632
626
  function unpackTar(directoryPath, options = {}) {
@@ -635,7 +629,12 @@ function unpackTar(directoryPath, options = {}) {
635
629
  const pathCache = createPathCache(directoryPath, options);
636
630
  let currentFileStream = null;
637
631
  let currentWriteCallback = null;
638
- return new Writable({
632
+ let queuedError = null;
633
+ const onQueuedError = (err) => {
634
+ queuedError ??= err;
635
+ if (!writable.destroyed) writable.destroy(err);
636
+ };
637
+ const writable = new Writable({
639
638
  async write(chunk, _, cb) {
640
639
  try {
641
640
  unpacker.write(chunk);
@@ -656,7 +655,7 @@ function unpackTar(directoryPath, options = {}) {
656
655
  return;
657
656
  }
658
657
  const streamToClose = currentFileStream;
659
- if (streamToClose) opQueue.add(() => streamToClose.end());
658
+ if (streamToClose) opQueue.add(() => streamToClose.end()).catch(onQueuedError);
660
659
  currentFileStream = null;
661
660
  currentWriteCallback = null;
662
661
  } else if (!unpacker.skipEntry()) {
@@ -686,8 +685,8 @@ function unpackTar(directoryPath, options = {}) {
686
685
  mtime: transformedHeader.mtime ?? void 0
687
686
  });
688
687
  let needsDrain = false;
689
- const writeCallback = (chunk$1) => {
690
- const writeOk = fileStream.write(chunk$1);
688
+ const writeCallback = (chunk) => {
689
+ const writeOk = fileStream.write(chunk);
691
690
  if (!writeOk) needsDrain = true;
692
691
  return writeOk;
693
692
  };
@@ -707,7 +706,7 @@ function unpackTar(directoryPath, options = {}) {
707
706
  cb();
708
707
  return;
709
708
  }
710
- opQueue.add(() => fileStream.end());
709
+ opQueue.add(() => fileStream.end()).catch(onQueuedError);
711
710
  } else if (!unpacker.skipEntry()) {
712
711
  cb();
713
712
  return;
@@ -723,6 +722,7 @@ function unpackTar(directoryPath, options = {}) {
723
722
  unpacker.validateEOF();
724
723
  await pathCache.ready();
725
724
  await opQueue.onIdle();
725
+ if (queuedError) throw queuedError;
726
726
  await pathCache.applyLinks();
727
727
  cb();
728
728
  } catch (err) {
@@ -740,7 +740,7 @@ function unpackTar(directoryPath, options = {}) {
740
740
  })().then(() => callback(error ?? null), (e) => callback(error ?? (e instanceof Error ? e : /* @__PURE__ */ new Error("Stream destroyed"))));
741
741
  }
742
742
  });
743
+ return writable;
743
744
  }
744
-
745
745
  //#endregion
746
- export { packTar, packTarSources, unpackTar };
746
+ export { packTar, packTarSources, unpackTar };
@@ -1,5 +1,4 @@
1
1
  //#region src/tar/constants.d.ts
2
-
3
2
  /** Type flag constants for file types. */
4
3
  declare const TYPEFLAG: {
5
4
  readonly file: "0";
@@ -1,39 +1,3 @@
1
- //#region src/tar/constants.ts
2
- const BLOCK_SIZE = 512;
3
- const BLOCK_SIZE_MASK = 511;
4
- const DEFAULT_FILE_MODE = 420;
5
- const DEFAULT_DIR_MODE = 493;
6
- const USTAR_NAME_OFFSET = 0;
7
- const USTAR_NAME_SIZE = 100;
8
- const USTAR_MODE_OFFSET = 100;
9
- const USTAR_MODE_SIZE = 8;
10
- const USTAR_UID_OFFSET = 108;
11
- const USTAR_UID_SIZE = 8;
12
- const USTAR_GID_OFFSET = 116;
13
- const USTAR_GID_SIZE = 8;
14
- const USTAR_SIZE_OFFSET = 124;
15
- const USTAR_SIZE_SIZE = 12;
16
- const USTAR_MTIME_OFFSET = 136;
17
- const USTAR_MTIME_SIZE = 12;
18
- const USTAR_CHECKSUM_OFFSET = 148;
19
- const USTAR_CHECKSUM_SIZE = 8;
20
- const USTAR_TYPEFLAG_OFFSET = 156;
21
- const USTAR_TYPEFLAG_SIZE = 1;
22
- const USTAR_LINKNAME_OFFSET = 157;
23
- const USTAR_LINKNAME_SIZE = 100;
24
- const USTAR_MAGIC_OFFSET = 257;
25
- const USTAR_MAGIC_SIZE = 6;
26
- const USTAR_VERSION_OFFSET = 263;
27
- const USTAR_VERSION_SIZE = 2;
28
- const USTAR_UNAME_OFFSET = 265;
29
- const USTAR_UNAME_SIZE = 32;
30
- const USTAR_GNAME_OFFSET = 297;
31
- const USTAR_GNAME_SIZE = 32;
32
- const USTAR_PREFIX_OFFSET = 345;
33
- const USTAR_PREFIX_SIZE = 155;
34
- const USTAR_VERSION = "00";
35
- const USTAR_MAX_UID_GID = 2097151;
36
- const USTAR_MAX_SIZE = 8589934591;
37
1
  const FILE = "file";
38
2
  const LINK = "link";
39
3
  const SYMLINK = "symlink";
@@ -64,9 +28,8 @@ const FLAGTYPE = {
64
28
  L: "gnu-long-name",
65
29
  K: "gnu-long-link-name"
66
30
  };
67
- const ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);
31
+ const ZERO_BLOCK = new Uint8Array(512);
68
32
  const EMPTY = new Uint8Array(0);
69
-
70
33
  //#endregion
71
34
  //#region src/tar/encoding.ts
72
35
  const encoder = new TextEncoder();
@@ -105,10 +68,9 @@ function readNumeric(view, offset, size) {
105
68
  }
106
69
  return readOctal(view, offset, size);
107
70
  }
108
-
109
71
  //#endregion
110
72
  //#region src/tar/body.ts
111
- const isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK || header.type === "character-device" || header.type === "block-device" || header.type === "fifo";
73
+ const isBodyless = (header) => header.type === "directory" || header.type === "symlink" || header.type === "link" || header.type === "character-device" || header.type === "block-device" || header.type === "fifo";
112
74
  async function normalizeBody(body) {
113
75
  if (body === null || body === void 0) return EMPTY;
114
76
  if (body instanceof Uint8Array) return body;
@@ -117,21 +79,26 @@ async function normalizeBody(body) {
117
79
  if (body instanceof Blob) return new Uint8Array(await body.arrayBuffer());
118
80
  throw new TypeError("Unsupported content type for entry body.");
119
81
  }
120
-
121
82
  //#endregion
122
83
  //#region src/tar/options.ts
84
+ const stripPath = (p, n) => {
85
+ const parts = p.split("/").filter(Boolean);
86
+ return n >= parts.length ? "" : parts.slice(n).join("/");
87
+ };
123
88
  function transformHeader(header, options) {
124
89
  const { strip, filter, map } = options;
125
90
  if (!strip && !filter && !map) return header;
126
91
  const h = { ...header };
127
92
  if (strip && strip > 0) {
128
- const components = h.name.split("/").filter(Boolean);
129
- if (strip >= components.length) return null;
130
- const newName = components.slice(strip).join("/");
131
- h.name = h.type === DIRECTORY && !newName.endsWith("/") ? `${newName}/` : newName;
132
- if (h.linkname?.startsWith("/")) {
133
- const linkComponents = h.linkname.split("/").filter(Boolean);
134
- h.linkname = strip >= linkComponents.length ? "/" : `/${linkComponents.slice(strip).join("/")}`;
93
+ const newName = stripPath(h.name, strip);
94
+ if (!newName) return null;
95
+ h.name = h.type === "directory" && !newName.endsWith("/") ? `${newName}/` : newName;
96
+ if (h.linkname) {
97
+ const isAbsolute = h.linkname.startsWith("/");
98
+ if (isAbsolute || h.type === "link") {
99
+ const stripped = stripPath(h.linkname, strip);
100
+ h.linkname = isAbsolute ? `/${stripped}` || "/" : stripped;
101
+ }
135
102
  }
136
103
  }
137
104
  if (filter?.(h) === false) return null;
@@ -139,43 +106,42 @@ function transformHeader(header, options) {
139
106
  if (result && (!result.name || !result.name.trim() || result.name === "." || result.name === "/")) return null;
140
107
  return result;
141
108
  }
142
-
143
109
  //#endregion
144
110
  //#region src/tar/checksum.ts
145
111
  const CHECKSUM_SPACE = 32;
146
112
  const ASCII_ZERO = 48;
147
113
  function validateChecksum(block) {
148
- const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);
114
+ const stored = readOctal(block, 148, 8);
149
115
  let sum = 0;
150
- for (let i = 0; i < block.length; i++) if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE) sum += CHECKSUM_SPACE;
116
+ for (let i = 0; i < block.length; i++) if (i >= 148 && i < 156) sum += CHECKSUM_SPACE;
151
117
  else sum += block[i];
152
118
  return stored === sum;
153
119
  }
154
120
  function writeChecksum(block) {
155
- block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);
121
+ block.fill(CHECKSUM_SPACE, 148, 156);
156
122
  let checksum = 0;
157
123
  for (const byte of block) checksum += byte;
158
- for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1; i >= USTAR_CHECKSUM_OFFSET; i--) {
124
+ for (let i = 153; i >= 148; i--) {
159
125
  block[i] = (checksum & 7) + ASCII_ZERO;
160
126
  checksum >>= 3;
161
127
  }
162
- block[USTAR_CHECKSUM_OFFSET + 6] = 0;
163
- block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;
128
+ block[154] = 0;
129
+ block[155] = CHECKSUM_SPACE;
164
130
  }
165
-
166
131
  //#endregion
167
132
  //#region src/tar/pax.ts
133
+ const USTAR_SPLIT_MAX_SIZE = 256;
168
134
  function generatePax(header) {
169
135
  const paxRecords = {};
170
- if (header.name.length > USTAR_NAME_SIZE) {
136
+ if (encoder.encode(header.name).length > 100) {
171
137
  if (findUstarSplit(header.name) === null) paxRecords.path = header.name;
172
138
  }
173
- if (header.linkname && header.linkname.length > USTAR_NAME_SIZE) paxRecords.linkpath = header.linkname;
174
- if (header.uname && header.uname.length > USTAR_UNAME_SIZE) paxRecords.uname = header.uname;
175
- if (header.gname && header.gname.length > USTAR_GNAME_SIZE) paxRecords.gname = header.gname;
176
- if (header.uid != null && header.uid > USTAR_MAX_UID_GID) paxRecords.uid = String(header.uid);
177
- if (header.gid != null && header.gid > USTAR_MAX_UID_GID) paxRecords.gid = String(header.gid);
178
- if (header.size != null && header.size > USTAR_MAX_SIZE) paxRecords.size = String(header.size);
139
+ if (header.linkname && encoder.encode(header.linkname).length > 100) paxRecords.linkpath = header.linkname;
140
+ if (header.uname && encoder.encode(header.uname).length > 32) paxRecords.uname = header.uname;
141
+ if (header.gname && encoder.encode(header.gname).length > 32) paxRecords.gname = header.gname;
142
+ if (header.uid != null && header.uid > 2097151) paxRecords.uid = String(header.uid);
143
+ if (header.gid != null && header.gid > 2097151) paxRecords.gid = String(header.gid);
144
+ if (header.size != null && header.size > 8589934591) paxRecords.size = String(header.size);
179
145
  if (header.pax) Object.assign(paxRecords, header.pax);
180
146
  const paxEntries = Object.entries(paxRecords);
181
147
  if (paxEntries.length === 0) return null;
@@ -202,20 +168,23 @@ function generatePax(header) {
202
168
  };
203
169
  }
204
170
  function findUstarSplit(path) {
205
- if (path.length <= USTAR_NAME_SIZE) return null;
206
- const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;
207
- const slashIndex = path.lastIndexOf("/", USTAR_PREFIX_SIZE);
208
- if (slashIndex > 0 && slashIndex >= minSlashIndex) return {
209
- prefix: path.slice(0, slashIndex),
210
- name: path.slice(slashIndex + 1)
211
- };
171
+ const totalPathBytes = encoder.encode(path).length;
172
+ if (totalPathBytes <= 100 || totalPathBytes > USTAR_SPLIT_MAX_SIZE) return null;
173
+ for (let i = path.length - 1; i > 0; i--) {
174
+ if (path[i] !== "/") continue;
175
+ const prefix = path.slice(0, i);
176
+ const name = path.slice(i + 1);
177
+ if (encoder.encode(prefix).length <= 155 && encoder.encode(name).length <= 100) return {
178
+ prefix,
179
+ name
180
+ };
181
+ }
212
182
  return null;
213
183
  }
214
-
215
184
  //#endregion
216
185
  //#region src/tar/header.ts
217
186
  function createTarHeader(header) {
218
- const view = new Uint8Array(BLOCK_SIZE);
187
+ const view = new Uint8Array(512);
219
188
  const size = isBodyless(header) ? 0 : header.size ?? 0;
220
189
  let name = header.name;
221
190
  let prefix = "";
@@ -226,42 +195,42 @@ function createTarHeader(header) {
226
195
  prefix = split.prefix;
227
196
  }
228
197
  }
229
- writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);
230
- writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
231
- writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 0);
232
- writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);
233
- writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);
234
- writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1e3));
235
- writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);
236
- writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);
237
- writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, "ustar\0");
238
- writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);
239
- writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);
240
- writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);
241
- writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);
198
+ writeString(view, 0, 100, name);
199
+ writeOctal(view, 100, 8, header.mode ?? (header.type === "directory" ? 493 : 420));
200
+ writeOctal(view, 108, 8, header.uid ?? 0);
201
+ writeOctal(view, 116, 8, header.gid ?? 0);
202
+ writeOctal(view, 124, 12, size);
203
+ writeOctal(view, 136, 12, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1e3));
204
+ writeString(view, 156, 1, TYPEFLAG[header.type ?? "file"]);
205
+ writeString(view, 157, 100, header.linkname);
206
+ writeString(view, 257, 6, "ustar\0");
207
+ writeString(view, 263, 2, "00");
208
+ writeString(view, 265, 32, header.uname);
209
+ writeString(view, 297, 32, header.gname);
210
+ writeString(view, 345, 155, prefix);
242
211
  writeChecksum(view);
243
212
  return view;
244
213
  }
245
214
  function parseUstarHeader(block, strict) {
246
215
  if (strict && !validateChecksum(block)) throw new Error("Invalid tar header checksum.");
247
- const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);
216
+ const typeflag = readString(block, 156, 1);
248
217
  const header = {
249
- name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),
250
- mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),
251
- uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),
252
- gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),
253
- size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),
254
- mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1e3),
255
- type: FLAGTYPE[typeflag] || FILE,
256
- linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)
218
+ name: readString(block, 0, 100),
219
+ mode: readOctal(block, 100, 8),
220
+ uid: readNumeric(block, 108, 8),
221
+ gid: readNumeric(block, 116, 8),
222
+ size: readNumeric(block, 124, 12),
223
+ mtime: /* @__PURE__ */ new Date(readNumeric(block, 136, 12) * 1e3),
224
+ type: FLAGTYPE[typeflag] || "file",
225
+ linkname: readString(block, 157, 100)
257
226
  };
258
- const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);
227
+ const magic = readString(block, 257, 6);
259
228
  if (isBodyless(header)) header.size = 0;
260
229
  if (magic.trim() === "ustar") {
261
- header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);
262
- header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);
230
+ header.uname = readString(block, 265, 32);
231
+ header.gname = readString(block, 297, 32);
263
232
  }
264
- if (magic === "ustar") header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
233
+ if (magic === "ustar") header.prefix = readString(block, 345, 155);
265
234
  return header;
266
235
  }
267
236
  const PAX_MAPPING = {
@@ -275,17 +244,17 @@ const PAX_MAPPING = {
275
244
  gname: ["gname", (v) => v]
276
245
  };
277
246
  function parsePax(buffer) {
278
- const decoder$1 = new TextDecoder("utf-8");
247
+ const decoder = new TextDecoder("utf-8");
279
248
  const overrides = Object.create(null);
280
249
  const pax = Object.create(null);
281
250
  let offset = 0;
282
251
  while (offset < buffer.length) {
283
252
  const spaceIndex = buffer.indexOf(32, offset);
284
253
  if (spaceIndex === -1) break;
285
- const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);
254
+ const length = parseInt(decoder.decode(buffer.subarray(offset, spaceIndex)), 10);
286
255
  if (Number.isNaN(length) || length === 0) break;
287
256
  const recordEnd = offset + length;
288
- const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
257
+ const [key, value] = decoder.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
289
258
  if (key && value !== void 0) {
290
259
  pax[key] = value;
291
260
  if (Object.hasOwn(PAX_MAPPING, key)) {
@@ -323,7 +292,7 @@ function getHeaderBlocks(header) {
323
292
  const base = createTarHeader(header);
324
293
  const pax = generatePax(header);
325
294
  if (!pax) return [base];
326
- const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;
295
+ const paxPadding = -pax.paxBody.length & 511;
327
296
  const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];
328
297
  return [
329
298
  pax.paxHeader,
@@ -332,28 +301,25 @@ function getHeaderBlocks(header) {
332
301
  base
333
302
  ];
334
303
  }
335
-
336
304
  //#endregion
337
305
  //#region src/tar/packer.ts
338
- const EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);
306
+ const EOF_BUFFER = new Uint8Array(512 * 2);
339
307
  function createTarPacker(onData, onError, onFinalize) {
340
308
  let currentHeader = null;
341
309
  let bytesWritten = 0;
342
310
  let finalized = false;
311
+ const fail = (message) => {
312
+ const error = new Error(message);
313
+ onError(error);
314
+ throw error;
315
+ };
343
316
  return {
344
317
  add(header) {
345
- if (finalized) {
346
- const error = /* @__PURE__ */ new Error("No new tar entries after finalize.");
347
- onError(error);
348
- throw error;
349
- }
350
- if (currentHeader !== null) {
351
- const error = /* @__PURE__ */ new Error("Previous entry must be completed before adding a new one");
352
- onError(error);
353
- throw error;
354
- }
318
+ if (finalized) fail("No new tar entries after finalize.");
319
+ if (currentHeader !== null) fail("Previous entry must be completed before adding a new one");
320
+ const size = isBodyless(header) ? 0 : header.size;
321
+ if (!Number.isSafeInteger(size) || size < 0) fail("Invalid tar entry size.");
355
322
  try {
356
- const size = isBodyless(header) ? 0 : header.size ?? 0;
357
323
  const headerBlocks = getHeaderBlocks({
358
324
  ...header,
359
325
  size
@@ -369,22 +335,10 @@ function createTarPacker(onData, onError, onFinalize) {
369
335
  }
370
336
  },
371
337
  write(chunk) {
372
- if (!currentHeader) {
373
- const error = /* @__PURE__ */ new Error("No active tar entry.");
374
- onError(error);
375
- throw error;
376
- }
377
- if (finalized) {
378
- const error = /* @__PURE__ */ new Error("Cannot write data after finalize.");
379
- onError(error);
380
- throw error;
381
- }
338
+ if (!currentHeader) fail("No active tar entry.");
339
+ if (finalized) fail("Cannot write data after finalize.");
382
340
  const newTotal = bytesWritten + chunk.length;
383
- if (newTotal > currentHeader.size) {
384
- const error = /* @__PURE__ */ new Error(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
385
- onError(error);
386
- throw error;
387
- }
341
+ if (newTotal > currentHeader.size) fail(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
388
342
  try {
389
343
  bytesWritten = newTotal;
390
344
  onData(chunk);
@@ -393,23 +347,11 @@ function createTarPacker(onData, onError, onFinalize) {
393
347
  }
394
348
  },
395
349
  endEntry() {
396
- if (!currentHeader) {
397
- const error = /* @__PURE__ */ new Error("No active entry to end.");
398
- onError(error);
399
- throw error;
400
- }
401
- if (finalized) {
402
- const error = /* @__PURE__ */ new Error("Cannot end entry after finalize.");
403
- onError(error);
404
- throw error;
405
- }
350
+ if (!currentHeader) fail("No active entry to end.");
351
+ if (finalized) fail("Cannot end entry after finalize.");
406
352
  try {
407
- if (bytesWritten !== currentHeader.size) {
408
- const error = /* @__PURE__ */ new Error(`Size mismatch for "${currentHeader.name}".`);
409
- onError(error);
410
- throw error;
411
- }
412
- const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;
353
+ if (bytesWritten !== currentHeader.size) fail(`Size mismatch for "${currentHeader.name}".`);
354
+ const paddingSize = -currentHeader.size & 511;
413
355
  if (paddingSize > 0) onData(new Uint8Array(paddingSize));
414
356
  currentHeader = null;
415
357
  bytesWritten = 0;
@@ -419,16 +361,8 @@ function createTarPacker(onData, onError, onFinalize) {
419
361
  }
420
362
  },
421
363
  finalize() {
422
- if (finalized) {
423
- const error = /* @__PURE__ */ new Error("Archive has already been finalized");
424
- onError(error);
425
- throw error;
426
- }
427
- if (currentHeader !== null) {
428
- const error = /* @__PURE__ */ new Error("Cannot finalize while an entry is still active");
429
- onError(error);
430
- throw error;
431
- }
364
+ if (finalized) fail("Archive has already been finalized");
365
+ if (currentHeader !== null) fail("Cannot finalize while an entry is still active");
432
366
  try {
433
367
  onData(EOF_BUFFER);
434
368
  finalized = true;
@@ -439,7 +373,6 @@ function createTarPacker(onData, onError, onFinalize) {
439
373
  }
440
374
  };
441
375
  }
442
-
443
376
  //#endregion
444
377
  //#region src/tar/chunk-queue.ts
445
378
  const INITIAL_CAPACITY = 256;
@@ -466,13 +399,13 @@ function createChunkQueue() {
466
399
  function pull(bytes, callback) {
467
400
  if (callback) {
468
401
  let fed = 0;
469
- let remaining$1 = Math.min(bytes, totalAvailable);
470
- while (remaining$1 > 0) {
402
+ let remaining = Math.min(bytes, totalAvailable);
403
+ while (remaining > 0) {
471
404
  const chunk = chunks[head];
472
- const toFeed = Math.min(remaining$1, chunk.length);
405
+ const toFeed = Math.min(remaining, chunk.length);
473
406
  const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);
474
407
  consumeFromHead(toFeed);
475
- remaining$1 -= toFeed;
408
+ remaining -= toFeed;
476
409
  fed += toFeed;
477
410
  if (!callback(segment)) break;
478
411
  }
@@ -557,7 +490,6 @@ function createChunkQueue() {
557
490
  pull
558
491
  };
559
492
  }
560
-
561
493
  //#endregion
562
494
  //#region src/tar/unpacker.ts
563
495
  const STATE_HEADER = 0;
@@ -576,6 +508,7 @@ function createUnpacker(options = {}) {
576
508
  const unpacker = {
577
509
  isEntryActive: () => state === STATE_BODY,
578
510
  isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,
511
+ canFinish: () => !currentEntry || available() >= currentEntry.remaining + currentEntry.padding,
579
512
  write(chunk) {
580
513
  if (ended) throw new Error("Archive already ended.");
581
514
  push(chunk);
@@ -587,7 +520,7 @@ function createUnpacker(options = {}) {
587
520
  if (state !== STATE_HEADER) throw new Error("Cannot read header while an entry is active");
588
521
  if (done) return void 0;
589
522
  while (!done) {
590
- if (available() < BLOCK_SIZE) {
523
+ if (available() < 512) {
591
524
  if (ended) {
592
525
  if (available() > 0 && strict) throw truncateErr;
593
526
  done = true;
@@ -595,9 +528,9 @@ function createUnpacker(options = {}) {
595
528
  }
596
529
  return null;
597
530
  }
598
- const headerBlock = peek(BLOCK_SIZE);
531
+ const headerBlock = peek(512);
599
532
  if (isZeroBlock(headerBlock)) {
600
- if (available() < BLOCK_SIZE * 2) {
533
+ if (available() < 512 * 2) {
601
534
  if (ended) {
602
535
  if (strict) throw truncateErr;
603
536
  done = true;
@@ -605,14 +538,14 @@ function createUnpacker(options = {}) {
605
538
  }
606
539
  return null;
607
540
  }
608
- if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {
609
- discard(BLOCK_SIZE * 2);
541
+ if (isZeroBlock(peek(512 * 2).subarray(512))) {
542
+ discard(512 * 2);
610
543
  done = true;
611
544
  eof = true;
612
545
  return;
613
546
  }
614
547
  if (strict) throw new Error("Invalid tar header.");
615
- discard(BLOCK_SIZE);
548
+ discard(512);
616
549
  continue;
617
550
  }
618
551
  let internalHeader;
@@ -620,33 +553,33 @@ function createUnpacker(options = {}) {
620
553
  internalHeader = parseUstarHeader(headerBlock, strict);
621
554
  } catch (err) {
622
555
  if (strict) throw err;
623
- discard(BLOCK_SIZE);
556
+ discard(512);
624
557
  continue;
625
558
  }
626
559
  const metaParser = getMetaParser(internalHeader.type);
627
560
  if (metaParser) {
628
- const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;
629
- if (available() < BLOCK_SIZE + paddedSize) {
561
+ const paddedSize = internalHeader.size + (-internalHeader.size & 511);
562
+ if (available() < 512 + paddedSize) {
630
563
  if (ended && strict) throw truncateErr;
631
564
  return null;
632
565
  }
633
- discard(BLOCK_SIZE);
566
+ discard(512);
634
567
  const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));
635
568
  const target = internalHeader.type === "pax-global-header" ? paxGlobals : nextEntryOverrides;
636
569
  for (const key in overrides) target[key] = overrides[key];
637
570
  continue;
638
571
  }
639
- discard(BLOCK_SIZE);
572
+ discard(512);
640
573
  const header = internalHeader;
641
574
  if (internalHeader.prefix) header.name = `${internalHeader.prefix}/${header.name}`;
642
575
  applyOverrides(header, paxGlobals);
643
576
  applyOverrides(header, nextEntryOverrides);
644
- if (header.name.endsWith("/") && header.type === FILE) header.type = DIRECTORY;
577
+ if (header.name.endsWith("/") && header.type === "file") header.type = DIRECTORY;
645
578
  nextEntryOverrides = {};
646
579
  currentEntry = {
647
580
  header,
648
581
  remaining: header.size,
649
- padding: -header.size & BLOCK_SIZE_MASK
582
+ padding: -header.size & 511
650
583
  };
651
584
  state = STATE_BODY;
652
585
  return header;
@@ -699,6 +632,5 @@ function isZeroBlock(block) {
699
632
  for (let i = 0; i < block.length; i++) if (block[i] !== 0) return false;
700
633
  return true;
701
634
  }
702
-
703
635
  //#endregion
704
- export { normalizeBody as a, LINK as c, isBodyless as i, SYMLINK as l, createTarPacker as n, DIRECTORY as o, transformHeader as r, FILE as s, createUnpacker as t };
636
+ export { normalizeBody as a, LINK as c, isBodyless as i, SYMLINK as l, createTarPacker as n, DIRECTORY as o, transformHeader as r, FILE as s, createUnpacker as t };
@@ -1,7 +1,6 @@
1
- import { i as UnpackOptions, n as TarEntryData, r as TarHeader, t as DecoderOptions } from "../types-Te8MHQn3.js";
1
+ import { i as UnpackOptions, n as TarEntryData, r as TarHeader, t as DecoderOptions } from "../types-D19dF2SE.js";
2
2
 
3
3
  //#region src/web/compression.d.ts
4
-
5
4
  /**
6
5
  * Creates a gzip compression stream that is compatible with Uint8Array streams.
7
6
  *
package/dist/web/index.js CHANGED
@@ -1,5 +1,4 @@
1
- import { a as normalizeBody, i as isBodyless, n as createTarPacker$1, r as transformHeader, t as createUnpacker } from "../unpacker-CEuY-276.js";
2
-
1
+ import { a as normalizeBody, i as isBodyless, n as createTarPacker$1, r as transformHeader, t as createUnpacker } from "../unpacker-CPCEF5CT.js";
3
2
  //#region src/web/compression.ts
4
3
  function createGzipEncoder() {
5
4
  return new CompressionStream("gzip");
@@ -7,7 +6,6 @@ function createGzipEncoder() {
7
6
  function createGzipDecoder() {
8
7
  return new DecompressionStream("gzip");
9
8
  }
10
-
11
9
  //#endregion
12
10
  //#region src/web/pack.ts
13
11
  function createTarPacker() {
@@ -21,9 +19,7 @@ function createTarPacker() {
21
19
  controller: {
22
20
  add(header) {
23
21
  const bodyless = isBodyless(header);
24
- const h = { ...header };
25
- if (bodyless) h.size = 0;
26
- packer.add(h);
22
+ packer.add(header);
27
23
  if (bodyless) packer.endEntry();
28
24
  return new WritableStream({
29
25
  write(chunk) {
@@ -46,7 +42,6 @@ function createTarPacker() {
46
42
  }
47
43
  };
48
44
  }
49
-
50
45
  //#endregion
51
46
  //#region src/web/stream-utils.ts
52
47
  async function streamToBuffer(stream) {
@@ -72,86 +67,159 @@ async function streamToBuffer(stream) {
72
67
  }
73
68
  }
74
69
  const drain = (stream) => stream.pipeTo(new WritableStream());
75
-
76
70
  //#endregion
77
71
  //#region src/web/unpack.ts
78
72
  function createTarDecoder(options = {}) {
79
73
  const unpacker = createUnpacker(options);
74
+ const strict = options.strict ?? false;
75
+ let controller = null;
80
76
  let bodyController = null;
81
77
  let pumping = false;
82
- const pump = (controller) => {
83
- if (pumping) return;
78
+ let eofReached = false;
79
+ let sourceEnded = false;
80
+ let closed = false;
81
+ const closeBody = () => {
82
+ try {
83
+ bodyController?.close();
84
+ } catch {}
85
+ bodyController = null;
86
+ };
87
+ const fail = (reason) => {
88
+ if (closed) return;
89
+ closed = true;
90
+ try {
91
+ bodyController?.error(reason);
92
+ } catch {}
93
+ bodyController = null;
94
+ try {
95
+ controller.error(reason);
96
+ } catch {}
97
+ controller = null;
98
+ };
99
+ const finish = () => {
100
+ if (closed) return;
101
+ closed = true;
102
+ closeBody();
103
+ try {
104
+ controller.close();
105
+ } catch {}
106
+ controller = null;
107
+ };
108
+ const truncateOrFinish = () => {
109
+ if (strict) throw new Error("Tar archive is truncated.");
110
+ finish();
111
+ };
112
+ const pump = () => {
113
+ if (pumping || closed || !controller) return;
84
114
  pumping = true;
85
115
  try {
86
- while (true) if (unpacker.isEntryActive()) {
87
- if (bodyController) {
88
- if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete()) break;
89
- } else if (!unpacker.skipEntry()) break;
90
- if (unpacker.isBodyComplete()) {
91
- try {
92
- bodyController?.close();
93
- } catch {}
94
- bodyController = null;
95
- if (!unpacker.skipPadding()) break;
116
+ while (true) {
117
+ if (eofReached) {
118
+ if (sourceEnded) {
119
+ unpacker.validateEOF();
120
+ finish();
121
+ }
122
+ break;
96
123
  }
97
- } else {
98
- const header = unpacker.readHeader();
99
- if (header === null || header === void 0) break;
100
- controller.enqueue({
101
- header,
102
- body: new ReadableStream({
103
- start(c) {
104
- if (header.size === 0) c.close();
105
- else bodyController = c;
106
- },
107
- pull: () => pump(controller),
108
- cancel() {
109
- bodyController = null;
110
- pump(controller);
124
+ if (unpacker.isEntryActive()) {
125
+ if (sourceEnded && !unpacker.canFinish()) {
126
+ truncateOrFinish();
127
+ break;
128
+ }
129
+ if (bodyController) {
130
+ if ((bodyController.desiredSize ?? 1) <= 0) break;
131
+ if (unpacker.streamBody((c) => (bodyController.enqueue(c), (bodyController.desiredSize ?? 1) > 0)) === 0 && !unpacker.isBodyComplete()) {
132
+ if (sourceEnded) truncateOrFinish();
133
+ break;
111
134
  }
112
- })
113
- });
135
+ } else if (!unpacker.skipEntry()) {
136
+ if (sourceEnded) truncateOrFinish();
137
+ break;
138
+ }
139
+ if (unpacker.isBodyComplete()) {
140
+ closeBody();
141
+ if (!unpacker.skipPadding()) {
142
+ if (sourceEnded) truncateOrFinish();
143
+ break;
144
+ }
145
+ }
146
+ } else {
147
+ if ((controller.desiredSize ?? 0) < 0) break;
148
+ const header = unpacker.readHeader();
149
+ if (header === null) {
150
+ if (sourceEnded) finish();
151
+ break;
152
+ }
153
+ if (header === void 0) {
154
+ if (sourceEnded) {
155
+ unpacker.validateEOF();
156
+ finish();
157
+ break;
158
+ }
159
+ eofReached = true;
160
+ break;
161
+ }
162
+ controller.enqueue({
163
+ header,
164
+ body: new ReadableStream({
165
+ start(c) {
166
+ if (header.size === 0) c.close();
167
+ else bodyController = c;
168
+ },
169
+ pull: pump,
170
+ cancel() {
171
+ bodyController = null;
172
+ pump();
173
+ }
174
+ })
175
+ });
176
+ }
114
177
  }
115
178
  } catch (error) {
116
- try {
117
- bodyController?.error(error);
118
- } catch {}
119
- bodyController = null;
179
+ fail(error);
120
180
  throw error;
121
181
  } finally {
122
182
  pumping = false;
123
183
  }
124
184
  };
125
- return new TransformStream({
126
- transform(chunk, controller) {
127
- try {
128
- unpacker.write(chunk);
129
- pump(controller);
130
- } catch (error) {
131
- try {
132
- bodyController?.error(error);
133
- } catch {}
134
- throw error;
185
+ return {
186
+ readable: new ReadableStream({
187
+ start(c) {
188
+ controller = c;
189
+ },
190
+ pull: pump,
191
+ cancel(reason) {
192
+ if (reason !== void 0) fail(reason);
193
+ else finish();
135
194
  }
136
- },
137
- flush(controller) {
138
- try {
139
- unpacker.end();
140
- pump(controller);
141
- unpacker.validateEOF();
142
- if (unpacker.isEntryActive() && !unpacker.isBodyComplete()) try {
143
- bodyController?.close();
144
- } catch {}
145
- } catch (error) {
195
+ }, { highWaterMark: 2 }),
196
+ writable: new WritableStream({
197
+ write(chunk) {
146
198
  try {
147
- bodyController?.error(error);
148
- } catch {}
149
- throw error;
199
+ if (eofReached && strict && chunk.some((byte) => byte !== 0)) throw new Error("Invalid EOF.");
200
+ unpacker.write(chunk);
201
+ pump();
202
+ } catch (error) {
203
+ fail(error);
204
+ throw error;
205
+ }
206
+ },
207
+ close() {
208
+ try {
209
+ sourceEnded = true;
210
+ unpacker.end();
211
+ pump();
212
+ } catch (error) {
213
+ fail(error);
214
+ throw error;
215
+ }
216
+ },
217
+ abort(reason) {
218
+ fail(reason);
150
219
  }
151
- }
152
- }, void 0, { highWaterMark: 1 });
220
+ })
221
+ };
153
222
  }
154
-
155
223
  //#endregion
156
224
  //#region src/web/helpers.ts
157
225
  async function packTar(entries) {
@@ -209,6 +277,5 @@ async function unpackTar(archive, options = {}) {
209
277
  }
210
278
  return results;
211
279
  }
212
-
213
280
  //#endregion
214
- export { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarPacker, packTar, unpackTar };
281
+ export { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarPacker, packTar, unpackTar };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "modern-tar",
3
- "version": "0.7.4",
3
+ "version": "0.7.6",
4
4
  "description": "Zero dependency streaming tar parser and writer for JavaScript.",
5
5
  "author": "Ayuhito <hello@ayuhito.com>",
6
6
  "license": "MIT",
@@ -25,18 +25,20 @@
25
25
  }
26
26
  },
27
27
  "devDependencies": {
28
- "@biomejs/biome": "2.3.8",
29
- "@types/node": "^25.0.2",
30
- "@vitest/browser-playwright": "4.0.15",
31
- "@vitest/coverage-v8": "4.0.15",
32
- "tsdown": "^0.18.0",
28
+ "@biomejs/biome": "2.4.7",
29
+ "@types/node": "^25.5.0",
30
+ "@vitest/browser-playwright": "4.1.0",
31
+ "@vitest/coverage-v8": "4.1.0",
32
+ "miniflare": "^4.20260312.0",
33
+ "tsdown": "^0.21.3",
33
34
  "typescript": "^5.9.3",
34
- "vitest": "4.0.15"
35
+ "vitest": "4.1.0"
35
36
  },
36
37
  "scripts": {
37
38
  "build": "tsdown",
38
39
  "dev": "tsdown --watch",
39
40
  "test": "vitest",
41
+ "test:workers": "tsdown && vitest --config vitest.workers.config.ts --run",
40
42
  "coverage": "vitest run --coverage",
41
43
  "check": "biome check --write",
42
44
  "typecheck": "tsc --noEmit",