modern-tar 0.2.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -105,22 +105,52 @@ for await (const entry of decodedStream) {
105
105
  #### Compression/Decompression (gzip)
106
106
 
107
107
  ```typescript
108
- import { createGzipDecoder, unpackTar } from 'modern-tar';
108
+ import { createGzipEncoder, createTarPacker } from 'modern-tar';
109
109
 
110
- // Fetch a .tar.gz file stream
111
- const response = await fetch('https://example.com/archive.tar.gz');
110
+ // Create and compress a tar archive
111
+ const { readable, controller } = createTarPacker();
112
+ const compressedStream = readable.pipeThrough(createGzipEncoder());
113
+
114
+ // Add entries...
115
+ const fileStream = controller.add({ name: "file.txt", size: 5, type: "file" });
116
+ const writer = fileStream.getWriter();
117
+ await writer.write(new TextEncoder().encode("hello"));
118
+ await writer.close();
119
+ controller.finalize();
120
+
121
+ // Upload compressed .tar.gz
122
+ await fetch('/api/upload', {
123
+ method: 'POST',
124
+ body: compressedStream,
125
+ headers: { 'Content-Type': 'application/gzip' }
126
+ });
127
+ ```
128
+
129
+ ```typescript
130
+ import { createGzipDecoder, createTarDecoder, unpackTar } from 'modern-tar';
131
+
132
+ // Download and process a .tar.gz file
133
+ const response = await fetch('https://api.example.com/archive.tar.gz');
112
134
  if (!response.body) throw new Error('No response body');
113
135
 
114
- // Decompress .tar.gz to .tar stream
115
- const tarStream = response.body.pipeThrough(createGzipDecoder());
136
+ // Buffer entire archive
137
+ const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
116
138
 
117
- // Use `unpackTar` for buffered extraction or `createTarDecoder` for streaming
118
- const entries = await unpackTar(tarStream);
119
139
  for (const entry of entries) {
120
140
  console.log(`Extracted: ${entry.header.name}`);
121
141
  const content = new TextDecoder().decode(entry.data);
122
142
  console.log(`Content: ${content}`);
123
143
  }
144
+
145
+ // Or chain decompression and tar parsing using streams
146
+ const entries = response.body
147
+ .pipeThrough(createGzipDecoder())
148
+ .pipeThrough(createTarDecoder());
149
+
150
+ for await (const entry of entries) {
151
+ console.log(`Extracted: ${entry.header.name}`);
152
+ // Process entry.body ReadableStream as needed
153
+ }
124
154
  ```
125
155
 
126
156
  ### Node.js Filesystem Usage
@@ -421,7 +451,7 @@ interface ParsedTarEntry {
421
451
  // Output entry from a buffered unpack function
422
452
  interface ParsedTarEntryWithData {
423
453
  header: TarHeader;
424
- data: Uint8Array<ArrayBuffer>;
454
+ data: Uint8Array;
425
455
  }
426
456
 
427
457
  // Platform-neutral configuration for unpacking
@@ -1,4 +1,4 @@
1
- import { TarEntryData, TarHeader, UnpackOptions } from "../index-G8Ie88oV.js";
1
+ import { TarEntryData, TarHeader, UnpackOptions } from "../index-C8X7IkYR.js";
2
2
  import { Stats } from "node:fs";
3
3
  import { Readable, Writable } from "node:stream";
4
4
 
@@ -110,7 +110,7 @@ declare function packTarSources(sources: TarSource[]): Readable;
110
110
  *
111
111
  * @example
112
112
  * ```typescript
113
- * import { packTar } from '@modern-tar/fs';
113
+ * import { packTar } from 'modern-tar/fs';
114
114
  * import { createWriteStream } from 'node:fs';
115
115
  * import { pipeline } from 'node:stream/promises';
116
116
  *
@@ -142,7 +142,7 @@ declare function packTar(directoryPath: string, options?: PackOptionsFS): Readab
142
142
  *
143
143
  * @example
144
144
  * ```typescript
145
- * import { unpackTar } from '@modern-tar/fs';
145
+ * import { unpackTar } from 'modern-tar/fs';
146
146
  * import { createReadStream } from 'node:fs';
147
147
  * import { pipeline } from 'node:stream/promises';
148
148
  *
package/dist/fs/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import { BLOCK_SIZE, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker } from "../web-DcwR3pag.js";
1
+ import { BLOCK_SIZE, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, encoder, generatePax } from "../web-LcCN87Qy.js";
2
2
  import { createReadStream, createWriteStream } from "node:fs";
3
3
  import * as fs from "node:fs/promises";
4
4
  import * as path from "node:path";
@@ -6,34 +6,6 @@ import { PassThrough, Readable, Writable } from "node:stream";
6
6
  import { pipeline } from "node:stream/promises";
7
7
 
8
8
  //#region src/fs/archive.ts
9
- async function addFileToPacker(controller, sourcePath, targetPath) {
10
- const stat = await fs.stat(sourcePath);
11
- const entryStream = controller.add({
12
- name: targetPath,
13
- size: stat.size,
14
- mode: stat.mode,
15
- mtime: stat.mtime,
16
- type: "file"
17
- });
18
- await pipeline(createReadStream(sourcePath), Writable.fromWeb(entryStream));
19
- }
20
- async function addDirectoryToPacker(controller, sourcePath, targetPathInArchive) {
21
- const sourceStat = await fs.stat(sourcePath);
22
- controller.add({
23
- name: `${targetPathInArchive}/`,
24
- type: "directory",
25
- mode: sourceStat.mode,
26
- mtime: sourceStat.mtime,
27
- size: 0
28
- }).close();
29
- const dirents = await fs.readdir(sourcePath, { withFileTypes: true });
30
- for (const dirent of dirents) {
31
- const fullSourcePath = path.join(sourcePath, dirent.name);
32
- const archiveEntryPath = path.join(targetPathInArchive, dirent.name).replace(/\\/g, "/");
33
- if (dirent.isDirectory()) await addDirectoryToPacker(controller, fullSourcePath, archiveEntryPath);
34
- else if (dirent.isFile()) await addFileToPacker(controller, fullSourcePath, archiveEntryPath);
35
- }
36
- }
37
9
  /**
38
10
  * Packs multiple sources into a tar archive as a Node.js Readable stream from an
39
11
  * array of sources (files, directories, or raw content).
@@ -82,7 +54,7 @@ function packTarSources(sources) {
82
54
  }
83
55
  if (content instanceof ReadableStream) {
84
56
  const chunks = [];
85
- for await (const chunk of Readable.fromWeb(content)) chunks.push(chunk);
57
+ for await (const chunk of Readable.fromWeb(content)) chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
86
58
  const buffer = Buffer.concat(chunks);
87
59
  const writer$1 = controller.add({
88
60
  name: targetPath,
@@ -96,9 +68,10 @@ function packTarSources(sources) {
96
68
  }
97
69
  let data;
98
70
  if (content === null || content === void 0) data = new Uint8Array(0);
99
- else if (typeof content === "string") data = Buffer.from(content);
71
+ else if (content instanceof Uint8Array) data = content;
100
72
  else if (content instanceof ArrayBuffer) data = new Uint8Array(content);
101
- else data = content;
73
+ else if (typeof content === "string") data = encoder.encode(content);
74
+ else throw new TypeError(`Unsupported content type for entry "${targetPath}". Expected string, Uint8Array, ArrayBuffer, Blob, ReadableStream, or undefined.`);
102
75
  const writer = controller.add({
103
76
  name: targetPath,
104
77
  size: data.length,
@@ -114,6 +87,34 @@ function packTarSources(sources) {
114
87
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
115
88
  return Readable.fromWeb(readable);
116
89
  }
90
+ async function addFileToPacker(controller, sourcePath, targetPath) {
91
+ const stat = await fs.stat(sourcePath);
92
+ const entryStream = controller.add({
93
+ name: targetPath,
94
+ size: stat.size,
95
+ mode: stat.mode,
96
+ mtime: stat.mtime,
97
+ type: "file"
98
+ });
99
+ await pipeline(createReadStream(sourcePath), Writable.fromWeb(entryStream));
100
+ }
101
+ async function addDirectoryToPacker(controller, sourcePath, targetPathInArchive) {
102
+ const sourceStat = await fs.stat(sourcePath);
103
+ controller.add({
104
+ name: `${targetPathInArchive}/`,
105
+ type: "directory",
106
+ mode: sourceStat.mode,
107
+ mtime: sourceStat.mtime,
108
+ size: 0
109
+ }).close();
110
+ const dirents = await fs.readdir(sourcePath, { withFileTypes: true });
111
+ for (const dirent of dirents) {
112
+ const fullSourcePath = path.join(sourcePath, dirent.name);
113
+ const archiveEntryPath = path.join(targetPathInArchive, dirent.name).replace(/\\/g, "/");
114
+ if (dirent.isDirectory()) await addDirectoryToPacker(controller, fullSourcePath, archiveEntryPath);
115
+ else if (dirent.isFile()) await addFileToPacker(controller, fullSourcePath, archiveEntryPath);
116
+ }
117
+ }
117
118
 
118
119
  //#endregion
119
120
  //#region src/fs/pack.ts
@@ -129,7 +130,7 @@ function packTarSources(sources) {
129
130
  *
130
131
  * @example
131
132
  * ```typescript
132
- * import { packTar } from '@modern-tar/fs';
133
+ * import { packTar } from 'modern-tar/fs';
133
134
  * import { createWriteStream } from 'node:fs';
134
135
  * import { pipeline } from 'node:stream/promises';
135
136
  *
@@ -177,6 +178,13 @@ function packTar(directoryPath, options = {}) {
177
178
  } else if (stat.isFile()) header.size = stat.size;
178
179
  }
179
180
  const finalHeader = options.map ? options.map(header) : header;
181
+ const paxData = generatePax(finalHeader);
182
+ if (paxData) {
183
+ yield paxData.paxHeader;
184
+ yield paxData.paxBody;
185
+ const paxPadding = (BLOCK_SIZE - paxData.paxBody.length % BLOCK_SIZE) % BLOCK_SIZE;
186
+ if (paxPadding > 0) yield Buffer.alloc(paxPadding);
187
+ }
180
188
  yield createTarHeader(finalHeader);
181
189
  if (finalHeader.type === "file" && finalHeader.size > 0) {
182
190
  yield* createReadStream(fullPath);
@@ -210,7 +218,7 @@ function packTar(directoryPath, options = {}) {
210
218
  *
211
219
  * @example
212
220
  * ```typescript
213
- * import { unpackTar } from '@modern-tar/fs';
221
+ * import { unpackTar } from 'modern-tar/fs';
214
222
  * import { createReadStream } from 'node:fs';
215
223
  * import { pipeline } from 'node:stream/promises';
216
224
  *
@@ -315,6 +323,9 @@ function unpackTar(directoryPath, options = {}) {
315
323
  await fs.link(resolvedLinkTarget, outPath);
316
324
  break;
317
325
  }
326
+ default:
327
+ await entry.body.cancel();
328
+ break;
318
329
  }
319
330
  if (header.mtime) try {
320
331
  await (header.type === "symlink" ? fs.lutimes : fs.utimes)(outPath, header.mtime, header.mtime);
@@ -1,12 +1,11 @@
1
1
  //#region src/web/compression.d.ts
2
2
  /**
3
- * Creates a gzip compression stream using the native
4
- * [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) API.
3
+ * Creates a gzip compression stream that is compatible with Uint8Array streams.
5
4
  *
6
- * @returns A [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) configured for gzip compression
5
+ * @returns A {@link ReadableWritablePair} configured for gzip compression.
7
6
  * @example
8
7
  * ```typescript
9
- * import { createGzipEncoder, createTarPacker } from '@modern-tar/core';
8
+ * import { createGzipEncoder, createTarPacker } from 'modern-tar';
10
9
  *
11
10
  * // Create and compress a tar archive
12
11
  * const { readable, controller } = createTarPacker();
@@ -27,46 +26,65 @@
27
26
  * });
28
27
  * ```
29
28
  */
30
- declare function createGzipEncoder(): CompressionStream;
29
+ declare function createGzipEncoder(): ReadableWritablePair<Uint8Array, Uint8Array>;
31
30
  /**
32
- * Creates a gzip decompression stream using the native
33
- * [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) API.
31
+ * Creates a gzip decompression stream that is compatible with Uint8Array streams.
34
32
  *
35
- * @returns A [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) configured for gzip decompression
33
+ * @returns A {@link ReadableWritablePair} configured for gzip decompression.
36
34
  * @example
37
35
  * ```typescript
38
- * import { createGzipDecoder, createTarDecoder } from '@modern-tar/core';
36
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
39
37
  *
40
38
  * // Download and process a .tar.gz file
41
39
  * const response = await fetch('https://api.example.com/archive.tar.gz');
42
40
  * if (!response.body) throw new Error('No response body');
43
41
  *
44
- * // Chain decompression and tar parsing
45
- * const entries = response.body
46
- * .pipeThrough(createGzipDecoder())
47
- * .pipeThrough(createTarDecoder());
42
+ * // Buffer entire archive
43
+ * const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
48
44
  *
49
- * for await (const entry of entries) {
45
+ * for (const entry of entries) {
50
46
  * console.log(`Extracted: ${entry.header.name}`);
51
- * // Process entry.body ReadableStream as needed
47
+ * const content = new TextDecoder().decode(entry.data);
48
+ * console.log(`Content: ${content}`);
52
49
  * }
53
50
  * ```
54
51
  * @example
55
52
  * ```typescript
56
- * // Decompress local .tar.gz data
57
- * const gzippedData = new Uint8Array([...]); // your gzipped tar data
58
- * const stream = new ReadableStream({
59
- * start(controller) {
60
- * controller.enqueue(gzippedData);
61
- * controller.close();
62
- * }
63
- * });
53
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
54
+ *
55
+ * // Download and process a .tar.gz file
56
+ * const response = await fetch('https://api.example.com/archive.tar.gz');
57
+ * if (!response.body) throw new Error('No response body');
58
+ *
59
+ * // Chain decompression and tar parsing using streams
60
+ * const entries = response.body
61
+ * .pipeThrough(createGzipDecoder())
62
+ * .pipeThrough(createTarDecoder());
64
63
  *
65
- * const tarStream = stream.pipeThrough(createGzipDecoder());
66
- * // Now process tarStream with createTarDecoder()...
64
+ * for await (const entry of entries) {
65
+ * console.log(`Extracted: ${entry.header.name}`);
66
+ * // Process entry.body ReadableStream as needed
67
+ * }
67
68
  * ```
68
69
  */
69
- declare function createGzipDecoder(): DecompressionStream;
70
+ declare function createGzipDecoder(): ReadableWritablePair<Uint8Array, Uint8Array>;
71
+ //#endregion
72
+ //#region src/web/constants.d.ts
73
+
74
+ /** Type flag constants for file types. */
75
+ declare const TYPEFLAG: {
76
+ readonly file: "0";
77
+ readonly link: "1";
78
+ readonly symlink: "2";
79
+ readonly "character-device": "3";
80
+ readonly "block-device": "4";
81
+ readonly directory: "5";
82
+ readonly fifo: "6";
83
+ readonly "pax-header": "x";
84
+ readonly "pax-global-header": "g";
85
+ readonly "gnu-long-name": "L";
86
+ readonly "gnu-long-link-name": "K";
87
+ };
70
88
  //#endregion
71
89
  //#region src/web/types.d.ts
72
90
  /**
@@ -82,7 +100,7 @@ interface TarHeader {
82
100
  /** Unix file permissions as an octal number (e.g., 0o644 for rw-r--r--). Defaults to 0o644 for files and 0o755 for directories. */
83
101
  mode?: number;
84
102
  /** Entry type. Defaults to "file" if not specified. */
85
- type?: "file" | "directory" | "symlink" | "link" | "pax-header" | "pax-global-header";
103
+ type?: keyof typeof TYPEFLAG;
86
104
  /** User ID of the entry owner. */
87
105
  uid?: number;
88
106
  /** Group ID of the entry owner. */
@@ -98,16 +116,8 @@ interface TarHeader {
98
116
  }
99
117
  /**
100
118
  * Union type for entry body data that can be packed into a tar archive.
101
- *
102
- * Supports multiple input types for convenience:
103
- * - `string` - Text content (encoded as UTF-8)
104
- * - `Uint8Array` - Binary data
105
- * - `ArrayBuffer` - Binary data
106
- * - `ReadableStream<Uint8Array>` - Streaming data
107
- * - `Blob` - File-like data
108
- * - `null` - No content (for directories, etc.)
109
119
  */
110
- type TarEntryData = string | Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob | null;
120
+ type TarEntryData = string | Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob | null | undefined;
111
121
  /**
112
122
  * Represents a complete entry to be packed into a tar archive.
113
123
  *
@@ -127,11 +137,10 @@ interface ParsedTarEntry {
127
137
  }
128
138
  /**
129
139
  * Represents an extracted entry with fully buffered content.
130
-
131
140
  */
132
141
  interface ParsedTarEntryWithData {
133
142
  header: TarHeader;
134
- data: Uint8Array<ArrayBuffer>;
143
+ data: Uint8Array;
135
144
  }
136
145
  /**
137
146
  * Platform-neutral configuration options for extracting tar archives.
@@ -158,7 +167,7 @@ interface UnpackOptions {
158
167
  * @returns A `Promise` that resolves to the complete tar archive as a Uint8Array
159
168
  * @example
160
169
  * ```typescript
161
- * import { packTar } from '@modern-tar/core';
170
+ * import { packTar } from 'modern-tar';
162
171
  *
163
172
  * const entries = [
164
173
  * {
@@ -195,7 +204,7 @@ declare function packTar(entries: TarEntry[]): Promise<Uint8Array>;
195
204
  * @returns A `Promise` that resolves to an array of entries with buffered data
196
205
  * @example
197
206
  * ```typescript
198
- * import { unpackTar } from '@modern-tar/core';
207
+ * import { unpackTar } from '@modern-tar';
199
208
  *
200
209
  * // From a file upload or fetch
201
210
  * const response = await fetch('/api/archive.tar');
@@ -238,7 +247,7 @@ declare function unpackTar(archive: ArrayBuffer | Uint8Array | ReadableStream<Ui
238
247
  *
239
248
  * @example
240
249
  * ```typescript
241
- * import { createTarDecoder, createTarOptionsTransformer } from '@modern-tar/core';
250
+ * import { createTarDecoder, createTarOptionsTransformer } from 'modern-tar';
242
251
  *
243
252
  * const transformedStream = sourceStream
244
253
  * .pipeThrough(createTarDecoder())
@@ -311,9 +320,8 @@ interface TarPackController {
311
320
  /**
312
321
  * Create a streaming tar packer.
313
322
  *
314
- * This function provides a controller-based API for creating tar archives, suitable
315
- * for scenarios where entries are generated dynamically or when you need control over
316
- * the packing process. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
323
+ * Provides a controller-based API for creating tar archives, suitable for scenarios where entries are
324
+ * generated dynamically. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
317
325
  * outputs tar archive bytes as entries are added.
318
326
  *
319
327
  * @returns Object containing the readable stream and controller
@@ -322,7 +330,7 @@ interface TarPackController {
322
330
  *
323
331
  * @example
324
332
  * ```typescript
325
- * import { createTarPacker } from '@modern-tar/core';
333
+ * import { createTarPacker } from 'modern-tar';
326
334
  *
327
335
  * const { readable, controller } = createTarPacker();
328
336
  *
@@ -360,9 +368,22 @@ declare function createTarPacker(): {
360
368
  controller: TarPackController;
361
369
  };
362
370
  //#endregion
363
- //#region src/web/stream.d.ts
371
+ //#region src/web/unpack.d.ts
364
372
  /**
365
- * Creates a TransformStream that parses a tar archive into ParsedTarEntry objects.
373
+ * Create a transform stream that parses tar bytes into entries.
374
+ *
375
+ * @returns `TransformStream` that converts tar archive bytes to {@link ParsedTarEntry} objects.
376
+ * @example
377
+ * ```typescript
378
+ * import { createTarDecoder } from 'modern-tar';
379
+ *
380
+ * const decoder = createTarDecoder();
381
+ * const entriesStream = tarStream.pipeThrough(decoder);
382
+ *
383
+ * for await (const entry of entriesStream) {
384
+ * console.log(`Entry: ${entry.header.name}`);
385
+ * // Process entry.body stream as needed
386
+ * }
366
387
  */
367
388
  declare function createTarDecoder(): TransformStream<Uint8Array, ParsedTarEntry>;
368
389
  //#endregion
@@ -1,2 +1,2 @@
1
- import { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../index-G8Ie88oV.js";
1
+ import { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../index-C8X7IkYR.js";
2
2
  export { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
package/dist/web/index.js CHANGED
@@ -1,3 +1,3 @@
1
- import { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../web-DcwR3pag.js";
1
+ import { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../web-LcCN87Qy.js";
2
2
 
3
3
  export { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
@@ -1,12 +1,11 @@
1
1
  //#region src/web/compression.ts
2
2
  /**
3
- * Creates a gzip compression stream using the native
4
- * [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) API.
3
+ * Creates a gzip compression stream that is compatible with Uint8Array streams.
5
4
  *
6
- * @returns A [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) configured for gzip compression
5
+ * @returns A {@link ReadableWritablePair} configured for gzip compression.
7
6
  * @example
8
7
  * ```typescript
9
- * import { createGzipEncoder, createTarPacker } from '@modern-tar/core';
8
+ * import { createGzipEncoder, createTarPacker } from 'modern-tar';
10
9
  *
11
10
  * // Create and compress a tar archive
12
11
  * const { readable, controller } = createTarPacker();
@@ -31,41 +30,43 @@ function createGzipEncoder() {
31
30
  return new CompressionStream("gzip");
32
31
  }
33
32
  /**
34
- * Creates a gzip decompression stream using the native
35
- * [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) API.
33
+ * Creates a gzip decompression stream that is compatible with Uint8Array streams.
36
34
  *
37
- * @returns A [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) configured for gzip decompression
35
+ * @returns A {@link ReadableWritablePair} configured for gzip decompression.
38
36
  * @example
39
37
  * ```typescript
40
- * import { createGzipDecoder, createTarDecoder } from '@modern-tar/core';
38
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
41
39
  *
42
40
  * // Download and process a .tar.gz file
43
41
  * const response = await fetch('https://api.example.com/archive.tar.gz');
44
42
  * if (!response.body) throw new Error('No response body');
45
43
  *
46
- * // Chain decompression and tar parsing
47
- * const entries = response.body
48
- * .pipeThrough(createGzipDecoder())
49
- * .pipeThrough(createTarDecoder());
44
+ * // Buffer entire archive
45
+ * const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
50
46
  *
51
- * for await (const entry of entries) {
47
+ * for (const entry of entries) {
52
48
  * console.log(`Extracted: ${entry.header.name}`);
53
- * // Process entry.body ReadableStream as needed
49
+ * const content = new TextDecoder().decode(entry.data);
50
+ * console.log(`Content: ${content}`);
54
51
  * }
55
52
  * ```
56
53
  * @example
57
54
  * ```typescript
58
- * // Decompress local .tar.gz data
59
- * const gzippedData = new Uint8Array([...]); // your gzipped tar data
60
- * const stream = new ReadableStream({
61
- * start(controller) {
62
- * controller.enqueue(gzippedData);
63
- * controller.close();
64
- * }
65
- * });
55
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
56
+ *
57
+ * // Download and process a .tar.gz file
58
+ * const response = await fetch('https://api.example.com/archive.tar.gz');
59
+ * if (!response.body) throw new Error('No response body');
60
+ *
61
+ * // Chain decompression and tar parsing using streams
62
+ * const entries = response.body
63
+ * .pipeThrough(createGzipDecoder())
64
+ * .pipeThrough(createTarDecoder());
66
65
  *
67
- * const tarStream = stream.pipeThrough(createGzipDecoder());
68
- * // Now process tarStream with createTarDecoder()...
66
+ * for await (const entry of entries) {
67
+ * console.log(`Extracted: ${entry.header.name}`);
68
+ * // Process entry.body ReadableStream as needed
69
+ * }
69
70
  * ```
70
71
  */
71
72
  function createGzipDecoder() {
@@ -82,7 +83,7 @@ function createGzipDecoder() {
82
83
  *
83
84
  * @example
84
85
  * ```typescript
85
- * import { createTarDecoder, createTarOptionsTransformer } from '@modern-tar/core';
86
+ * import { createTarDecoder, createTarOptionsTransformer } from 'modern-tar';
86
87
  *
87
88
  * const transformedStream = sourceStream
88
89
  * .pipeThrough(createTarDecoder())
@@ -224,25 +225,37 @@ const USTAR = {
224
225
  };
225
226
  /** USTAR version ("00"). */
226
227
  const USTAR_VERSION = "00";
227
- /** ASCII code for a space character, used as a placeholder in checksum calculation. */
228
- const CHECKSUM_SPACE = 32;
228
+ /** USTAR max value in 8-byte octal field. */
229
+ const USTAR_MAX_UID_GID = 2097151;
230
+ /** USTAR max value in 12-byte octal field (~8GB). */
231
+ const USTAR_MAX_SIZE = 8589934591;
229
232
  /** Type flag constants for file types. */
230
233
  const TYPEFLAG = {
231
234
  file: "0",
232
235
  link: "1",
233
236
  symlink: "2",
237
+ "character-device": "3",
238
+ "block-device": "4",
234
239
  directory: "5",
240
+ fifo: "6",
235
241
  "pax-header": "x",
236
- "pax-global-header": "g"
242
+ "pax-global-header": "g",
243
+ "gnu-long-name": "L",
244
+ "gnu-long-link-name": "K"
237
245
  };
238
246
  /** Reverse mapping from flag characters to type names. */
239
247
  const FLAGTYPE = {
240
- [TYPEFLAG.file]: "file",
241
- [TYPEFLAG.link]: "link",
242
- [TYPEFLAG.symlink]: "symlink",
243
- [TYPEFLAG.directory]: "directory",
244
- [TYPEFLAG["pax-header"]]: "pax-header",
245
- [TYPEFLAG["pax-global-header"]]: "pax-global-header"
248
+ "0": "file",
249
+ "1": "link",
250
+ "2": "symlink",
251
+ "3": "character-device",
252
+ "4": "block-device",
253
+ "5": "directory",
254
+ "6": "fifo",
255
+ x: "pax-header",
256
+ g: "pax-global-header",
257
+ L: "gnu-long-name",
258
+ K: "gnu-long-link-name"
246
259
  };
247
260
 
248
261
  //#endregion
@@ -280,58 +293,130 @@ function readOctal(view, offset, size) {
280
293
  const octalString = readString(view, offset, size).trim();
281
294
  return octalString ? Number.parseInt(octalString, 8) : 0;
282
295
  }
283
-
284
- //#endregion
285
- //#region src/web/pack.ts
286
296
  /**
287
- * Creates a 512-byte USTAR format tar header block from a TarHeader object.
297
+ * Reads a numeric field that can be octal or POSIX base-256.
298
+ * This implementation handles positive integers, such as uid, gid, and size.
288
299
  */
289
- function createTarHeader(header) {
290
- const view = new Uint8Array(BLOCK_SIZE);
291
- const size = header.type === "directory" || header.type === "symlink" || header.type === "link" ? 0 : header.size ?? 0;
292
- let name = header.name;
293
- let prefix = "";
294
- if (name.length > USTAR.name.size) {
295
- let i = name.length;
296
- while (i > 0) {
297
- const slashIndex = name.lastIndexOf("/", i);
298
- if (slashIndex === -1) break;
299
- const p = name.slice(0, slashIndex);
300
- const n = name.slice(slashIndex + 1);
301
- if (p.length <= USTAR.prefix.size && n.length <= USTAR.name.size) {
302
- prefix = p;
303
- name = n;
304
- break;
305
- }
306
- i = slashIndex - 1;
300
+ function readNumeric(view, offset, size) {
301
+ if (view[offset] & 128) {
302
+ let result = view[offset] & 127;
303
+ for (let i = 1; i < size; i++) result = result << 8 | view[offset + i];
304
+ return result;
305
+ }
306
+ return readOctal(view, offset, size);
307
+ }
308
+ /**
309
+ * Reads an entire ReadableStream of Uint8Arrays into a single, combined Uint8Array.
310
+ *
311
+ * The easy way to do this is `new Response(stream).arrayBuffer()`, but we can be more
312
+ * performant by buffering the chunks directly.
313
+ */
314
+ async function streamToBuffer(stream) {
315
+ const chunks = [];
316
+ const reader = stream.getReader();
317
+ let totalLength = 0;
318
+ try {
319
+ while (true) {
320
+ const { done, value } = await reader.read();
321
+ if (done) break;
322
+ chunks.push(value);
323
+ totalLength += value.length;
307
324
  }
325
+ const result = new Uint8Array(totalLength);
326
+ let offset = 0;
327
+ for (const chunk of chunks) {
328
+ result.set(chunk, offset);
329
+ offset += chunk.length;
330
+ }
331
+ return result;
332
+ } finally {
333
+ reader.releaseLock();
308
334
  }
309
- writeString(view, USTAR.name.offset, USTAR.name.size, name);
310
- writeOctal(view, USTAR.mode.offset, USTAR.mode.size, header.mode ?? (header.type === "directory" ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
311
- writeOctal(view, USTAR.uid.offset, USTAR.uid.size, header.uid ?? 0);
312
- writeOctal(view, USTAR.gid.offset, USTAR.gid.size, header.gid ?? 0);
313
- writeOctal(view, USTAR.size.offset, USTAR.size.size, size);
314
- writeOctal(view, USTAR.mtime.offset, USTAR.mtime.size, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1e3));
315
- writeString(view, USTAR.typeflag.offset, USTAR.typeflag.size, TYPEFLAG[header.type ?? "file"]);
316
- writeString(view, USTAR.linkname.offset, USTAR.linkname.size, header.linkname);
317
- writeString(view, USTAR.magic.offset, USTAR.magic.size, "ustar\0");
318
- writeString(view, USTAR.version.offset, USTAR.version.size, USTAR_VERSION);
319
- writeString(view, USTAR.uname.offset, USTAR.uname.size, header.uname);
320
- writeString(view, USTAR.gname.offset, USTAR.gname.size, header.gname);
321
- writeString(view, USTAR.prefix.offset, USTAR.prefix.size, prefix);
322
- view.fill(CHECKSUM_SPACE, USTAR.checksum.offset, USTAR.checksum.offset + USTAR.checksum.size);
335
+ }
336
+
337
+ //#endregion
338
+ //#region src/web/checksum.ts
339
+ const CHECKSUM_SPACE = 32;
340
+ /**
341
+ * Validates the checksum of a tar header block.
342
+ */
343
+ function validateChecksum(block) {
344
+ const storedChecksum = readOctal(block, USTAR.checksum.offset, USTAR.checksum.size);
345
+ let unsignedSum = 0;
346
+ for (let i = 0; i < USTAR.checksum.offset; i++) unsignedSum += block[i];
347
+ unsignedSum += CHECKSUM_SPACE * USTAR.checksum.size;
348
+ for (let i = USTAR.checksum.offset + USTAR.checksum.size; i < block.length; i++) unsignedSum += block[i];
349
+ return storedChecksum === unsignedSum;
350
+ }
351
+ /**
352
+ * Calculates and writes the checksum to a tar header block.
353
+ */
354
+ function writeChecksum(block) {
355
+ const checksumEnd = USTAR.checksum.offset + USTAR.checksum.size;
356
+ block.fill(CHECKSUM_SPACE, USTAR.checksum.offset, checksumEnd);
323
357
  let checksum = 0;
324
- for (const byte of view) checksum += byte;
358
+ for (const byte of block) checksum += byte;
325
359
  const checksumString = `${checksum.toString(8).padStart(6, "0")}\0 `;
326
- writeString(view, USTAR.checksum.offset, USTAR.checksum.size, checksumString);
327
- return view;
360
+ const checksumBytes = encoder.encode(checksumString);
361
+ block.set(checksumBytes, USTAR.checksum.offset);
362
+ }
363
+
364
+ //#endregion
365
+ //#region src/web/pack-pax.ts
366
+ function generatePax(header) {
367
+ const paxRecords = {};
368
+ if (header.name.length > USTAR.name.size) {
369
+ if (findUstarSplit(header.name) === null) paxRecords.path = header.name;
370
+ }
371
+ if (header.linkname && header.linkname.length > USTAR.name.size) paxRecords.linkpath = header.linkname;
372
+ if (header.uname && header.uname.length > USTAR.uname.size) paxRecords.uname = header.uname;
373
+ if (header.gname && header.gname.length > USTAR.gname.size) paxRecords.gname = header.gname;
374
+ if (header.uid != null && header.uid > USTAR_MAX_UID_GID) paxRecords.uid = String(header.uid);
375
+ if (header.gid != null && header.gid > USTAR_MAX_UID_GID) paxRecords.gid = String(header.gid);
376
+ if (header.size != null && header.size > USTAR_MAX_SIZE) paxRecords.size = String(header.size);
377
+ if (header.pax) Object.assign(paxRecords, header.pax);
378
+ const paxEntries = Object.entries(paxRecords);
379
+ if (paxEntries.length === 0) return null;
380
+ const paxBody = encoder.encode(paxEntries.map(([key, value]) => {
381
+ const record = `${key}=${value}\n`;
382
+ const partLength = encoder.encode(record).length + 1;
383
+ let totalLength = partLength + String(partLength).length;
384
+ totalLength = partLength + String(totalLength).length;
385
+ return `${totalLength} ${record}`;
386
+ }).join(""));
387
+ return {
388
+ paxHeader: createTarHeader({
389
+ name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),
390
+ size: paxBody.length,
391
+ type: "pax-header",
392
+ mode: 420,
393
+ mtime: header.mtime,
394
+ uname: header.uname,
395
+ gname: header.gname,
396
+ uid: header.uid,
397
+ gid: header.gid
398
+ }),
399
+ paxBody
400
+ };
328
401
  }
402
+ function findUstarSplit(path) {
403
+ if (path.length <= USTAR.name.size) return null;
404
+ const minSlashIndex = path.length - USTAR.name.size - 1;
405
+ const slashIndex = path.lastIndexOf("/", USTAR.prefix.size);
406
+ if (slashIndex > 0 && slashIndex >= minSlashIndex) return {
407
+ prefix: path.slice(0, slashIndex),
408
+ name: path.slice(slashIndex + 1)
409
+ };
410
+ return null;
411
+ }
412
+
413
+ //#endregion
414
+ //#region src/web/pack.ts
329
415
  /**
330
416
  * Create a streaming tar packer.
331
417
  *
332
- * This function provides a controller-based API for creating tar archives, suitable
333
- * for scenarios where entries are generated dynamically or when you need control over
334
- * the packing process. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
418
+ * Provides a controller-based API for creating tar archives, suitable for scenarios where entries are
419
+ * generated dynamically. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
335
420
  * outputs tar archive bytes as entries are added.
336
421
  *
337
422
  * @returns Object containing the readable stream and controller
@@ -340,7 +425,7 @@ function createTarHeader(header) {
340
425
  *
341
426
  * @example
342
427
  * ```typescript
343
- * import { createTarPacker } from '@modern-tar/core';
428
+ * import { createTarPacker } from 'modern-tar';
344
429
  *
345
430
  * const { readable, controller } = createTarPacker();
346
431
  *
@@ -382,31 +467,12 @@ function createTarPacker() {
382
467
  controller: {
383
468
  add(header) {
384
469
  const size = header.type === "directory" || header.type === "symlink" || header.type === "link" ? 0 : header.size ?? 0;
385
- if (header.pax) {
386
- let paxRecords = "";
387
- for (const [key, value] of Object.entries(header.pax)) {
388
- const record = `${key}=${value}\n`;
389
- let length = record.length + 1;
390
- const lengthStr = String(length);
391
- length += lengthStr.length;
392
- const finalLengthStr = String(length);
393
- if (finalLengthStr.length !== lengthStr.length) length += finalLengthStr.length - lengthStr.length;
394
- paxRecords += `${length} ${record}`;
395
- }
396
- if (paxRecords) {
397
- const paxBytes = encoder.encode(paxRecords);
398
- const paxHeader = createTarHeader({
399
- name: `PaxHeader/${header.name}`,
400
- size: paxBytes.length,
401
- type: "pax-header",
402
- mode: 420,
403
- mtime: header.mtime
404
- });
405
- streamController.enqueue(paxHeader);
406
- streamController.enqueue(paxBytes);
407
- const paxPadding = (BLOCK_SIZE - paxBytes.length % BLOCK_SIZE) % BLOCK_SIZE;
408
- if (paxPadding > 0) streamController.enqueue(new Uint8Array(paxPadding));
409
- }
470
+ const paxData = generatePax(header);
471
+ if (paxData) {
472
+ streamController.enqueue(paxData.paxHeader);
473
+ streamController.enqueue(paxData.paxBody);
474
+ const paxPadding = (BLOCK_SIZE - paxData.paxBody.length % BLOCK_SIZE) % BLOCK_SIZE;
475
+ if (paxPadding > 0) streamController.enqueue(new Uint8Array(paxPadding));
410
476
  }
411
477
  const headerBlock = createTarHeader({
412
478
  ...header,
@@ -448,137 +514,140 @@ function createTarPacker() {
448
514
  }
449
515
  };
450
516
  }
451
-
452
- //#endregion
453
- //#region src/web/stream.ts
454
- function parseHeader(block) {
455
- let name = readString(block, USTAR.name.offset, USTAR.name.size);
456
- if (readString(block, USTAR.magic.offset, USTAR.magic.size) === "ustar") {
457
- const prefix = readString(block, USTAR.prefix.offset, USTAR.prefix.size);
458
- if (prefix) name = `${prefix}/${name}`;
459
- }
460
- const typeFlag = readString(block, USTAR.typeflag.offset, USTAR.typeflag.size);
461
- return {
462
- name,
463
- mode: readOctal(block, USTAR.mode.offset, USTAR.mode.size),
464
- uid: readOctal(block, USTAR.uid.offset, USTAR.uid.size),
465
- gid: readOctal(block, USTAR.gid.offset, USTAR.gid.size),
466
- size: readOctal(block, USTAR.size.offset, USTAR.size.size),
467
- mtime: /* @__PURE__ */ new Date(readOctal(block, USTAR.mtime.offset, USTAR.mtime.size) * 1e3),
468
- type: FLAGTYPE[typeFlag] || "file",
469
- linkname: readString(block, USTAR.linkname.offset, USTAR.linkname.size),
470
- uname: readString(block, USTAR.uname.offset, USTAR.uname.size),
471
- gname: readString(block, USTAR.gname.offset, USTAR.gname.size)
472
- };
473
- }
474
- function parsePax(buffer) {
475
- const pax = {};
476
- let offset = 0;
477
- while (offset < buffer.length) {
478
- const spaceIndex = buffer.indexOf(32, offset);
479
- if (spaceIndex === -1) break;
480
- const lengthStr = decoder.decode(buffer.subarray(offset, spaceIndex));
481
- const length = Number.parseInt(lengthStr, 10);
482
- if (!length) break;
483
- const recordEnd = offset + length;
484
- const [key, value] = decoder.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
485
- if (key && value !== void 0) pax[key] = value;
486
- offset = recordEnd;
517
/**
 * Creates a 512-byte USTAR format tar header block from a TarHeader object.
 *
 * Missing numeric fields fall back to zero, mode falls back to the
 * directory/file defaults, and mtime falls back to the current time.
 *
 * @returns A freshly allocated `Uint8Array` of `BLOCK_SIZE` bytes.
 */
function createTarHeader(header) {
	const block = new Uint8Array(BLOCK_SIZE);
	const entryType = header.type ?? "file";
	// Directories and (hard/sym) links never carry a payload, so their size
	// field is forced to zero regardless of what the caller supplied.
	const isBodyless = header.type === "directory" || header.type === "symlink" || header.type === "link";
	const size = isBodyless ? 0 : header.size ?? 0;
	// Long names are split across the ustar name/prefix fields — unless a PAX
	// "path" record already carries the authoritative full name.
	let name = header.name;
	let prefix = "";
	if (!header.pax?.path) {
		const split = findUstarSplit(name);
		if (split) ({ name, prefix } = split);
	}
	const fallbackMode = header.type === "directory" ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE;
	const mtimeSeconds = Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000);
	writeString(block, USTAR.name.offset, USTAR.name.size, name);
	writeOctal(block, USTAR.mode.offset, USTAR.mode.size, header.mode ?? fallbackMode);
	writeOctal(block, USTAR.uid.offset, USTAR.uid.size, header.uid ?? 0);
	writeOctal(block, USTAR.gid.offset, USTAR.gid.size, header.gid ?? 0);
	writeOctal(block, USTAR.size.offset, USTAR.size.size, size);
	writeOctal(block, USTAR.mtime.offset, USTAR.mtime.size, mtimeSeconds);
	writeString(block, USTAR.typeflag.offset, USTAR.typeflag.size, TYPEFLAG[entryType]);
	writeString(block, USTAR.linkname.offset, USTAR.linkname.size, header.linkname);
	writeString(block, USTAR.magic.offset, USTAR.magic.size, "ustar\0");
	writeString(block, USTAR.version.offset, USTAR.version.size, USTAR_VERSION);
	writeString(block, USTAR.uname.offset, USTAR.uname.size, header.uname);
	writeString(block, USTAR.gname.offset, USTAR.gname.size, header.gname);
	writeString(block, USTAR.prefix.offset, USTAR.prefix.size, prefix);
	// The checksum field is filled in last, after all other fields are populated.
	writeChecksum(block);
	return block;
}
548
+
549
+ //#endregion
550
+ //#region src/web/unpack.ts
551
/**
 * Payload parsers for tar "meta" entries. Each parser turns the entry's body
 * bytes into a partial-header override object that is applied to the entry
 * that follows (or, for pax-global-header, merged into the globals applied to
 * every subsequent entry).
 */
const metaEntryParsers = {
	"pax-header": parsePax,
	"pax-global-header": parsePax,
	"gnu-long-name": (data) => {
		return { name: readString(data, 0, data.length) };
	},
	"gnu-long-link-name": (data) => {
		return { linkname: readString(data, 0, data.length) };
	}
};
501
557
  /**
502
- * Creates a TransformStream that parses a tar archive into ParsedTarEntry objects.
558
+ * Create a transform stream that parses tar bytes into entries.
559
+ *
560
+ * @returns `TransformStream` that converts tar archive bytes to {@link ParsedTarEntry} objects.
561
+ * @example
562
+ * ```typescript
563
+ * import { createTarDecoder } from 'modern-tar';
564
+ *
565
+ * const decoder = createTarDecoder();
566
+ * const entriesStream = tarStream.pipeThrough(decoder);
567
+ *
568
+ * for await (const entry of entriesStream) {
569
+ * console.log(`Entry: ${entry.header.name}`);
570
+ * // Process entry.body stream as needed
571
+ * }
503
572
  */
504
573
  function createTarDecoder() {
505
574
  let buffer = new Uint8Array(0);
506
575
  let currentEntry = null;
507
- let pax = null;
508
- let paxGlobal = {};
509
- const closeEntryBody = () => {
510
- try {
511
- currentEntry?.controller.close();
512
- } catch {}
513
- };
576
+ let paxGlobals = {};
577
+ let nextEntryOverrides = {};
514
578
  return new TransformStream({
515
579
  transform(chunk, controller) {
516
580
  const combined = new Uint8Array(buffer.length + chunk.length);
517
581
  combined.set(buffer);
518
582
  combined.set(chunk, buffer.length);
583
+ buffer = combined;
519
584
  let offset = 0;
520
585
  while (true) {
521
- const remainingBytes = combined.length - offset;
522
586
  if (currentEntry) {
523
- const toWrite = combined.subarray(offset, offset + Math.min(remainingBytes, currentEntry.bytesLeft));
587
+ const toWrite = buffer.subarray(offset, offset + Math.min(buffer.length - offset, currentEntry.bytesLeft));
524
588
  currentEntry.controller.enqueue(toWrite);
525
589
  currentEntry.bytesLeft -= toWrite.length;
526
590
  offset += toWrite.length;
527
591
  if (currentEntry.bytesLeft === 0) {
528
592
  const padding = (BLOCK_SIZE - currentEntry.header.size % BLOCK_SIZE) % BLOCK_SIZE;
529
- if (remainingBytes - toWrite.length < padding) break;
530
- closeEntryBody();
593
+ if (buffer.length - offset < padding) break;
594
+ try {
595
+ currentEntry?.controller.close();
596
+ } catch {}
531
597
  offset += padding;
532
598
  currentEntry = null;
533
599
  } else break;
534
600
  }
535
- if (remainingBytes < BLOCK_SIZE) break;
536
- const headerBlock = combined.subarray(offset, offset + BLOCK_SIZE);
601
+ if (buffer.length - offset < BLOCK_SIZE) break;
602
+ const headerBlock = buffer.subarray(offset, offset + BLOCK_SIZE);
537
603
  if (headerBlock.every((b) => b === 0)) {
538
- controller.terminate();
539
- return;
540
- }
541
- const header = parseHeader(headerBlock);
542
- offset += BLOCK_SIZE;
543
- if (header.type === "pax-header" || header.type === "pax-global-header") {
544
- const totalPaxSize = header.size + (BLOCK_SIZE - header.size % BLOCK_SIZE) % BLOCK_SIZE;
545
- if (combined.length - offset < totalPaxSize) {
546
- offset -= BLOCK_SIZE;
547
- break;
604
+ if (buffer.length - offset < BLOCK_SIZE * 2) break;
605
+ if (buffer.subarray(offset + BLOCK_SIZE, offset + BLOCK_SIZE * 2).every((b) => b === 0)) {
606
+ controller.terminate();
607
+ return;
548
608
  }
549
- const parsedPax = parsePax(combined.subarray(offset, offset + header.size));
550
- if (header.type === "pax-header") pax = parsedPax;
551
- else paxGlobal = {
552
- ...paxGlobal,
553
- ...parsedPax
609
+ }
610
+ const header = parseUstarHeader(headerBlock);
611
+ const metaParser = metaEntryParsers[header.type];
612
+ if (metaParser) {
613
+ const dataSize = header.size;
614
+ const dataBlocksSize = Math.ceil(dataSize / BLOCK_SIZE) * BLOCK_SIZE;
615
+ if (buffer.length - offset - BLOCK_SIZE < dataBlocksSize) break;
616
+ const data = buffer.subarray(offset + BLOCK_SIZE, offset + BLOCK_SIZE + dataSize);
617
+ const overrides = metaParser(data);
618
+ if (header.type === "pax-global-header") paxGlobals = {
619
+ ...paxGlobals,
620
+ ...overrides
621
+ };
622
+ else nextEntryOverrides = {
623
+ ...nextEntryOverrides,
624
+ ...overrides
554
625
  };
555
- offset += totalPaxSize;
626
+ offset += BLOCK_SIZE + dataBlocksSize;
556
627
  continue;
557
628
  }
558
- const combinedPax = {
559
- ...paxGlobal,
560
- ...pax
561
- };
562
- if (pax || Object.keys(paxGlobal).length > 0) {
563
- applyPax(header, combinedPax);
564
- pax = null;
565
- }
629
+ const finalHeader = header;
630
+ applyOverrides(finalHeader, paxGlobals);
631
+ applyOverrides(finalHeader, nextEntryOverrides);
632
+ if (header.prefix && header.magic === "ustar" && !nextEntryOverrides.name && !paxGlobals.name) finalHeader.name = `${header.prefix}/${finalHeader.name}`;
633
+ nextEntryOverrides = {};
566
634
  let bodyController;
567
635
  const body = new ReadableStream({ start: (c) => bodyController = c });
568
636
  controller.enqueue({
569
- header,
637
+ header: finalHeader,
570
638
  body
571
639
  });
572
- if (header.size > 0) currentEntry = {
573
- header,
574
- bytesLeft: header.size,
640
+ offset += BLOCK_SIZE;
641
+ if (finalHeader.size > 0) currentEntry = {
642
+ header: finalHeader,
643
+ bytesLeft: finalHeader.size,
575
644
  controller: bodyController
576
645
  };
577
646
  else try {
578
647
  bodyController.close();
579
648
  } catch {}
580
649
  }
581
- buffer = combined.subarray(offset);
650
+ if (offset > 0) buffer = buffer.slice(offset);
582
651
  },
583
652
  flush(controller) {
584
653
  if (currentEntry) {
@@ -590,6 +659,84 @@ function createTarDecoder() {
590
659
  }
591
660
  });
592
661
  }
662
/**
 * Decode a raw 512-byte ustar header block into a header object.
 * Throws when the block fails checksum validation.
 */
function parseUstarHeader(block) {
	if (!validateChecksum(block)) throw new Error("Invalid tar header checksum.");
	// Small field accessors keep each record line readable below.
	const str = (field) => readString(block, field.offset, field.size);
	const oct = (field) => readOctal(block, field.offset, field.size);
	const num = (field) => readNumeric(block, field.offset, field.size);
	return {
		name: str(USTAR.name),
		mode: oct(USTAR.mode),
		uid: num(USTAR.uid),
		gid: num(USTAR.gid),
		size: num(USTAR.size),
		// mtime is stored as seconds since the epoch.
		mtime: new Date(num(USTAR.mtime) * 1000),
		checksum: oct(USTAR.checksum),
		// Unknown type flags deliberately fall back to "file".
		type: FLAGTYPE[str(USTAR.typeflag)] || "file",
		linkname: str(USTAR.linkname),
		magic: str(USTAR.magic),
		uname: str(USTAR.uname),
		gname: str(USTAR.gname),
		prefix: str(USTAR.prefix)
	};
}
681
+ function parsePax(buffer) {
682
+ const overrides = {};
683
+ const pax = {};
684
+ let offset = 0;
685
+ while (offset < buffer.length) {
686
+ const spaceIndex = buffer.indexOf(32, offset);
687
+ if (spaceIndex === -1) break;
688
+ const length = Number.parseInt(decoder.decode(buffer.subarray(offset, spaceIndex)), 10);
689
+ if (Number.isNaN(length) || length === 0) break;
690
+ const recordEnd = offset + length;
691
+ const [key, value] = decoder.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
692
+ if (key && value !== void 0) {
693
+ pax[key] = value;
694
+ switch (key) {
695
+ case "path":
696
+ overrides.name = value;
697
+ break;
698
+ case "linkpath":
699
+ overrides.linkname = value;
700
+ break;
701
+ case "size":
702
+ overrides.size = Number.parseInt(value, 10);
703
+ break;
704
+ case "mtime":
705
+ overrides.mtime = Number.parseFloat(value);
706
+ break;
707
+ case "uid":
708
+ overrides.uid = Number.parseInt(value, 10);
709
+ break;
710
+ case "gid":
711
+ overrides.gid = Number.parseInt(value, 10);
712
+ break;
713
+ case "uname":
714
+ overrides.uname = value;
715
+ break;
716
+ case "gname":
717
+ overrides.gname = value;
718
+ break;
719
+ }
720
+ }
721
+ offset = recordEnd;
722
+ }
723
+ if (Object.keys(pax).length > 0) overrides.pax = pax;
724
+ return overrides;
725
+ }
726
+ function applyOverrides(header, overrides) {
727
+ if (overrides.name !== void 0) header.name = overrides.name;
728
+ if (overrides.linkname !== void 0) header.linkname = overrides.linkname;
729
+ if (overrides.size !== void 0) header.size = overrides.size;
730
+ if (overrides.mtime !== void 0) header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1e3);
731
+ if (overrides.uid !== void 0) header.uid = overrides.uid;
732
+ if (overrides.gid !== void 0) header.gid = overrides.gid;
733
+ if (overrides.uname !== void 0) header.uname = overrides.uname;
734
+ if (overrides.gname !== void 0) header.gname = overrides.gname;
735
+ if (overrides.pax) header.pax = {
736
+ ...header.pax ?? {},
737
+ ...overrides.pax
738
+ };
739
+ }
593
740
 
594
741
  //#endregion
595
742
  //#region src/web/helpers.ts
@@ -602,7 +749,7 @@ function createTarDecoder() {
602
749
  * @returns A `Promise` that resolves to the complete tar archive as a Uint8Array
603
750
  * @example
604
751
  * ```typescript
605
- * import { packTar } from '@modern-tar/core';
752
+ * import { packTar } from 'modern-tar';
606
753
  *
607
754
  * const entries = [
608
755
  * {
@@ -642,16 +789,18 @@ async function packTar(entries) {
642
789
  else if (body instanceof Blob) await body.stream().pipeTo(entryStream);
643
790
  else {
644
791
  let chunk;
645
- if (typeof body === "string") chunk = encoder.encode(body);
792
+ if (body === null || body === void 0) chunk = new Uint8Array(0);
646
793
  else if (body instanceof Uint8Array) chunk = body;
647
- else chunk = new Uint8Array(body);
794
+ else if (body instanceof ArrayBuffer) chunk = new Uint8Array(body);
795
+ else if (typeof body === "string") chunk = encoder.encode(body);
796
+ else throw new TypeError(`Unsupported content type for entry "${entry.header.name}". Expected string, Uint8Array, ArrayBuffer, Blob, ReadableStream, or undefined.`);
648
797
  const writer = entryStream.getWriter();
649
798
  await writer.write(chunk);
650
799
  await writer.close();
651
800
  }
652
801
  }
653
802
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
654
- const buffer = await new Response(readable).arrayBuffer();
803
+ const buffer = await streamToBuffer(readable);
655
804
  await packingPromise;
656
805
  return new Uint8Array(buffer);
657
806
  }
@@ -665,7 +814,7 @@ async function packTar(entries) {
665
814
  * @returns A `Promise` that resolves to an array of entries with buffered data
666
815
  * @example
667
816
  * ```typescript
668
- * import { unpackTar } from '@modern-tar/core';
817
+ * import { unpackTar } from 'modern-tar';
669
818
  *
670
819
  * // From a file upload or fetch
671
820
  * const response = await fetch('/api/archive.tar');
@@ -709,7 +858,7 @@ async function unpackTar(archive, options = {}) {
709
858
  while (true) {
710
859
  const { done, value: entry } = await reader.read();
711
860
  if (done) break;
712
- const data = new Uint8Array(await new Response(entry.body).arrayBuffer());
861
+ const data = await streamToBuffer(entry.body);
713
862
  results.push({
714
863
  header: entry.header,
715
864
  data
@@ -722,4 +871,4 @@ async function unpackTar(archive, options = {}) {
722
871
  }
723
872
 
724
873
  //#endregion
725
- export { BLOCK_SIZE, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
874
+ export { BLOCK_SIZE, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, encoder, generatePax, packTar, unpackTar };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "modern-tar",
3
- "version": "0.2.2",
3
+ "version": "0.3.0",
4
4
  "description": "Zero dependency streaming tar parser and writer for JavaScript.",
5
5
  "author": "Ayuhito <hello@ayuhito.com>",
6
6
  "license": "MIT",
@@ -26,6 +26,7 @@
26
26
  "devDependencies": {
27
27
  "@biomejs/biome": "2.2.4",
28
28
  "@types/node": "^24.5.2",
29
+ "@vitest/coverage-v8": "^3.2.4",
29
30
  "tsdown": "^0.15.4",
30
31
  "typescript": "^5.9.2",
31
32
  "vitest": "^3.2.4"
@@ -34,6 +35,7 @@
34
35
  "build": "tsdown",
35
36
  "dev": "tsdown --watch",
36
37
  "test": "vitest",
38
+ "coverage": "vitest run --coverage",
37
39
  "check": "biome check --write",
38
40
  "typecheck": "tsc --noEmit"
39
41
  },