modern-tar 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -105,22 +105,52 @@ for await (const entry of decodedStream) {
105
105
  #### Compression/Decompression (gzip)
106
106
 
107
107
  ```typescript
108
- import { createGzipDecoder, unpackTar } from 'modern-tar';
108
+ import { createGzipEncoder, createTarPacker } from 'modern-tar';
109
109
 
110
- // Fetch a .tar.gz file stream
111
- const response = await fetch('https://example.com/archive.tar.gz');
110
+ // Create and compress a tar archive
111
+ const { readable, controller } = createTarPacker();
112
+ const compressedStream = readable.pipeThrough(createGzipEncoder());
113
+
114
+ // Add entries...
115
+ const fileStream = controller.add({ name: "file.txt", size: 5, type: "file" });
116
+ const writer = fileStream.getWriter();
117
+ await writer.write(new TextEncoder().encode("hello"));
118
+ await writer.close();
119
+ controller.finalize();
120
+
121
+ // Upload compressed .tar.gz
122
+ await fetch('/api/upload', {
123
+ method: 'POST',
124
+ body: compressedStream,
125
+ headers: { 'Content-Type': 'application/gzip' }
126
+ });
127
+ ```
128
+
129
+ ```typescript
130
+ import { createGzipDecoder, createTarDecoder, unpackTar } from 'modern-tar';
131
+
132
+ // Download and process a .tar.gz file
133
+ const response = await fetch('https://api.example.com/archive.tar.gz');
112
134
  if (!response.body) throw new Error('No response body');
113
135
 
114
- // Decompress .tar.gz to .tar stream
115
- const tarStream = response.body.pipeThrough(createGzipDecoder());
136
+ // Buffer entire archive
137
+ const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
116
138
 
117
- // Use `unpackTar` for buffered extraction or `createTarDecoder` for streaming
118
- const entries = await unpackTar(tarStream);
119
139
  for (const entry of entries) {
120
140
  console.log(`Extracted: ${entry.header.name}`);
121
141
  const content = new TextDecoder().decode(entry.data);
122
142
  console.log(`Content: ${content}`);
123
143
  }
144
+
145
+ // Or chain decompression and tar parsing using streams
146
+ const entries = response.body
147
+ .pipeThrough(createGzipDecoder())
148
+ .pipeThrough(createTarDecoder());
149
+
150
+ for await (const entry of entries) {
151
+ console.log(`Extracted: ${entry.header.name}`);
152
+ // Process entry.body ReadableStream as needed
153
+ }
124
154
  ```
125
155
 
126
156
  ### Node.js Filesystem Usage
@@ -421,7 +451,7 @@ interface ParsedTarEntry {
421
451
  // Output entry from a buffered unpack function
422
452
  interface ParsedTarEntryWithData {
423
453
  header: TarHeader;
424
- data: Uint8Array<ArrayBuffer>;
454
+ data: Uint8Array;
425
455
  }
426
456
 
427
457
  // Platform-neutral configuration for unpacking
@@ -1,4 +1,4 @@
1
- import { TarEntryData, TarHeader, UnpackOptions } from "../index-G8Ie88oV.js";
1
+ import { TarEntryData, TarHeader, UnpackOptions } from "../index-BnmXbcC2.js";
2
2
  import { Stats } from "node:fs";
3
3
  import { Readable, Writable } from "node:stream";
4
4
 
@@ -110,7 +110,7 @@ declare function packTarSources(sources: TarSource[]): Readable;
110
110
  *
111
111
  * @example
112
112
  * ```typescript
113
- * import { packTar } from '@modern-tar/fs';
113
+ * import { packTar } from 'modern-tar/fs';
114
114
  * import { createWriteStream } from 'node:fs';
115
115
  * import { pipeline } from 'node:stream/promises';
116
116
  *
@@ -142,7 +142,7 @@ declare function packTar(directoryPath: string, options?: PackOptionsFS): Readab
142
142
  *
143
143
  * @example
144
144
  * ```typescript
145
- * import { unpackTar } from '@modern-tar/fs';
145
+ * import { unpackTar } from 'modern-tar/fs';
146
146
  * import { createReadStream } from 'node:fs';
147
147
  * import { pipeline } from 'node:stream/promises';
148
148
  *
package/dist/fs/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import { BLOCK_SIZE, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker } from "../web-DcwR3pag.js";
1
+ import { BLOCK_SIZE, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, encoder } from "../web-DFB_2guC.js";
2
2
  import { createReadStream, createWriteStream } from "node:fs";
3
3
  import * as fs from "node:fs/promises";
4
4
  import * as path from "node:path";
@@ -6,34 +6,6 @@ import { PassThrough, Readable, Writable } from "node:stream";
6
6
  import { pipeline } from "node:stream/promises";
7
7
 
8
8
  //#region src/fs/archive.ts
9
- async function addFileToPacker(controller, sourcePath, targetPath) {
10
- const stat = await fs.stat(sourcePath);
11
- const entryStream = controller.add({
12
- name: targetPath,
13
- size: stat.size,
14
- mode: stat.mode,
15
- mtime: stat.mtime,
16
- type: "file"
17
- });
18
- await pipeline(createReadStream(sourcePath), Writable.fromWeb(entryStream));
19
- }
20
- async function addDirectoryToPacker(controller, sourcePath, targetPathInArchive) {
21
- const sourceStat = await fs.stat(sourcePath);
22
- controller.add({
23
- name: `${targetPathInArchive}/`,
24
- type: "directory",
25
- mode: sourceStat.mode,
26
- mtime: sourceStat.mtime,
27
- size: 0
28
- }).close();
29
- const dirents = await fs.readdir(sourcePath, { withFileTypes: true });
30
- for (const dirent of dirents) {
31
- const fullSourcePath = path.join(sourcePath, dirent.name);
32
- const archiveEntryPath = path.join(targetPathInArchive, dirent.name).replace(/\\/g, "/");
33
- if (dirent.isDirectory()) await addDirectoryToPacker(controller, fullSourcePath, archiveEntryPath);
34
- else if (dirent.isFile()) await addFileToPacker(controller, fullSourcePath, archiveEntryPath);
35
- }
36
- }
37
9
  /**
38
10
  * Packs multiple sources into a tar archive as a Node.js Readable stream from an
39
11
  * array of sources (files, directories, or raw content).
@@ -82,7 +54,7 @@ function packTarSources(sources) {
82
54
  }
83
55
  if (content instanceof ReadableStream) {
84
56
  const chunks = [];
85
- for await (const chunk of Readable.fromWeb(content)) chunks.push(chunk);
57
+ for await (const chunk of Readable.fromWeb(content)) chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
86
58
  const buffer = Buffer.concat(chunks);
87
59
  const writer$1 = controller.add({
88
60
  name: targetPath,
@@ -96,9 +68,10 @@ function packTarSources(sources) {
96
68
  }
97
69
  let data;
98
70
  if (content === null || content === void 0) data = new Uint8Array(0);
99
- else if (typeof content === "string") data = Buffer.from(content);
71
+ else if (content instanceof Uint8Array) data = content;
100
72
  else if (content instanceof ArrayBuffer) data = new Uint8Array(content);
101
- else data = content;
73
+ else if (typeof content === "string") data = encoder.encode(content);
74
+ else throw new TypeError(`Unsupported content type for entry "${targetPath}". Expected string, Uint8Array, ArrayBuffer, Blob, ReadableStream, or undefined.`);
102
75
  const writer = controller.add({
103
76
  name: targetPath,
104
77
  size: data.length,
@@ -114,6 +87,34 @@ function packTarSources(sources) {
114
87
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
115
88
  return Readable.fromWeb(readable);
116
89
  }
90
+ async function addFileToPacker(controller, sourcePath, targetPath) {
91
+ const stat = await fs.stat(sourcePath);
92
+ const entryStream = controller.add({
93
+ name: targetPath,
94
+ size: stat.size,
95
+ mode: stat.mode,
96
+ mtime: stat.mtime,
97
+ type: "file"
98
+ });
99
+ await pipeline(createReadStream(sourcePath), Writable.fromWeb(entryStream));
100
+ }
101
+ async function addDirectoryToPacker(controller, sourcePath, targetPathInArchive) {
102
+ const sourceStat = await fs.stat(sourcePath);
103
+ controller.add({
104
+ name: `${targetPathInArchive}/`,
105
+ type: "directory",
106
+ mode: sourceStat.mode,
107
+ mtime: sourceStat.mtime,
108
+ size: 0
109
+ }).close();
110
+ const dirents = await fs.readdir(sourcePath, { withFileTypes: true });
111
+ for (const dirent of dirents) {
112
+ const fullSourcePath = path.join(sourcePath, dirent.name);
113
+ const archiveEntryPath = path.join(targetPathInArchive, dirent.name).replace(/\\/g, "/");
114
+ if (dirent.isDirectory()) await addDirectoryToPacker(controller, fullSourcePath, archiveEntryPath);
115
+ else if (dirent.isFile()) await addFileToPacker(controller, fullSourcePath, archiveEntryPath);
116
+ }
117
+ }
117
118
 
118
119
  //#endregion
119
120
  //#region src/fs/pack.ts
@@ -129,7 +130,7 @@ function packTarSources(sources) {
129
130
  *
130
131
  * @example
131
132
  * ```typescript
132
- * import { packTar } from '@modern-tar/fs';
133
+ * import { packTar } from 'modern-tar/fs';
133
134
  * import { createWriteStream } from 'node:fs';
134
135
  * import { pipeline } from 'node:stream/promises';
135
136
  *
@@ -210,7 +211,7 @@ function packTar(directoryPath, options = {}) {
210
211
  *
211
212
  * @example
212
213
  * ```typescript
213
- * import { unpackTar } from '@modern-tar/fs';
214
+ * import { unpackTar } from 'modern-tar/fs';
214
215
  * import { createReadStream } from 'node:fs';
215
216
  * import { pipeline } from 'node:stream/promises';
216
217
  *
@@ -1,12 +1,11 @@
1
1
  //#region src/web/compression.d.ts
2
2
  /**
3
- * Creates a gzip compression stream using the native
4
- * [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) API.
3
+ * Creates a gzip compression stream that is compatible with Uint8Array streams.
5
4
  *
6
- * @returns A [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) configured for gzip compression
5
+ * @returns A {@link ReadableWritablePair} configured for gzip compression.
7
6
  * @example
8
7
  * ```typescript
9
- * import { createGzipEncoder, createTarPacker } from '@modern-tar/core';
8
+ * import { createGzipEncoder, createTarPacker } from 'modern-tar';
10
9
  *
11
10
  * // Create and compress a tar archive
12
11
  * const { readable, controller } = createTarPacker();
@@ -27,46 +26,48 @@
27
26
  * });
28
27
  * ```
29
28
  */
30
- declare function createGzipEncoder(): CompressionStream;
29
+ declare function createGzipEncoder(): ReadableWritablePair<Uint8Array, Uint8Array>;
31
30
  /**
32
- * Creates a gzip decompression stream using the native
33
- * [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) API.
31
+ * Creates a gzip decompression stream that is compatible with Uint8Array streams.
34
32
  *
35
- * @returns A [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) configured for gzip decompression
33
+ * @returns A {@link ReadableWritablePair} configured for gzip decompression.
36
34
  * @example
37
35
  * ```typescript
38
- * import { createGzipDecoder, createTarDecoder } from '@modern-tar/core';
36
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
39
37
  *
40
38
  * // Download and process a .tar.gz file
41
39
  * const response = await fetch('https://api.example.com/archive.tar.gz');
42
40
  * if (!response.body) throw new Error('No response body');
43
41
  *
44
- * // Chain decompression and tar parsing
45
- * const entries = response.body
46
- * .pipeThrough(createGzipDecoder())
47
- * .pipeThrough(createTarDecoder());
42
+ * // Buffer entire archive
43
+ * const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
48
44
  *
49
- * for await (const entry of entries) {
45
+ * for (const entry of entries) {
50
46
  * console.log(`Extracted: ${entry.header.name}`);
51
- * // Process entry.body ReadableStream as needed
47
+ * const content = new TextDecoder().decode(entry.data);
48
+ * console.log(`Content: ${content}`);
52
49
  * }
53
50
  * ```
54
51
  * @example
55
52
  * ```typescript
56
- * // Decompress local .tar.gz data
57
- * const gzippedData = new Uint8Array([...]); // your gzipped tar data
58
- * const stream = new ReadableStream({
59
- * start(controller) {
60
- * controller.enqueue(gzippedData);
61
- * controller.close();
62
- * }
63
- * });
53
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
54
+ *
55
+ * // Download and process a .tar.gz file
56
+ * const response = await fetch('https://api.example.com/archive.tar.gz');
57
+ * if (!response.body) throw new Error('No response body');
64
58
  *
65
- * const tarStream = stream.pipeThrough(createGzipDecoder());
66
- * // Now process tarStream with createTarDecoder()...
59
+ * // Chain decompression and tar parsing using streams
60
+ * const entries = response.body
61
+ * .pipeThrough(createGzipDecoder())
62
+ * .pipeThrough(createTarDecoder());
63
+ *
64
+ * for await (const entry of entries) {
65
+ * console.log(`Extracted: ${entry.header.name}`);
66
+ * // Process entry.body ReadableStream as needed
67
+ * }
67
68
  * ```
68
69
  */
69
- declare function createGzipDecoder(): DecompressionStream;
70
+ declare function createGzipDecoder(): ReadableWritablePair<Uint8Array, Uint8Array>;
70
71
  //#endregion
71
72
  //#region src/web/types.d.ts
72
73
  /**
@@ -107,7 +108,7 @@ interface TarHeader {
107
108
  * - `Blob` - File-like data
108
109
  * - `null` - No content (for directories, etc.)
109
110
  */
110
- type TarEntryData = string | Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob | null;
111
+ type TarEntryData = string | Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob | null | undefined;
111
112
  /**
112
113
  * Represents a complete entry to be packed into a tar archive.
113
114
  *
@@ -131,7 +132,7 @@ interface ParsedTarEntry {
131
132
  */
132
133
  interface ParsedTarEntryWithData {
133
134
  header: TarHeader;
134
- data: Uint8Array<ArrayBuffer>;
135
+ data: Uint8Array;
135
136
  }
136
137
  /**
137
138
  * Platform-neutral configuration options for extracting tar archives.
@@ -158,7 +159,7 @@ interface UnpackOptions {
158
159
  * @returns A `Promise` that resolves to the complete tar archive as a Uint8Array
159
160
  * @example
160
161
  * ```typescript
161
- * import { packTar } from '@modern-tar/core';
162
+ * import { packTar } from 'modern-tar';
162
163
  *
163
164
  * const entries = [
164
165
  * {
@@ -195,7 +196,7 @@ declare function packTar(entries: TarEntry[]): Promise<Uint8Array>;
195
196
  * @returns A `Promise` that resolves to an array of entries with buffered data
196
197
  * @example
197
198
  * ```typescript
198
- * import { unpackTar } from '@modern-tar/core';
199
+ * import { unpackTar } from '@modern-tar';
199
200
  *
200
201
  * // From a file upload or fetch
201
202
  * const response = await fetch('/api/archive.tar');
@@ -238,7 +239,7 @@ declare function unpackTar(archive: ArrayBuffer | Uint8Array | ReadableStream<Ui
238
239
  *
239
240
  * @example
240
241
  * ```typescript
241
- * import { createTarDecoder, createTarOptionsTransformer } from '@modern-tar/core';
242
+ * import { createTarDecoder, createTarOptionsTransformer } from 'modern-tar';
242
243
  *
243
244
  * const transformedStream = sourceStream
244
245
  * .pipeThrough(createTarDecoder())
@@ -311,9 +312,8 @@ interface TarPackController {
311
312
  /**
312
313
  * Create a streaming tar packer.
313
314
  *
314
- * This function provides a controller-based API for creating tar archives, suitable
315
- * for scenarios where entries are generated dynamically or when you need control over
316
- * the packing process. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
315
+ * Provides a controller-based API for creating tar archives, suitable for scenarios where entries are
316
+ * generated dynamically. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
317
317
  * outputs tar archive bytes as entries are added.
318
318
  *
319
319
  * @returns Object containing the readable stream and controller
@@ -322,7 +322,7 @@ interface TarPackController {
322
322
  *
323
323
  * @example
324
324
  * ```typescript
325
- * import { createTarPacker } from '@modern-tar/core';
325
+ * import { createTarPacker } from 'modern-tar';
326
326
  *
327
327
  * const { readable, controller } = createTarPacker();
328
328
  *
@@ -360,9 +360,22 @@ declare function createTarPacker(): {
360
360
  controller: TarPackController;
361
361
  };
362
362
  //#endregion
363
- //#region src/web/stream.d.ts
363
+ //#region src/web/unpack.d.ts
364
364
  /**
365
- * Creates a TransformStream that parses a tar archive into ParsedTarEntry objects.
365
+ * Create a transform stream that parses tar bytes into entries.
366
+ *
367
+ * @returns `TransformStream` that converts tar archive bytes to {@link ParsedTarEntry} objects.
368
+ * @example
369
+ * ```typescript
370
+ * import { createTarDecoder } from 'modern-tar';
371
+ *
372
+ * const decoder = createTarDecoder();
373
+ * const entriesStream = tarStream.pipeThrough(decoder);
374
+ *
375
+ * for await (const entry of entriesStream) {
376
+ * console.log(`Entry: ${entry.header.name}`);
377
+ * // Process entry.body stream as needed
378
+ * }
366
379
  */
367
380
  declare function createTarDecoder(): TransformStream<Uint8Array, ParsedTarEntry>;
368
381
  //#endregion
@@ -1,2 +1,2 @@
1
- import { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../index-G8Ie88oV.js";
1
+ import { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../index-BnmXbcC2.js";
2
2
  export { ParsedTarEntry, ParsedTarEntryWithData, TarEntry, TarEntryData, TarHeader, TarPackController, UnpackOptions, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
package/dist/web/index.js CHANGED
@@ -1,3 +1,3 @@
1
- import { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../web-DcwR3pag.js";
1
+ import { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar } from "../web-DFB_2guC.js";
2
2
 
3
3
  export { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
@@ -1,12 +1,11 @@
1
1
  //#region src/web/compression.ts
2
2
  /**
3
- * Creates a gzip compression stream using the native
4
- * [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) API.
3
+ * Creates a gzip compression stream that is compatible with Uint8Array streams.
5
4
  *
6
- * @returns A [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream) configured for gzip compression
5
+ * @returns A {@link ReadableWritablePair} configured for gzip compression.
7
6
  * @example
8
7
  * ```typescript
9
- * import { createGzipEncoder, createTarPacker } from '@modern-tar/core';
8
+ * import { createGzipEncoder, createTarPacker } from 'modern-tar';
10
9
  *
11
10
  * // Create and compress a tar archive
12
11
  * const { readable, controller } = createTarPacker();
@@ -31,41 +30,43 @@ function createGzipEncoder() {
31
30
  return new CompressionStream("gzip");
32
31
  }
33
32
  /**
34
- * Creates a gzip decompression stream using the native
35
- * [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) API.
33
+ * Creates a gzip decompression stream that is compatible with Uint8Array streams.
36
34
  *
37
- * @returns A [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream) configured for gzip decompression
35
+ * @returns A {@link ReadableWritablePair} configured for gzip decompression.
38
36
  * @example
39
37
  * ```typescript
40
- * import { createGzipDecoder, createTarDecoder } from '@modern-tar/core';
38
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
41
39
  *
42
40
  * // Download and process a .tar.gz file
43
41
  * const response = await fetch('https://api.example.com/archive.tar.gz');
44
42
  * if (!response.body) throw new Error('No response body');
45
43
  *
46
- * // Chain decompression and tar parsing
47
- * const entries = response.body
48
- * .pipeThrough(createGzipDecoder())
49
- * .pipeThrough(createTarDecoder());
44
+ * // Buffer entire archive
45
+ * const entries = await unpackTar(response.body.pipeThrough(createGzipDecoder()));
50
46
  *
51
- * for await (const entry of entries) {
47
+ * for (const entry of entries) {
52
48
  * console.log(`Extracted: ${entry.header.name}`);
53
- * // Process entry.body ReadableStream as needed
49
+ * const content = new TextDecoder().decode(entry.data);
50
+ * console.log(`Content: ${content}`);
54
51
  * }
55
52
  * ```
56
53
  * @example
57
54
  * ```typescript
58
- * // Decompress local .tar.gz data
59
- * const gzippedData = new Uint8Array([...]); // your gzipped tar data
60
- * const stream = new ReadableStream({
61
- * start(controller) {
62
- * controller.enqueue(gzippedData);
63
- * controller.close();
64
- * }
65
- * });
55
+ * import { createGzipDecoder, createTarDecoder } from 'modern-tar';
66
56
  *
67
- * const tarStream = stream.pipeThrough(createGzipDecoder());
68
- * // Now process tarStream with createTarDecoder()...
57
+ * // Download and process a .tar.gz file
58
+ * const response = await fetch('https://api.example.com/archive.tar.gz');
59
+ * if (!response.body) throw new Error('No response body');
60
+ *
61
+ * // Chain decompression and tar parsing using streams
62
+ * const entries = response.body
63
+ * .pipeThrough(createGzipDecoder())
64
+ * .pipeThrough(createTarDecoder());
65
+ *
66
+ * for await (const entry of entries) {
67
+ * console.log(`Extracted: ${entry.header.name}`);
68
+ * // Process entry.body ReadableStream as needed
69
+ * }
69
70
  * ```
70
71
  */
71
72
  function createGzipDecoder() {
@@ -82,7 +83,7 @@ function createGzipDecoder() {
82
83
  *
83
84
  * @example
84
85
  * ```typescript
85
- * import { createTarDecoder, createTarOptionsTransformer } from '@modern-tar/core';
86
+ * import { createTarDecoder, createTarOptionsTransformer } from 'modern-tar';
86
87
  *
87
88
  * const transformedStream = sourceStream
88
89
  * .pipeThrough(createTarDecoder())
@@ -237,12 +238,12 @@ const TYPEFLAG = {
237
238
  };
238
239
  /** Reverse mapping from flag characters to type names. */
239
240
  const FLAGTYPE = {
240
- [TYPEFLAG.file]: "file",
241
- [TYPEFLAG.link]: "link",
242
- [TYPEFLAG.symlink]: "symlink",
243
- [TYPEFLAG.directory]: "directory",
244
- [TYPEFLAG["pax-header"]]: "pax-header",
245
- [TYPEFLAG["pax-global-header"]]: "pax-global-header"
241
+ "0": "file",
242
+ "1": "link",
243
+ "2": "symlink",
244
+ "5": "directory",
245
+ x: "pax-header",
246
+ g: "pax-global-header"
246
247
  };
247
248
 
248
249
  //#endregion
@@ -280,58 +281,42 @@ function readOctal(view, offset, size) {
280
281
  const octalString = readString(view, offset, size).trim();
281
282
  return octalString ? Number.parseInt(octalString, 8) : 0;
282
283
  }
283
-
284
- //#endregion
285
- //#region src/web/pack.ts
286
284
  /**
287
- * Creates a 512-byte USTAR format tar header block from a TarHeader object.
285
+ * Reads an entire ReadableStream of Uint8Arrays into a single, combined Uint8Array.
286
+ *
287
+ * The easy way to do this is `new Response(stream).arrayBuffer()`, but we can be more
288
+ * performant by buffering the chunks directly.
288
289
  */
289
- function createTarHeader(header) {
290
- const view = new Uint8Array(BLOCK_SIZE);
291
- const size = header.type === "directory" || header.type === "symlink" || header.type === "link" ? 0 : header.size ?? 0;
292
- let name = header.name;
293
- let prefix = "";
294
- if (name.length > USTAR.name.size) {
295
- let i = name.length;
296
- while (i > 0) {
297
- const slashIndex = name.lastIndexOf("/", i);
298
- if (slashIndex === -1) break;
299
- const p = name.slice(0, slashIndex);
300
- const n = name.slice(slashIndex + 1);
301
- if (p.length <= USTAR.prefix.size && n.length <= USTAR.name.size) {
302
- prefix = p;
303
- name = n;
304
- break;
305
- }
306
- i = slashIndex - 1;
290
+ async function streamToBuffer(stream) {
291
+ const chunks = [];
292
+ const reader = stream.getReader();
293
+ let totalLength = 0;
294
+ try {
295
+ while (true) {
296
+ const { done, value } = await reader.read();
297
+ if (done) break;
298
+ chunks.push(value);
299
+ totalLength += value.length;
307
300
  }
301
+ const result = new Uint8Array(totalLength);
302
+ let offset = 0;
303
+ for (const chunk of chunks) {
304
+ result.set(chunk, offset);
305
+ offset += chunk.length;
306
+ }
307
+ return result;
308
+ } finally {
309
+ reader.releaseLock();
308
310
  }
309
- writeString(view, USTAR.name.offset, USTAR.name.size, name);
310
- writeOctal(view, USTAR.mode.offset, USTAR.mode.size, header.mode ?? (header.type === "directory" ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
311
- writeOctal(view, USTAR.uid.offset, USTAR.uid.size, header.uid ?? 0);
312
- writeOctal(view, USTAR.gid.offset, USTAR.gid.size, header.gid ?? 0);
313
- writeOctal(view, USTAR.size.offset, USTAR.size.size, size);
314
- writeOctal(view, USTAR.mtime.offset, USTAR.mtime.size, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1e3));
315
- writeString(view, USTAR.typeflag.offset, USTAR.typeflag.size, TYPEFLAG[header.type ?? "file"]);
316
- writeString(view, USTAR.linkname.offset, USTAR.linkname.size, header.linkname);
317
- writeString(view, USTAR.magic.offset, USTAR.magic.size, "ustar\0");
318
- writeString(view, USTAR.version.offset, USTAR.version.size, USTAR_VERSION);
319
- writeString(view, USTAR.uname.offset, USTAR.uname.size, header.uname);
320
- writeString(view, USTAR.gname.offset, USTAR.gname.size, header.gname);
321
- writeString(view, USTAR.prefix.offset, USTAR.prefix.size, prefix);
322
- view.fill(CHECKSUM_SPACE, USTAR.checksum.offset, USTAR.checksum.offset + USTAR.checksum.size);
323
- let checksum = 0;
324
- for (const byte of view) checksum += byte;
325
- const checksumString = `${checksum.toString(8).padStart(6, "0")}\0 `;
326
- writeString(view, USTAR.checksum.offset, USTAR.checksum.size, checksumString);
327
- return view;
328
311
  }
312
+
313
+ //#endregion
314
+ //#region src/web/pack.ts
329
315
  /**
330
316
  * Create a streaming tar packer.
331
317
  *
332
- * This function provides a controller-based API for creating tar archives, suitable
333
- * for scenarios where entries are generated dynamically or when you need control over
334
- * the packing process. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
318
+ * Provides a controller-based API for creating tar archives, suitable for scenarios where entries are
319
+ * generated dynamically. The returned [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
335
320
  * outputs tar archive bytes as entries are added.
336
321
  *
337
322
  * @returns Object containing the readable stream and controller
@@ -340,7 +325,7 @@ function createTarHeader(header) {
340
325
  *
341
326
  * @example
342
327
  * ```typescript
343
- * import { createTarPacker } from '@modern-tar/core';
328
+ * import { createTarPacker } from 'modern-tar';
344
329
  *
345
330
  * const { readable, controller } = createTarPacker();
346
331
  *
@@ -448,69 +433,73 @@ function createTarPacker() {
448
433
  }
449
434
  };
450
435
  }
451
-
452
- //#endregion
453
- //#region src/web/stream.ts
454
- function parseHeader(block) {
455
- let name = readString(block, USTAR.name.offset, USTAR.name.size);
456
- if (readString(block, USTAR.magic.offset, USTAR.magic.size) === "ustar") {
457
- const prefix = readString(block, USTAR.prefix.offset, USTAR.prefix.size);
458
- if (prefix) name = `${prefix}/${name}`;
459
- }
460
- const typeFlag = readString(block, USTAR.typeflag.offset, USTAR.typeflag.size);
461
- return {
462
- name,
463
- mode: readOctal(block, USTAR.mode.offset, USTAR.mode.size),
464
- uid: readOctal(block, USTAR.uid.offset, USTAR.uid.size),
465
- gid: readOctal(block, USTAR.gid.offset, USTAR.gid.size),
466
- size: readOctal(block, USTAR.size.offset, USTAR.size.size),
467
- mtime: /* @__PURE__ */ new Date(readOctal(block, USTAR.mtime.offset, USTAR.mtime.size) * 1e3),
468
- type: FLAGTYPE[typeFlag] || "file",
469
- linkname: readString(block, USTAR.linkname.offset, USTAR.linkname.size),
470
- uname: readString(block, USTAR.uname.offset, USTAR.uname.size),
471
- gname: readString(block, USTAR.gname.offset, USTAR.gname.size)
472
- };
473
- }
474
- function parsePax(buffer) {
475
- const pax = {};
476
- let offset = 0;
477
- while (offset < buffer.length) {
478
- const spaceIndex = buffer.indexOf(32, offset);
479
- if (spaceIndex === -1) break;
480
- const lengthStr = decoder.decode(buffer.subarray(offset, spaceIndex));
481
- const length = Number.parseInt(lengthStr, 10);
482
- if (!length) break;
483
- const recordEnd = offset + length;
484
- const [key, value] = decoder.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
485
- if (key && value !== void 0) pax[key] = value;
486
- offset = recordEnd;
436
+ /**
437
+ * Creates a 512-byte USTAR format tar header block from a TarHeader object.
438
+ */
439
+ function createTarHeader(header) {
440
+ const view = new Uint8Array(BLOCK_SIZE);
441
+ const size = header.type === "directory" || header.type === "symlink" || header.type === "link" ? 0 : header.size ?? 0;
442
+ let name = header.name;
443
+ let prefix = "";
444
+ if (name.length > USTAR.name.size) {
445
+ let i = name.length;
446
+ while (i > 0) {
447
+ const slashIndex = name.lastIndexOf("/", i);
448
+ if (slashIndex === -1) break;
449
+ const p = name.slice(0, slashIndex);
450
+ const n = name.slice(slashIndex + 1);
451
+ if (p.length <= USTAR.prefix.size && n.length <= USTAR.name.size) {
452
+ prefix = p;
453
+ name = n;
454
+ break;
455
+ }
456
+ i = slashIndex - 1;
457
+ }
487
458
  }
488
- return pax;
489
- }
490
- function applyPax(header, pax) {
491
- header.name = pax.path ?? header.name;
492
- header.linkname = pax.linkpath ?? header.linkname;
493
- if (pax.size) header.size = Number.parseInt(pax.size, 10);
494
- if (pax.mtime) header.mtime = /* @__PURE__ */ new Date(Number.parseFloat(pax.mtime) * 1e3);
495
- if (pax.uid) header.uid = Number.parseInt(pax.uid, 10);
496
- if (pax.gid) header.gid = Number.parseInt(pax.gid, 10);
497
- header.uname = pax.uname ?? header.uname;
498
- header.gname = pax.gname ?? header.gname;
499
- header.pax = pax;
459
+ writeString(view, USTAR.name.offset, USTAR.name.size, name);
460
+ writeOctal(view, USTAR.mode.offset, USTAR.mode.size, header.mode ?? (header.type === "directory" ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
461
+ writeOctal(view, USTAR.uid.offset, USTAR.uid.size, header.uid ?? 0);
462
+ writeOctal(view, USTAR.gid.offset, USTAR.gid.size, header.gid ?? 0);
463
+ writeOctal(view, USTAR.size.offset, USTAR.size.size, size);
464
+ writeOctal(view, USTAR.mtime.offset, USTAR.mtime.size, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1e3));
465
+ writeString(view, USTAR.typeflag.offset, USTAR.typeflag.size, TYPEFLAG[header.type ?? "file"]);
466
+ writeString(view, USTAR.linkname.offset, USTAR.linkname.size, header.linkname);
467
+ writeString(view, USTAR.magic.offset, USTAR.magic.size, "ustar\0");
468
+ writeString(view, USTAR.version.offset, USTAR.version.size, USTAR_VERSION);
469
+ writeString(view, USTAR.uname.offset, USTAR.uname.size, header.uname);
470
+ writeString(view, USTAR.gname.offset, USTAR.gname.size, header.gname);
471
+ writeString(view, USTAR.prefix.offset, USTAR.prefix.size, prefix);
472
+ view.fill(CHECKSUM_SPACE, USTAR.checksum.offset, USTAR.checksum.offset + USTAR.checksum.size);
473
+ let checksum = 0;
474
+ for (const byte of view) checksum += byte;
475
+ const checksumString = `${checksum.toString(8).padStart(6, "0")}\0 `;
476
+ writeString(view, USTAR.checksum.offset, USTAR.checksum.size, checksumString);
477
+ return view;
500
478
  }
479
+
480
+ //#endregion
481
+ //#region src/web/unpack.ts
501
482
  /**
502
- * Creates a TransformStream that parses a tar archive into ParsedTarEntry objects.
483
+ * Create a transform stream that parses tar bytes into entries.
484
+ *
485
+ * @returns `TransformStream` that converts tar archive bytes to {@link ParsedTarEntry} objects.
486
+ * @example
487
+ * ```typescript
488
+ * import { createTarDecoder } from 'modern-tar';
489
+ *
490
+ * const decoder = createTarDecoder();
491
+ * const entriesStream = tarStream.pipeThrough(decoder);
492
+ *
493
+ * for await (const entry of entriesStream) {
494
+ * console.log(`Entry: ${entry.header.name}`);
495
+ * // Process entry.body stream as needed
496
+ * }
503
497
  */
504
498
  function createTarDecoder() {
505
499
  let buffer = new Uint8Array(0);
506
500
  let currentEntry = null;
507
501
  let pax = null;
508
502
  let paxGlobal = {};
509
- const closeEntryBody = () => {
510
- try {
511
- currentEntry?.controller.close();
512
- } catch {}
513
- };
514
503
  return new TransformStream({
515
504
  transform(chunk, controller) {
516
505
  const combined = new Uint8Array(buffer.length + chunk.length);
@@ -527,7 +516,9 @@ function createTarDecoder() {
527
516
  if (currentEntry.bytesLeft === 0) {
528
517
  const padding = (BLOCK_SIZE - currentEntry.header.size % BLOCK_SIZE) % BLOCK_SIZE;
529
518
  if (remainingBytes - toWrite.length < padding) break;
530
- closeEntryBody();
519
+ try {
520
+ currentEntry?.controller.close();
521
+ } catch {}
531
522
  offset += padding;
532
523
  currentEntry = null;
533
524
  } else break;
@@ -590,6 +581,62 @@ function createTarDecoder() {
590
581
  }
591
582
  });
592
583
  }
584
/**
 * Decode one 512-byte ustar header block into a header object.
 * Throws if the stored checksum does not match the block contents.
 */
function parseHeader(block) {
	if (!validateChecksum(block)) throw new Error("Invalid tar header checksum");
	// Small local readers keyed on a USTAR field descriptor.
	const str = (field) => readString(block, field.offset, field.size);
	const num = (field) => readOctal(block, field.offset, field.size);
	let name = str(USTAR.name);
	// POSIX ustar archives split long paths across prefix + name.
	if (str(USTAR.magic) === "ustar") {
		const prefix = str(USTAR.prefix);
		if (prefix) name = `${prefix}/${name}`;
	}
	return {
		name,
		mode: num(USTAR.mode),
		uid: num(USTAR.uid),
		gid: num(USTAR.gid),
		size: num(USTAR.size),
		// mtime is stored as seconds since epoch; Date wants milliseconds.
		mtime: new Date(num(USTAR.mtime) * 1e3),
		// Unknown type flags fall back to a regular file.
		type: FLAGTYPE[str(USTAR.typeflag)] || "file",
		linkname: str(USTAR.linkname),
		uname: str(USTAR.uname),
		gname: str(USTAR.gname)
	};
}
605
/**
 * Parse a PAX extended-header payload into a key/value record map.
 *
 * Each record has the form `"<length> <key>=<value>\n"` where `<length>`
 * counts the entire record including the length digits, the space, and the
 * trailing newline. Malformed records (no space, zero/NaN length, a length
 * running past the buffer, or a missing `=`) terminate or skip parsing
 * rather than throw, matching best-effort tar readers.
 *
 * @param {Uint8Array} buffer - Raw bytes of the PAX extended header entry.
 * @returns {Object} Map of PAX keys (e.g. `path`, `size`) to string values.
 */
function parsePax(buffer) {
	const pax = {};
	const textDecoder = new TextDecoder();
	let offset = 0;
	while (offset < buffer.length) {
		const spaceIndex = buffer.indexOf(32, offset);
		if (spaceIndex === -1) break;
		const length = Number.parseInt(textDecoder.decode(buffer.subarray(offset, spaceIndex)), 10);
		// NaN or 0 length would never advance; stop instead of looping forever.
		if (!length) break;
		const recordEnd = offset + length;
		// A length field that overruns the buffer means a truncated/corrupt header.
		if (recordEnd > buffer.length) break;
		// Strip the trailing newline; split on the FIRST '=' only.
		// Note: String.prototype.split(sep, 2) would TRUNCATE a value
		// containing '=' (JS limit !== Python maxsplit), so slice manually.
		const record = textDecoder.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1));
		const eq = record.indexOf("=");
		if (eq > 0) pax[record.slice(0, eq)] = record.slice(eq + 1);
		offset = recordEnd;
	}
	return pax;
}
621
+ function applyPax(header, pax) {
622
+ header.name = pax.path ?? header.name;
623
+ header.linkname = pax.linkpath ?? header.linkname;
624
+ if (pax.size) header.size = Number.parseInt(pax.size, 10);
625
+ if (pax.mtime) header.mtime = /* @__PURE__ */ new Date(Number.parseFloat(pax.mtime) * 1e3);
626
+ if (pax.uid) header.uid = Number.parseInt(pax.uid, 10);
627
+ if (pax.gid) header.gid = Number.parseInt(pax.gid, 10);
628
+ header.uname = pax.uname ?? header.uname;
629
+ header.gname = pax.gname ?? header.gname;
630
+ header.pax = pax;
631
+ }
632
/**
 * Verify a ustar header block's checksum.
 * Per the tar spec, the checksum is the byte sum of the block with the
 * checksum field itself treated as ASCII spaces.
 */
function validateChecksum(block) {
	const expected = readOctal(block, USTAR.checksum.offset, USTAR.checksum.size);
	// Work on a copy so the caller's block is never mutated.
	const scratch = Uint8Array.from(block);
	scratch.fill(CHECKSUM_SPACE, USTAR.checksum.offset, USTAR.checksum.offset + USTAR.checksum.size);
	const actual = scratch.reduce((sum, byte) => sum + byte, 0);
	return actual === expected;
}
593
640
 
594
641
  //#endregion
595
642
  //#region src/web/helpers.ts
@@ -602,7 +649,7 @@ function createTarDecoder() {
602
649
  * @returns A `Promise` that resolves to the complete tar archive as a Uint8Array
603
650
  * @example
604
651
  * ```typescript
605
- * import { packTar } from '@modern-tar/core';
652
+ * import { packTar } from 'modern-tar';
606
653
  *
607
654
  * const entries = [
608
655
  * {
@@ -642,16 +689,18 @@ async function packTar(entries) {
642
689
  else if (body instanceof Blob) await body.stream().pipeTo(entryStream);
643
690
  else {
644
691
  let chunk;
645
- if (typeof body === "string") chunk = encoder.encode(body);
692
+ if (body === null || body === void 0) chunk = new Uint8Array(0);
646
693
  else if (body instanceof Uint8Array) chunk = body;
647
- else chunk = new Uint8Array(body);
694
+ else if (body instanceof ArrayBuffer) chunk = new Uint8Array(body);
695
+ else if (typeof body === "string") chunk = encoder.encode(body);
696
+ else throw new TypeError(`Unsupported content type for entry "${entry.header.name}". Expected string, Uint8Array, ArrayBuffer, Blob, ReadableStream, or undefined.`);
648
697
  const writer = entryStream.getWriter();
649
698
  await writer.write(chunk);
650
699
  await writer.close();
651
700
  }
652
701
  }
653
702
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
654
- const buffer = await new Response(readable).arrayBuffer();
703
+ const buffer = await streamToBuffer(readable);
655
704
  await packingPromise;
656
705
  return new Uint8Array(buffer);
657
706
  }
@@ -665,7 +714,7 @@ async function packTar(entries) {
665
714
  * @returns A `Promise` that resolves to an array of entries with buffered data
666
715
  * @example
667
716
  * ```typescript
668
- * import { unpackTar } from '@modern-tar/core';
717
+ * import { unpackTar } from '@modern-tar';
669
718
  *
670
719
  * // From a file upload or fetch
671
720
  * const response = await fetch('/api/archive.tar');
@@ -709,7 +758,7 @@ async function unpackTar(archive, options = {}) {
709
758
  while (true) {
710
759
  const { done, value: entry } = await reader.read();
711
760
  if (done) break;
712
- const data = new Uint8Array(await new Response(entry.body).arrayBuffer());
761
+ const data = await streamToBuffer(entry.body);
713
762
  results.push({
714
763
  header: entry.header,
715
764
  data
@@ -722,4 +771,4 @@ async function unpackTar(archive, options = {}) {
722
771
  }
723
772
 
724
773
  //#endregion
725
- export { BLOCK_SIZE, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, packTar, unpackTar };
774
+ export { BLOCK_SIZE, createGzipDecoder, createGzipEncoder, createTarDecoder, createTarHeader, createTarOptionsTransformer, createTarPacker, encoder, packTar, unpackTar };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "modern-tar",
3
- "version": "0.2.2",
3
+ "version": "0.2.3",
4
4
  "description": "Zero dependency streaming tar parser and writer for JavaScript.",
5
5
  "author": "Ayuhito <hello@ayuhito.com>",
6
6
  "license": "MIT",