modern-tar 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +442 -0
- package/dist/fs/index.d.ts +102 -0
- package/dist/fs/index.js +193 -0
- package/dist/index-BLp7i3zL.d.ts +369 -0
- package/dist/web/index.d.ts +2 -0
- package/dist/web/index.js +3 -0
- package/dist/web-BsjPG7md.js +681 -0
- package/package.json +55 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Ayuhito

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,442 @@
# 🗄️ modern-tar

Zero-dependency, cross-platform, streaming tar archive library for every JavaScript runtime. Built with the browser-native Web Streams API for performance and memory efficiency.

## Features

- 🚀 **Streaming Architecture** - Supports large archives without loading everything into memory.
- 📋 **Standards Compliant** - Full USTAR format support with PAX extensions. Compatible with GNU tar, BSD tar, and other standard implementations.
- 🗜️ **Compression** - Includes helpers for gzip compression/decompression.
- 📝 **TypeScript First** - Full type safety with detailed TypeDoc documentation.
- ⚡ **Zero Dependencies** - No external dependencies, minimal bundle size.
- 🌐 **Cross-Platform** - Works in browsers, Node.js, Cloudflare Workers, and other JavaScript runtimes.
- 📁 **Node.js Integration** - Additional high-level APIs for directory packing and extraction.

## Table of Contents

- [Installation](#installation)
- [Usage](#usage)
  - [Core Usage (Browser, Node.js, etc.)](#core-usage)
  - [Node.js Filesystem Usage](#nodejs-filesystem-usage)
- [API Reference](#api-reference)
  - [Core API (`modern-tar`)](#core-api-modern-tar)
  - [Node.js Filesystem API (`modern-tar/fs`)](#nodejs-filesystem-api-modern-tarfs)
- [Types](#types)
  - [Core Types](#core-types)
  - [Filesystem Types](#filesystem-types)
- [Compatibility](#compatibility)

## Installation

```bash
npm install modern-tar
```

## Usage

This package provides two entry points:

- `modern-tar`: The core, cross-platform streaming API (works everywhere).
- `modern-tar/fs`: High-level filesystem utilities for Node.js.

### Core Usage

These APIs use the Web Streams API and can be used in any modern JavaScript environment.

#### Simple

```typescript
import { packTar, unpackTar } from 'modern-tar';

// Pack entries into a tar buffer
const entries = [
  { header: { name: "file.txt", size: 5 }, body: "hello" },
  { header: { name: "dir/", type: "directory", size: 0 } },
  { header: { name: "dir/nested.txt", size: 3 }, body: new Uint8Array([97, 98, 99]) } // "abc"
];

// Accepts string, Uint8Array, Blob, ReadableStream<Uint8Array> and more...
const tarBuffer = await packTar(entries);

// Unpack tar buffer into entries
for (const entry of await unpackTar(tarBuffer)) {
  console.log(`File: ${entry.header.name}`);
  const content = new TextDecoder().decode(entry.data);
  console.log(`Content: ${content}`);
}
```

#### Streaming

```typescript
import { createTarPacker, createTarDecoder } from 'modern-tar';

// Create a tar packer
const { readable, controller } = createTarPacker();

// Add entries dynamically
const fileStream = controller.add({
  name: "dynamic.txt",
  size: 5,
  type: "file"
});

// Write content to the stream
const writer = fileStream.getWriter();
await writer.write(new TextEncoder().encode("hello"));
await writer.close();

// When done adding entries, finalize the archive
controller.finalize();

// `readable` now contains the complete tar archive which can be piped or processed
const tarStream = readable;

// Create a tar decoder
const decoder = createTarDecoder();
const decodedStream = tarStream.pipeThrough(decoder);
for await (const entry of decodedStream) {
  console.log(`Decoded: ${entry.header.name}`);
  // Process `entry.body` stream as needed
}
```

#### Compression/Decompression (gzip)

```typescript
import { createGzipDecoder, unpackTar } from 'modern-tar';

// Fetch a .tar.gz file stream
const response = await fetch('https://example.com/archive.tar.gz');
if (!response.body) throw new Error('No response body');

// Decompress .tar.gz to .tar stream
const tarStream = response.body.pipeThrough(createGzipDecoder());

// Use `unpackTar` for buffered extraction or `createTarDecoder` for streaming
for (const entry of await unpackTar(tarStream)) {
  console.log(`Extracted: ${entry.header.name}`);
  const content = new TextDecoder().decode(entry.data);
  console.log(`Content: ${content}`);
}
```
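
Creating a `.tar.gz` works in the opposite direction. A minimal sketch (assuming an environment where `Blob` and `Response` are available, as in browsers and Node.js 18+):

```typescript
import { packTar, createGzipEncoder } from 'modern-tar';

// Pack entries into a tar buffer, then gzip it via a Web stream
const tarBuffer = await packTar([
  { header: { name: "hello.txt", size: 5 }, body: "hello" }
]);
const gzipStream = new Blob([tarBuffer]).stream().pipeThrough(createGzipEncoder());

// Collect the compressed bytes (or pipe `gzipStream` wherever it is needed)
const tarGzBytes = new Uint8Array(await new Response(gzipStream).arrayBuffer());
```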

### Node.js Filesystem Usage

These APIs use Node.js streams when interacting with the local filesystem.

#### Simple

```typescript
import { packTar, unpackTar } from 'modern-tar/fs';
import { createWriteStream, createReadStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Pack a directory into a tar file
const tarStream = packTar('./my/project');
const fileStream = createWriteStream('./project.tar');
await pipeline(tarStream, fileStream);

// Extract a tar file to a directory
const tarReadStream = createReadStream('./project.tar');
const extractStream = unpackTar('./output/directory');
await pipeline(tarReadStream, extractStream);
```
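
To produce a `.tar.gz` file from a directory, the packed stream can also be piped through Node's built-in gzip transform from `node:zlib` — a sketch using standard Node.js APIs rather than anything specific to this package:

```typescript
import { packTar } from 'modern-tar/fs';
import { createWriteStream } from 'node:fs';
import { createGzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

// Pack a directory, gzip the tar bytes, and write the result in one pipeline
await pipeline(
  packTar('./my/project'),
  createGzip(),
  createWriteStream('./project.tar.gz')
);
```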

#### Filtering and Transformation

```typescript
import { packTar, unpackTar } from 'modern-tar/fs';
import { createReadStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Pack with filtering
const packStream = packTar('./my/project', {
  filter: (filePath, stats) => !filePath.includes('node_modules'),
  map: (header) => ({ ...header, mode: 0o644 }), // Set all files to 644
  dereference: true // Follow symlinks instead of archiving them
});

// Unpack with advanced options
const sourceStream = createReadStream('./archive.tar');
const extractStream = unpackTar('./output', {
  // Core options
  strip: 1, // Remove first directory level
  filter: (header) => header.name.endsWith('.js'), // Only extract JS files
  map: (header) => ({ ...header, name: header.name.toLowerCase() }), // Transform names

  // Filesystem-specific options
  fmode: 0o644, // Override file permissions
  dmode: 0o755 // Override directory permissions
});

await pipeline(sourceStream, extractStream);
```

## API Reference

### Core API (`modern-tar`)

#### `packTar(entries: TarEntry[]): Promise<Uint8Array>`

Pack an array of entries into a tar archive buffer.

- **`entries`**: Array of `TarEntry` objects to pack.
- **Returns**: Promise resolving to a complete tar archive as a `Uint8Array`.

**Example:**

```typescript
const entries = [
  { header: { name: "file.txt", size: 5 }, body: "hello" },
  { header: { name: "dir/", type: "directory", size: 0 } }
];
const tarBuffer = await packTar(entries);
```

#### `unpackTar(archive: ArrayBuffer | Uint8Array, options?: UnpackOptions): Promise<ParsedTarEntryWithData[]>`

Extract all entries from a tar archive buffer with optional filtering and transformation.

- **`archive`**: Complete tar archive as `ArrayBuffer` or `Uint8Array`.
- **`options`**: Optional extraction configuration (see `UnpackOptions`).
- **Returns**: Promise resolving to an array of entries with buffered data.

**Example:**

```typescript
// With filtering and path manipulation
const filteredEntries = await unpackTar(tarBuffer, {
  strip: 1, // Remove first path component
  filter: (header) => header.name.endsWith('.js'),
  map: (header) => ({ ...header, name: header.name.toLowerCase() })
});
```

#### `createTarPacker(): { readable, controller }`

Create a streaming tar packer for dynamic entry creation.

- **Returns**: An object containing:
  - `readable` - `ReadableStream` outputting tar archive bytes.
  - `controller` - `TarPackController` for adding entries.

**Example:**

```typescript
const { readable, controller } = createTarPacker();

// Add entries dynamically
const stream1 = controller.add({ name: "file1.txt", size: 5 });
const stream2 = controller.add({ name: "file2.txt", size: 4 });

// Write content to streams and finalize
// ...
controller.finalize();
```

#### `createTarDecoder(): TransformStream<Uint8Array, ParsedTarEntry>`

Create a transform stream that parses tar bytes into entries.

- **Returns**: `TransformStream` that converts tar archive bytes to `ParsedTarEntry` objects.

**Example:**

```typescript
const decoder = createTarDecoder();
const entriesStream = tarStream.pipeThrough(decoder);

for await (const entry of entriesStream) {
  console.log(`Entry: ${entry.header.name}`);
  // Process entry.body stream as needed
}
```

#### `createTarOptionsTransformer(options?: UnpackOptions): TransformStream<ParsedTarEntry, ParsedTarEntry>`

Create a transform stream that applies unpacking options (`strip`, `filter`, `map`) to tar entries.

- **`options`**: Optional unpacking configuration (see `UnpackOptions`).
- **Returns**: `TransformStream` that processes `ParsedTarEntry` objects.

**Example:**

```typescript
import { createTarDecoder, createTarOptionsTransformer } from 'modern-tar';

const transformedStream = sourceStream
  .pipeThrough(createTarDecoder())
  .pipeThrough(createTarOptionsTransformer({
    strip: 1,
    filter: (header) => header.name.endsWith('.txt'),
  }));
```

#### `createGzipEncoder(): CompressionStream`

Create a gzip compression stream for `.tar.gz` creation.

**Example:**

```typescript
const tarStream = /* ... */;
const compressedStream = tarStream.pipeThrough(createGzipEncoder());
```

#### `createGzipDecoder(): DecompressionStream`

Create a gzip decompression stream for `.tar.gz` extraction.

**Example:**

```typescript
const gzipStream = /* ... */;
const tarStream = gzipStream.pipeThrough(createGzipDecoder());
```

### Node.js Filesystem API (`modern-tar/fs`)

#### `packTar(directoryPath: string, options?: PackOptionsFS): Readable`

Pack a directory into a Node.js Readable stream containing tar archive bytes.

- **`directoryPath`**: Path to the directory to pack.
- **`options`**: Optional packing configuration (see `PackOptionsFS`).
- **Returns**: Node.js `Readable` stream of tar archive bytes.

**Example:**

```typescript
import { packTar } from 'modern-tar/fs';

const tarStream = packTar('/home/user/project', {
  dereference: true, // Follow symlinks
  filter: (path, stats) => !path.includes('tmp'),
});
```

#### `unpackTar(directoryPath: string, options?: UnpackOptionsFS): Writable`

Extract a tar archive to a directory.

- **`directoryPath`**: Path to the directory where files will be extracted.
- **`options`**: Optional extraction configuration (see `UnpackOptionsFS`).
- **Returns**: Node.js `Writable` stream to pipe tar archive bytes into.

**Example:**

```typescript
import { unpackTar } from 'modern-tar/fs';
import { createReadStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

const tarStream = createReadStream('backup.tar');
const extractStream = unpackTar('/restore/location', {
  strip: 1,
  fmode: 0o644, // Set consistent file permissions
});
await pipeline(tarStream, extractStream);
```

## Types

### Core Types

```typescript
// Header information for a tar entry
interface TarHeader {
  name: string; // File/directory name
  size: number; // File size in bytes
  mtime?: Date; // Modification time
  mode?: number; // File permissions (e.g., 0o644)
  type?: "file" | "directory" | "symlink" | "link" | "pax-header" | "pax-global-header";
  linkname?: string; // Target for symlinks/hardlinks
  uid?: number; // User ID
  gid?: number; // Group ID
  uname?: string; // User name
  gname?: string; // Group name
  pax?: Record<string, string>; // PAX extended attributes
}

// Input entry for packing functions
interface TarEntry {
  header: TarHeader;
  body?: string | Uint8Array | ArrayBuffer | ReadableStream<Uint8Array> | Blob | null;
}

// Output entry from a streaming decoder
interface ParsedTarEntry {
  header: TarHeader;
  body: ReadableStream<Uint8Array>;
}

// Output entry from a buffered unpack function
interface ParsedTarEntryWithData {
  header: TarHeader;
  data: Uint8Array;
}

// Platform-neutral configuration for unpacking
interface UnpackOptions {
  /** Number of leading path components to strip from entry names (e.g., strip: 1 removes first directory) */
  strip?: number;
  /** Filter function to include/exclude entries (return false to skip) */
  filter?: (header: TarHeader) => boolean;
  /** Transform function to modify tar headers before extraction */
  map?: (header: TarHeader) => TarHeader;
}
```
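
As a rough sketch of how these types fit together (assuming the interfaces above are exported from the package root), an entry's `body` can take any of the accepted shapes, and link entries carry their target in `linkname`:

```typescript
import type { TarEntry } from 'modern-tar';

// Hypothetical entries illustrating the accepted body types and header fields
const entries: TarEntry[] = [
  { header: { name: "notes.txt", size: 5 }, body: "hello" },
  { header: { name: "data.bin", size: 3 }, body: new Uint8Array([1, 2, 3]) },
  { header: { name: "latest.txt", type: "symlink", size: 0, linkname: "notes.txt" } },
];
```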

### Filesystem Types

```typescript
interface PackOptionsFS {
  /** Follow symlinks instead of archiving them as symlinks (default: false) */
  dereference?: boolean;
  /** Filter function to determine which files to include (uses Node.js fs.Stats) */
  filter?: (path: string, stat: Stats) => boolean;
  /** Transform function to modify headers before packing */
  map?: (header: TarHeader) => TarHeader;
}

interface UnpackOptionsFS extends UnpackOptions {
  // Inherited from UnpackOptions (platform-neutral):
  /** Number of leading path components to strip from entry names */
  strip?: number;
  /** Filter function to determine which entries to extract */
  filter?: (header: TarHeader) => boolean;
  /** Transform function to modify headers before extraction */
  map?: (header: TarHeader) => TarHeader;

  // Filesystem-specific options:
  /** Default mode for created directories (e.g., 0o755). Overrides tar header mode */
  dmode?: number;
  /** Default mode for created files (e.g., 0o644). Overrides tar header mode */
  fmode?: number;
  /**
   * Prevent symlinks from pointing outside the extraction directory.
   * @default true
   */
  validateSymlinks?: boolean;
}
```
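
For instance, a cautious extraction of an untrusted archive might pin file and directory modes and leave symlink validation on — a sketch of typical option usage:

```typescript
import { unpackTar } from 'modern-tar/fs';
import { createReadStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Pin permissions and keep symlink validation enabled (the default)
const extract = unpackTar('./sandbox', {
  fmode: 0o644,           // files become rw-r--r--
  dmode: 0o755,           // directories become rwxr-xr-x
  validateSymlinks: true  // reject symlinks escaping ./sandbox
});
await pipeline(createReadStream('./untrusted.tar'), extract);
```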

## Compatibility

The core library uses the [Web Streams API](https://caniuse.com/streams) and requires:

- **Node.js**: 18.0+
- **Browsers**: Modern browsers with Web Streams support
  - Chrome 71+
  - Firefox 102+
  - Safari 14.1+
  - Edge 79+
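
In Node.js, the interop helpers on `node:stream` can bridge filesystem streams into the core Web Streams API — a sketch assuming Node.js 18+:

```typescript
import { createReadStream } from 'node:fs';
import { Readable } from 'node:stream';
import { createGzipDecoder, createTarDecoder } from 'modern-tar';

// Convert a Node.js Readable into a web ReadableStream, then use the core decoders
const entries = Readable.toWeb(createReadStream('./archive.tar.gz'))
  .pipeThrough(createGzipDecoder())
  .pipeThrough(createTarDecoder());

for await (const entry of entries) {
  console.log(`Entry: ${entry.header.name}`);
}
```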

## Acknowledgements

- [`tar-stream`](https://github.com/mafintosh/tar-stream) and [`tar-fs`](https://github.com/mafintosh/tar-fs) - For the inspiration and test fixtures.

## License

MIT
package/dist/fs/index.d.ts
ADDED
@@ -0,0 +1,102 @@
import { TarHeader, UnpackOptions } from "../index-BLp7i3zL.js";
import { Stats } from "node:fs";
import { Readable, Writable } from "node:stream";

//#region src/fs/pack.d.ts

/**
 * Filesystem-specific configuration options for packing directories into tar archives.
 *
 * These options are specific to Node.js filesystem operations and use Node.js-specific
 * types like `Stats` for file system metadata.
 */
interface PackOptionsFS {
  /** Follow symlinks instead of storing them as symlinks (default: false) */
  dereference?: boolean;
  /** Filter function to include/exclude files (return false to exclude) */
  filter?: (path: string, stat: Stats) => boolean;
  /** Transform function to modify tar headers before packing */
  map?: (header: TarHeader) => TarHeader;
}
/**
 * Pack a directory into a Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable) stream containing tar archive bytes.
 *
 * Recursively walks the directory structure and creates tar entries for files, directories,
 * symlinks, and hardlinks.
 *
 * @param directoryPath - Path to directory to pack
 * @param options - Optional packing configuration
 * @returns Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable) stream of tar archive bytes
 *
 * @example
 * ```typescript
 * import { packTar } from '@modern-tar/fs';
 * import { createWriteStream } from 'node:fs';
 * import { pipeline } from 'node:stream/promises';
 *
 * // Basic directory packing
 * const tarStream = packTar('/home/user/project');
 * await pipeline(tarStream, createWriteStream('project.tar'));
 *
 * // With filtering and transformation
 * const filteredStream = packTar('/my/project', {
 *   filter: (path, stats) => !path.includes('node_modules'),
 *   map: (header) => ({ ...header, uname: 'builder' }),
 *   dereference: true // Follow symlinks
 * });
 * ```
 */
declare function packTar(directoryPath: string, options?: PackOptionsFS): Readable;
//#endregion
//#region src/fs/unpack.d.ts
/**
 * Filesystem-specific configuration options for extracting tar archives to the filesystem.
 *
 * Extends the core {@link UnpackOptions} with Node.js filesystem-specific settings
 * for controlling file permissions and other filesystem behaviors.
 */
interface UnpackOptionsFS extends UnpackOptions {
  /** Default mode for created directories (e.g., 0o755). If not specified, uses mode from tar header or system default */
  dmode?: number;
  /** Default mode for created files (e.g., 0o644). If not specified, uses mode from tar header or system default */
  fmode?: number;
  /**
   * Prevent symlinks from pointing outside the extraction directory.
   * @default true
   */
  validateSymlinks?: boolean;
}
/**
 * Extract a tar archive to a directory.
 *
 * Returns a Node.js [`Writable`](https://nodejs.org/api/stream.html#class-streamwritable)
 * stream to pipe tar archive bytes into. Files, directories, symlinks, and hardlinks
 * are written to the filesystem with correct permissions and timestamps.
 *
 * @param directoryPath - Path to directory where files will be extracted
 * @param options - Optional extraction configuration
 * @returns Node.js [`Writable`](https://nodejs.org/api/stream.html#class-streamwritable) stream to pipe tar archive bytes into
 *
 * @example
 * ```typescript
 * import { unpackTar } from '@modern-tar/fs';
 * import { createReadStream } from 'node:fs';
 * import { pipeline } from 'node:stream/promises';
 *
 * // Basic extraction
 * const tarStream = createReadStream('project.tar');
 * const extractStream = unpackTar('/output/directory');
 * await pipeline(tarStream, extractStream);
 *
 * // Extract with path manipulation and filtering
 * const advancedStream = unpackTar('/output', {
 *   strip: 1, // Remove first path component
 *   filter: (header) => header.type === 'file' && header.name.endsWith('.js'),
 *   map: (header) => ({ ...header, mode: 0o644 })
 * });
 * await pipeline(createReadStream('archive.tar'), advancedStream);
 * ```
 */
declare function unpackTar(directoryPath: string, options?: UnpackOptionsFS): Writable;
//#endregion
export { type PackOptionsFS, type UnpackOptionsFS, packTar, unpackTar };