modern-tar 0.4.1 → 0.5.0
This diff compares the publicly released contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +6 -3
- package/dist/fs/index.d.ts +38 -31
- package/dist/fs/index.js +185 -131
- package/dist/{types-D-xPQp4Z.d.ts → types-Dc3p5B3s.d.ts} +2 -26
- package/dist/{unpacker-DBTDVhe4.js → unpacker-yB6Ahxxk.js} +19 -28
- package/dist/web/index.d.ts +27 -1
- package/dist/web/index.js +1 -1
- package/package.json +2 -2
package/README.md
CHANGED

@@ -197,7 +197,7 @@ await pipeline(sourceStream, extractStream);
 #### Archive Creation
 
 ```typescript
-import {
+import { packTar, type TarSource } from 'modern-tar/fs';
 import { createWriteStream } from 'node:fs';
 import { pipeline } from 'node:stream/promises';
 
@@ -205,11 +205,14 @@ import { pipeline } from 'node:stream/promises';
 const sources: TarSource[] = [
   { type: 'file', source: './package.json', target: 'project/package.json' },
   { type: 'directory', source: './src', target: 'project/src' },
+
   { type: 'content', content: 'Hello World!', target: 'project/hello.txt' },
-  { type: 'content', content: '#!/bin/bash\necho "Executable"', target: 'bin/script.sh', mode: 0o755 }
+  { type: 'content', content: '#!/bin/bash\necho "Executable"', target: 'bin/script.sh', mode: 0o755 },
+  { type: 'stream', content: createReadStream('./large-file.bin'), target: 'project/data.bin', size: 1048576 },
+  { type: 'stream', content: fetch('/api/data').then(r => r.body!), target: 'project/remote.json', size: 2048 }
 ];
 
-const archiveStream =
+const archiveStream = packTar(sources);
 await pipeline(archiveStream, createWriteStream('project.tar'));
 ```
 
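The new `stream` sources shown above require an explicit `size` because the tar format writes each entry's byte length into its fixed-size header before any body bytes. A minimal sketch of deriving that size for a filesystem stream, using only the `packTar`/`TarSource` API from the README above (the input path is illustrative):

```typescript
import { packTar, type TarSource } from 'modern-tar/fs';
import { createReadStream, createWriteStream } from 'node:fs';
import { stat } from 'node:fs/promises';
import { pipeline } from 'node:stream/promises';

// The tar header records each entry's length up front, so a stream source
// must declare its byte count before the first chunk is read.
const filePath = './large-file.bin'; // illustrative input file
const { size } = await stat(filePath);

const sources: TarSource[] = [
  { type: 'stream', content: createReadStream(filePath), target: 'data/large-file.bin', size },
];

await pipeline(packTar(sources), createWriteStream('data.tar'));
```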
package/dist/fs/index.d.ts
CHANGED

@@ -1,6 +1,6 @@
-import { TarEntryData, TarHeader, UnpackOptions } from "../types-D-xPQp4Z.js";
-import { Stats } from "node:fs";
+import { TarEntryData, TarHeader, UnpackOptions } from "../types-Dc3p5B3s.js";
 import { Readable, Writable } from "node:stream";
+import { Stats } from "node:fs";
 
 //#region src/fs/types.d.ts
 
@@ -19,6 +19,11 @@ interface PackOptionsFS {
 	map?: (header: TarHeader) => TarHeader;
 	/** Base directory for symlink security validation, when `dereference` is set to true. */
 	baseDir?: string;
+	/**
+	 * Maximum number of concurrent filesystem operations during packing.
+	 * @default os.cpus().length || 8
+	 */
+	concurrency?: number;
 }
 /**
  * Filesystem-specific configuration options for extracting tar archives to the filesystem.
@@ -61,49 +66,42 @@ interface DirectorySource {
 	/** Destination path for the directory inside the tar archive. */
 	target: string;
 }
-/** Describes raw content to be added to the archive.
+/** Describes raw, buffered content to be added to the archive. */
 interface ContentSource {
 	type: "content";
-	/** Raw content to add. Supports string, Uint8Array, ArrayBuffer,
+	/** Raw content to add. Supports string, Uint8Array, ArrayBuffer, Blob, or null. */
 	content: TarEntryData;
 	/** Destination path for the content inside the tar archive. */
 	target: string;
 	/** Optional Unix file permissions for the entry (e.g., 0o644). */
 	mode?: number;
 }
+/** Describes a stream of content to be added to the archive. */
+interface StreamSource {
+	type: "stream";
+	/** A Readable or ReadableStream. */
+	content: Readable | ReadableStream;
+	/** Destination path for the content inside the tar archive. */
+	target: string;
+	/** The total size of the stream's content in bytes. This is required for streams. */
+	size: number;
+	/** Optional Unix file permissions for the entry (e.g., 0o644). */
+	mode?: number;
+}
 /** A union of all possible source types for creating a tar archive. */
-type TarSource = FileSource | DirectorySource | ContentSource;
+type TarSource = FileSource | DirectorySource | ContentSource | StreamSource;
 //#endregion
 //#region src/fs/pack.d.ts
 /**
- *
- * array of sources (files, directories, or raw content).
- *
- * @param sources - An array of {@link TarSource} objects describing what to include.
- * @param options - Optional packing configuration using {@link PackOptionsFS}.
- * @returns A Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable)
- * stream that outputs the tar archive bytes.
- *
- * @example
- * ```typescript
- * import { packTarSources, TarSource } from 'modern-tar/fs';
- *
- * const sources: TarSource[] = [
- *   { type: 'file', source: './package.json', target: 'project/package.json' },
- *   { type: 'directory', source: './src', target: 'project/src' },
- *   { type: 'content', content: 'hello world', target: 'project/hello.txt' }
- * ];
- *
- * const archiveStream = packTarSources(sources);
- * await pipeline(archiveStream, createWriteStream('project.tar'));
- * ```
+ * @deprecated Use `packTar` instead. This function is now an alias for `packTar`.
 */
-declare
+declare const packTarSources: typeof packTar;
 /**
- * Pack a directory into a Node.js `Readable` stream
- *
+ * Pack a directory or multiple sources into a Node.js `Readable` stream containing
+ * tar archive bytes. Can pack either a single directory or an array of sources
+ * (files, directories, or raw content).
 *
-* @param
+* @param sources - Either a directory path string or an array of {@link TarSource} objects.
 * @param options - Optional packing configuration using {@link PackOptionsFS}.
 * @returns Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable) stream of tar archive bytes
 *
@@ -117,6 +115,15 @@ declare function packTarSources(sources: TarSource[], options?: PackOptionsFS):
 * const tarStream = packTar('/home/user/project');
 * await pipeline(tarStream, createWriteStream('project.tar'));
 *
+* // Pack multiple sources
+* const sources = [
+*   { type: 'file', source: './package.json', target: 'project/package.json' },
+*   { type: 'directory', source: './src', target: 'project/src' },
+*   { type: 'content', content: 'hello world', target: 'project/hello.txt' }
+* ];
+* const archiveStream = packTar(sources);
+* await pipeline(archiveStream, createWriteStream('project.tar'));
+*
 * // With filtering and transformation
 * const filteredStream = packTar('/my/project', {
 *   filter: (path, stats) => !path.includes('node_modules'),
@@ -125,7 +132,7 @@ declare function packTarSources(sources: TarSource[], options?: PackOptionsFS):
 * });
 * ```
 */
-declare function packTar(
+declare function packTar(sources: TarSource[] | string, options?: PackOptionsFS): Readable;
 //#endregion
 //#region src/fs/unpack.d.ts
 /**
package/dist/fs/index.js
CHANGED

@@ -1,149 +1,203 @@
-import { createTarPacker, createTarUnpacker, normalizeBody, transformHeader } from "../unpacker-DBTDVhe4.js";
-import { createReadStream, createWriteStream } from "node:fs";
+import { createTarPacker, createTarUnpacker, normalizeBody, transformHeader } from "../unpacker-yB6Ahxxk.js";
 import * as fs from "node:fs/promises";
+import { cpus } from "node:os";
 import * as path from "node:path";
 import { PassThrough, Readable, Writable } from "node:stream";
-import {
+import { createWriteStream } from "node:fs";
 import { pipeline } from "node:stream/promises";
 
 //#region src/fs/pack.ts
-… (old lines 10–11 not captured in this view)
+const packTarSources = packTar;
+function packTar(sources, options = {}) {
 	const stream = new Readable({ read() {} });
-	const packer = createTarPacker((chunk) => stream.push(Buffer.from(chunk)), (error) => stream.destroy(error), () => stream.push(null));
 	(async () => {
-… (old lines 15–53 not captured in this view)
-		packer.endEntry();
-		break;
+		const packer = createTarPacker((chunk) => stream.push(Buffer.from(chunk)), stream.destroy.bind(stream), () => stream.push(null));
+		const { dereference = false, filter, map, baseDir, concurrency = cpus().length || 8 } = options;
+		const isDir = typeof sources === "string";
+		const directoryPath = isDir ? path.resolve(sources) : null;
+		const jobs = isDir ? (await fs.readdir(directoryPath, { withFileTypes: true })).map((entry) => ({
+			type: entry.isDirectory() ? "directory" : "file",
+			source: path.join(directoryPath, entry.name),
+			target: entry.name
+		})) : sources;
+		const results = /* @__PURE__ */ new Map();
+		const resolvers = /* @__PURE__ */ new Map();
+		const seenInodes = /* @__PURE__ */ new Map();
+		let jobIndex = 0;
+		let writeIndex = 0;
+		let activeWorkers = 0;
+		let allJobsQueued = false;
+		const writer = async () => {
+			const readBuffer = Buffer.alloc(64 * 1024);
+			while (true) {
+				if (stream.destroyed) return;
+				if (allJobsQueued && writeIndex >= jobs.length) break;
+				if (!results.has(writeIndex)) {
+					await new Promise((resolve) => resolvers.set(writeIndex, resolve));
+					continue;
+				}
+				const result = results.get(writeIndex);
+				results.delete(writeIndex);
+				resolvers.delete(writeIndex);
+				if (!result) {
+					writeIndex++;
+					continue;
+				}
+				packer.add(result.header);
+				if (result.body) if (result.body instanceof Uint8Array) {
+					if (result.body.length > 0) packer.write(result.body);
+				} else if (result.body instanceof Readable || result.body instanceof ReadableStream) try {
+					for await (const chunk of result.body) {
+						if (stream.destroyed) break;
+						packer.write(chunk instanceof Uint8Array ? chunk : Buffer.from(chunk));
 					}
-… (old lines 57–70 not captured in this view)
-			packer.add(header);
-			packer.endEntry();
-			const dirents = await fs.readdir(source.source, { withFileTypes: true });
-			for (let i = dirents.length - 1; i >= 0; i--) {
-				const dirent = dirents[i];
-				const childSourcePath = path.join(source.source, dirent.name);
-				const childTargetPath = `${target.replace(/\/$/, "")}/${dirent.name}`;
-				if (baseDir && dereference && await isSymlinkUnsafe(childSourcePath, baseDir)) continue;
-				stack.push({
-					type: dirent.isDirectory() ? "directory" : "file",
-					source: childSourcePath,
-					target: childTargetPath
-				});
+				} catch (error) {
+					stream.destroy(error);
+					return;
+				}
+				else {
+					const { handle, size } = result.body;
+					try {
+						let bytesLeft = size;
+						while (bytesLeft > 0 && !stream.destroyed) {
+							const toRead = Math.min(bytesLeft, readBuffer.length);
+							const { bytesRead } = await handle.read(readBuffer, 0, toRead, null);
+							if (bytesRead === 0) break;
+							packer.write(readBuffer.subarray(0, bytesRead));
+							bytesLeft -= bytesRead;
 						}
-… (old line 85 not captured in this view)
+					} catch (error) {
+						stream.destroy(error);
+						return;
+					} finally {
+						await handle.close();
 					}
-… (old lines 87–99 not captured in this view)
+				}
+				packer.endEntry();
+				writeIndex++;
+			}
+		};
+		const controller = () => {
+			if (stream.destroyed || allJobsQueued) return;
+			while (activeWorkers < concurrency && jobIndex < jobs.length) {
+				activeWorkers++;
+				const currentIndex = jobIndex++;
+				processJob(jobs[currentIndex], currentIndex).catch(stream.destroy.bind(stream)).finally(() => {
+					activeWorkers--;
+					controller();
+				});
+			}
+			if (activeWorkers === 0 && jobIndex >= jobs.length) {
+				allJobsQueued = true;
+				resolvers.get(writeIndex)?.();
+			}
+		};
+		const processJob = async (job, index) => {
+			let jobResult = null;
+			const target = job.target.replace(/\\/g, "/");
+			try {
+				if (job.type === "content" || job.type === "stream") {
+					let body$1;
+					let size;
+					if (job.type === "stream") {
+						if (typeof job.size !== "number" || job.size <= 0) throw new Error("StreamSource requires a positive size property.");
+						size = job.size;
+						body$1 = job.content;
+					} else {
+						const content = await normalizeBody(job.content);
+						size = content.length;
+						body$1 = content;
 					}
+					const stat$1 = {
+						size,
+						isFile: () => true,
+						isDirectory: () => false,
+						isSymbolicLink: () => false,
+						mode: job.mode ?? 420,
+						mtime: /* @__PURE__ */ new Date(),
+						uid: process.getuid?.() ?? 0,
+						gid: process.getgid?.() ?? 0
+					};
+					if (filter && !filter(target, stat$1)) return;
+					let header$1 = {
+						name: target,
+						type: "file",
+						size,
+						mode: stat$1.mode,
+						mtime: stat$1.mtime,
+						uid: stat$1.uid,
+						gid: stat$1.gid
+					};
+					if (map) header$1 = map(header$1);
+					jobResult = {
+						header: header$1,
+						body: body$1
+					};
+					return;
 				}
+				let stat = await fs.lstat(job.source, { bigint: true });
+				if (dereference && stat.isSymbolicLink()) {
+					const linkTarget = await fs.readlink(job.source);
+					const resolved = path.resolve(path.dirname(job.source), linkTarget);
+					const resolvedBase = baseDir ?? directoryPath ?? process.cwd();
+					if (!resolved.startsWith(resolvedBase + path.sep) && resolved !== resolvedBase) return;
+					stat = await fs.stat(job.source, { bigint: true });
+				}
+				if (filter && !filter(target, stat)) return;
+				let header = {
+					name: target,
+					size: 0,
+					mode: Number(stat.mode),
+					mtime: stat.mtime,
+					uid: Number(stat.uid),
+					gid: Number(stat.gid),
+					type: "file"
+				};
+				let body;
+				if (stat.isDirectory()) {
+					header.type = "directory";
+					header.name = target.endsWith("/") ? target : `${target}/`;
+					try {
+						for (const d of await fs.readdir(job.source, { withFileTypes: true })) jobs.push({
+							type: d.isDirectory() ? "directory" : "file",
+							source: path.join(job.source, d.name),
+							target: `${header.name}${d.name}`
+						});
+					} catch {}
+				} else if (stat.isSymbolicLink()) {
+					header.type = "symlink";
+					header.linkname = await fs.readlink(job.source);
+				} else if (stat.isFile()) {
+					header.size = Number(stat.size);
+					if (stat.nlink > 1 && seenInodes.has(stat.ino)) {
+						header.type = "link";
+						header.linkname = seenInodes.get(stat.ino);
+						header.size = 0;
+					} else {
+						if (stat.nlink > 1) seenInodes.set(stat.ino, target);
+						if (header.size > 0) if (header.size < 32 * 1024) body = await fs.readFile(job.source);
+						else body = {
+							handle: await fs.open(job.source, "r"),
+							size: header.size
+						};
+					}
+				} else return;
+				if (map) header = map(header);
+				jobResult = {
+					header,
+					body
+				};
+			} finally {
+				results.set(index, jobResult);
+				resolvers.get(index)?.();
 			}
-… (old lines 103–106 not captured in this view)
-	})();
+		};
+		controller();
+		await writer();
+		if (!stream.destroyed) packer.finalize();
+	})().catch((error) => stream.destroy(error));
 	return stream;
 }
-function packTar(directoryPath, options = {}) {
-	const stream = new Readable({ read() {} });
-	(async () => {
-		try {
-			const resolvedPath = path.resolve(directoryPath);
-			const allSources = (await fs.readdir(resolvedPath, { withFileTypes: true })).map((dirent) => ({
-				type: dirent.isDirectory() ? "directory" : "file",
-				source: path.join(resolvedPath, dirent.name),
-				target: dirent.name
-			}));
-			const sources = [];
-			for (const source of allSources) if (source.type === "content" || !options.dereference || !await isSymlinkUnsafe(source.source, resolvedPath)) sources.push(source);
-			const sourceStream = packTarSources(sources, {
-				...options,
-				baseDir: resolvedPath
-			});
-			sourceStream.on("data", (chunk) => stream.push(chunk));
-			sourceStream.on("end", () => stream.push(null));
-			sourceStream.on("error", (err) => stream.destroy(err));
-		} catch (error) {
-			stream.destroy(error);
-		}
-	})();
-	return stream;
-}
-async function isSymlinkUnsafe(sourcePath, baseDir) {
-	try {
-		if ((await fs.lstat(sourcePath)).isSymbolicLink()) {
-			const linkTarget = await fs.readlink(sourcePath);
-			const resolvedTarget = path.resolve(path.dirname(sourcePath), linkTarget);
-			return !(resolvedTarget === baseDir || resolvedTarget.startsWith(baseDir + path.sep));
-		}
-	} catch {
-		return true;
-	}
-	return false;
-}
 
 //#endregion
 //#region src/fs/path.ts
@@ -337,7 +391,7 @@ function createFSHandler(directoryPath, options) {
 	handler: {
 		onHeader(header) {
 			if (signal.aborted) return;
-			activeEntryStream = new PassThrough();
+			activeEntryStream = new PassThrough({ highWaterMark: header.size > 1048576 ? 524288 : void 0 });
 			const entryStream = activeEntryStream;
 			const startOperation = () => {
 				let opPromise;
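The rewritten `pack.ts` replaces the old recursive stack walk with a `controller`/`writer` pair: up to `concurrency` jobs stat and read files in parallel, while a single writer drains results strictly in job order so the archive bytes stay deterministic. A standalone sketch of that scheduling pattern (names here are illustrative, not the library's exports):

```typescript
// Bounded-concurrency scheduler that emits results in submission order,
// mirroring the controller/writer split in the rewritten pack.ts.
async function runOrdered<T, R>(
  jobs: T[],
  worker: (job: T) => Promise<R>,
  concurrency: number,
  emit: (result: R) => void,
): Promise<void> {
  const results = new Map<number, R>();
  const resolvers = new Map<number, () => void>();
  let next = 0;    // next job to start
  let active = 0;  // workers currently in flight
  let failure: unknown;

  const controller = () => {
    // Keep up to `concurrency` workers busy; each completion re-enters here.
    while (active < concurrency && next < jobs.length) {
      const index = next++;
      active++;
      worker(jobs[index])
        .then(
          (result) => results.set(index, result),
          (error) => { failure ??= error; },
        )
        .then(() => {
          active--;
          resolvers.get(index)?.(); // wake the writer if it waits on this slot
          controller();
        });
    }
  };
  controller();

  // Drain strictly in index order: output is deterministic even though
  // workers finish out of order.
  for (let writeIndex = 0; writeIndex < jobs.length; writeIndex++) {
    while (!results.has(writeIndex)) {
      if (failure !== undefined) throw failure;
      await new Promise<void>((resolve) => resolvers.set(writeIndex, resolve));
    }
    emit(results.get(writeIndex)!);
    results.delete(writeIndex);
    resolvers.delete(writeIndex);
  }
}
```

One wrinkle visible in the diff: directory jobs append child jobs to the shared `jobs` array while the pool is running, which is why the real controller re-checks `jobs.length` after every completion instead of snapshotting it once.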
package/dist/{types-D-xPQp4Z.d.ts → types-Dc3p5B3s.d.ts}
CHANGED

@@ -46,31 +46,7 @@ interface TarHeader {
 /**
  * Union type for entry body data that can be packed into a tar archive.
  */
-type TarEntryData = string | Uint8Array | ArrayBuffer |
-/**
- * Represents a complete entry to be packed into a tar archive.
- *
- * Combines header metadata with optional body data. Used as input to {@link packTar}
- * and the controller returned by {@link createTarPacker}.
- */
-interface TarEntry {
-	header: TarHeader;
-	body?: TarEntryData;
-}
-/**
- * Represents an entry parsed from a tar archive stream.
- */
-interface ParsedTarEntry {
-	header: TarHeader;
-	body: ReadableStream<Uint8Array>;
-}
-/**
- * Represents an extracted entry with fully buffered content.
- */
-interface ParsedTarEntryWithData {
-	header: TarHeader;
-	data: Uint8Array;
-}
+type TarEntryData = string | Uint8Array | ArrayBuffer | Blob | null | undefined;
 /**
  * Configuration options for creating a tar decoder stream.
  */
@@ -105,4 +81,4 @@ interface UnpackOptions extends DecoderOptions {
 	streamTimeout?: number;
 }
 //#endregion
-export { DecoderOptions,
+export { DecoderOptions, TarEntryData, TarHeader, UnpackOptions };
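With `TarEntryData` narrowed to buffered shapes (string, `Uint8Array`, `ArrayBuffer`, `Blob`, or null/undefined), streaming bodies now travel through the dedicated `StreamSource` and web `TarEntry` paths instead of this union. A small sketch passing a `Blob` as buffered content (the target path is illustrative):

```typescript
import { packTar, type TarSource } from 'modern-tar/fs';

// Blob is now a first-class buffered body; it is normalized to bytes
// before the entry header is written.
const sources: TarSource[] = [
  { type: 'content', content: new Blob(['{"hello":"world"}']), target: 'data/hello.json' },
];
const archive = packTar(sources);
```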
package/dist/{unpacker-DBTDVhe4.js → unpacker-yB6Ahxxk.js}
CHANGED

@@ -93,8 +93,10 @@ function readOctal(view, offset, size) {
 function readNumeric(view, offset, size) {
 	if (view[offset] & 128) {
 		let result = 0;
-… (old lines 96–97 not captured in this view)
+		result = view[offset] & 127;
+		for (let i = 1; i < size; i++) result = result * 256 + view[offset + i];
+		if (!Number.isSafeInteger(result)) throw new Error("TAR number too large");
+		return result;
 	}
 	return readOctal(view, offset, size);
 }
@@ -126,7 +128,6 @@ async function normalizeBody(body) {
 	if (typeof body === "string") return encoder.encode(body);
 	if (body instanceof ArrayBuffer) return new Uint8Array(body);
 	if (body instanceof Blob) return new Uint8Array(await body.arrayBuffer());
-	if (body instanceof ReadableStream) return streamToBuffer(body);
 	throw new TypeError("Unsupported content type for entry body.");
 }
 
@@ -253,6 +254,16 @@ function parseUstarHeader(block, strict) {
 	if (magic === "ustar") header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
 	return header;
 }
+const PAX_MAPPING = {
+	path: ["name", (v) => v],
+	linkpath: ["linkname", (v) => v],
+	size: ["size", (v) => parseInt(v, 10)],
+	mtime: ["mtime", parseFloat],
+	uid: ["uid", (v) => parseInt(v, 10)],
+	gid: ["gid", (v) => parseInt(v, 10)],
+	uname: ["uname", (v) => v],
+	gname: ["gname", (v) => v]
+};
 function parsePax(buffer) {
 	const decoder$1 = new TextDecoder("utf-8");
 	const overrides = {};
@@ -267,31 +278,11 @@ function parsePax(buffer) {
 		const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
 		if (key && value !== void 0) {
 			pax[key] = value;
-… (old lines 270–274 not captured in this view)
-					overrides.linkname = value;
-					break;
-				case "size":
-					overrides.size = parseInt(value, 10);
-					break;
-				case "mtime":
-					overrides.mtime = parseFloat(value);
-					break;
-				case "uid":
-					overrides.uid = parseInt(value, 10);
-					break;
-				case "gid":
-					overrides.gid = parseInt(value, 10);
-					break;
-				case "uname":
-					overrides.uname = value;
-					break;
-				case "gname":
-					overrides.gname = value;
-					break;
+			const mapping = PAX_MAPPING[key];
+			if (mapping) {
+				const [targetKey, parser] = mapping;
+				const parsedValue = parser(value);
+				if (typeof parsedValue === "string" || !Number.isNaN(parsedValue)) overrides[targetKey] = parsedValue;
 			}
 		}
 		offset = recordEnd;
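The rebuilt `readNumeric` fills in GNU tar's base-256 extension: when the first byte of a numeric header field has its high bit set, the field holds a big-endian binary value rather than NUL-terminated octal text, which is how sizes beyond the 11-octal-digit limit (8^11 - 1 bytes) are stored. A worked sketch of the decoding (the function name is mine, not this module's export):

```typescript
// GNU tar base-256: a set high bit on the first byte marks a big-endian
// binary number; the remaining 7 bits plus the following bytes carry the value.
function readBase256(view: Uint8Array, offset: number, size: number): number {
  let result = view[offset] & 0x7f; // strip the marker bit
  for (let i = 1; i < size; i++) result = result * 256 + view[offset + i];
  if (!Number.isSafeInteger(result)) throw new Error("TAR number too large");
  return result;
}

// A 12-byte size field holding 8 GiB (2^33), one past the octal maximum of 8^11 - 1:
const field = new Uint8Array(12);
field[0] = 0x80;                        // marker bit only; value bits are zero
field[7] = 0x02;                        // 2 * 256^4 = 2^33
console.log(readBase256(field, 0, 12)); // 8589934592
```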
package/dist/web/index.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import { DecoderOptions,
+import { DecoderOptions, TarEntryData, TarHeader, UnpackOptions } from "../types-Dc3p5B3s.js";
 
 //#region src/web/compression.d.ts
 
@@ -72,6 +72,32 @@ declare function createGzipEncoder(): ReadableWritablePair<Uint8Array, Uint8Arra
 */
 declare function createGzipDecoder(): ReadableWritablePair<Uint8Array, Uint8Array>;
 //#endregion
+//#region src/web/types.d.ts
+/**
+ * Represents a complete entry to be packed into a tar archive.
+ *
+ * Combines header metadata with optional body data. Used as input to {@link packTar}
+ * and the controller returned by {@link createTarPacker}.
+ */
+interface TarEntry {
+	header: TarHeader;
+	body?: TarEntryData | ReadableStream<Uint8Array>;
+}
+/**
+ * Represents an entry parsed from a tar archive stream.
+ */
+interface ParsedTarEntry {
+	header: TarHeader;
+	body: ReadableStream<Uint8Array>;
+}
+/**
+ * Represents an extracted entry with fully buffered content.
+ */
+interface ParsedTarEntryWithData {
+	header: TarHeader;
+	data: Uint8Array;
+}
+//#endregion
 //#region src/web/helpers.d.ts
 /**
  * Packs an array of tar entries into a single `Uint8Array` buffer.
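These entry shapes moved from the shared types module into the web build, and `TarEntry.body` now accepts a `ReadableStream<Uint8Array>` alongside buffered data. A sketch of constructing both kinds of entries, assuming `TarEntry` is re-exported from the package root as this declaration file suggests (header fields are abbreviated; consult the full `TarHeader` type for what is required):

```typescript
import type { TarEntry } from 'modern-tar';

// A streaming body needs header.size up front, since the fixed-size tar
// header is emitted before any body bytes arrive.
const blob = new Blob(['streamed contents']);
const entries: TarEntry[] = [
  { header: { name: 'hello.txt', size: 5 }, body: 'hello' },              // buffered
  { header: { name: 'data.bin', size: blob.size }, body: blob.stream() }, // streaming
];
```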
package/dist/web/index.js
CHANGED

@@ -1,4 +1,4 @@
-import { createTarPacker as createTarPacker$1, createTarUnpacker, normalizeBody, streamToBuffer, transformHeader } from "../unpacker-DBTDVhe4.js";
+import { createTarPacker as createTarPacker$1, createTarUnpacker, normalizeBody, streamToBuffer, transformHeader } from "../unpacker-yB6Ahxxk.js";
 
 //#region src/web/compression.ts
 function createGzipEncoder() {
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 	"name": "modern-tar",
-	"version": "0.4.1",
+	"version": "0.5.0",
 	"description": "Zero dependency streaming tar parser and writer for JavaScript.",
 	"author": "Ayuhito <hello@ayuhito.com>",
 	"license": "MIT",
@@ -16,7 +16,7 @@
 	},
 	"devDependencies": {
 		"@biomejs/biome": "2.2.5",
-		"@types/node": "^24.
+		"@types/node": "^24.7.1",
 		"@vitest/coverage-v8": "^3.2.4",
 		"tsdown": "^0.15.6",
 		"typescript": "^5.9.3",