modern-tar 0.4.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/fs/index.d.ts +22 -27
- package/dist/fs/index.js +166 -131
- package/package.json +2 -2
package/README.md
CHANGED
@@ -197,7 +197,7 @@ await pipeline(sourceStream, extractStream);
 #### Archive Creation
 
 ```typescript
-import { packTarSources, type TarSource } from 'modern-tar/fs';
+import { packTar, type TarSource } from 'modern-tar/fs';
 import { createWriteStream } from 'node:fs';
 import { pipeline } from 'node:stream/promises';
 
@@ -209,7 +209,7 @@ const sources: TarSource[] = [
   { type: 'content', content: '#!/bin/bash\necho "Executable"', target: 'bin/script.sh', mode: 0o755 }
 ];
 
-const archiveStream = packTarSources(sources);
+const archiveStream = packTar(sources);
 await pipeline(archiveStream, createWriteStream('project.tar'));
 ```
 
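Note: per the type declarations below, `packTarSources` is kept as a deprecated alias of `packTar`, so 0.4.1-style call sites should keep working unchanged. A minimal sketch of the two equivalent call styles:

```typescript
import { packTar, packTarSources, type TarSource } from 'modern-tar/fs';

const sources: TarSource[] = [
  { type: 'content', content: 'hello world', target: 'hello.txt' }
];

const a = packTar(sources);        // new unified entry point
const b = packTarSources(sources); // deprecated alias, same behavior
```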
package/dist/fs/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { TarEntryData, TarHeader, UnpackOptions } from "../types-D-xPQp4Z.js";
-import { Stats } from "node:fs";
 import { Readable, Writable } from "node:stream";
+import { Stats } from "node:fs";
 
 //#region src/fs/types.d.ts
 
@@ -19,6 +19,11 @@ interface PackOptionsFS {
   map?: (header: TarHeader) => TarHeader;
   /** Base directory for symlink security validation, when `dereference` is set to true. */
   baseDir?: string;
+  /**
+   * Maximum number of concurrent filesystem operations during packing.
+   * @default os.cpus().length || 8
+   */
+  concurrency?: number;
 }
 /**
  * Filesystem-specific configuration options for extracting tar archives to the filesystem.
@@ -76,34 +81,15 @@ type TarSource = FileSource | DirectorySource | ContentSource;
 //#endregion
 //#region src/fs/pack.d.ts
 /**
- *
- * array of sources (files, directories, or raw content).
- *
- * @param sources - An array of {@link TarSource} objects describing what to include.
- * @param options - Optional packing configuration using {@link PackOptionsFS}.
- * @returns A Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable)
- * stream that outputs the tar archive bytes.
- *
- * @example
- * ```typescript
- * import { packTarSources, TarSource } from 'modern-tar/fs';
- *
- * const sources: TarSource[] = [
- *   { type: 'file', source: './package.json', target: 'project/package.json' },
- *   { type: 'directory', source: './src', target: 'project/src' },
- *   { type: 'content', content: 'hello world', target: 'project/hello.txt' }
- * ];
- *
- * const archiveStream = packTarSources(sources);
- * await pipeline(archiveStream, createWriteStream('project.tar'));
- * ```
+ * @deprecated Use `packTar` instead. This function is now an alias for `packTar`.
 */
-declare function packTarSources(sources: TarSource[], options?: PackOptionsFS): Readable;
+declare const packTarSources: typeof packTar;
 /**
- * Pack a directory into a Node.js `Readable` stream
- *
+ * Pack a directory or multiple sources into a Node.js `Readable` stream containing
+ * tar archive bytes. Can pack either a single directory or an array of sources
+ * (files, directories, or raw content).
 *
- * @param
+ * @param sources - Either a directory path string or an array of {@link TarSource} objects.
 * @param options - Optional packing configuration using {@link PackOptionsFS}.
 * @returns Node.js [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable) stream of tar archive bytes
 *
@@ -117,6 +103,15 @@ declare function packTarSources(sources: TarSource[], options?: PackOptionsFS):
 * const tarStream = packTar('/home/user/project');
 * await pipeline(tarStream, createWriteStream('project.tar'));
 *
+ * // Pack multiple sources
+ * const sources = [
+ *   { type: 'file', source: './package.json', target: 'project/package.json' },
+ *   { type: 'directory', source: './src', target: 'project/src' },
+ *   { type: 'content', content: 'hello world', target: 'project/hello.txt' }
+ * ];
+ * const archiveStream = packTar(sources);
+ * await pipeline(archiveStream, createWriteStream('project.tar'));
+ *
 * // With filtering and transformation
 * const filteredStream = packTar('/my/project', {
 *   filter: (path, stats) => !path.includes('node_modules'),
@@ -125,7 +120,7 @@ declare function packTarSources(sources: TarSource[], options?: PackOptionsFS):
 * });
 * ```
 */
-declare function packTar(
+declare function packTar(sources: TarSource[] | string, options?: PackOptionsFS): Readable;
 //#endregion
 //#region src/fs/unpack.d.ts
 /**
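Taken together, the new declarations accept either a directory path or a `TarSource[]`, and add a `concurrency` option that bounds parallel filesystem work. A short usage sketch against the 0.4.2 types (the paths here are hypothetical):

```typescript
import { packTar } from 'modern-tar/fs';
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Directory form, limiting concurrent filesystem operations to 4.
const dirStream = packTar('./my-project', { concurrency: 4 });
await pipeline(dirStream, createWriteStream('project.tar'));

// Array form goes through the same single signature, no overloads needed.
const mixedStream = packTar([{ type: 'content', content: 'hi', target: 'hi.txt' }]);
await pipeline(mixedStream, createWriteStream('mixed.tar'));
```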
package/dist/fs/index.js
CHANGED
@@ -1,149 +1,184 @@
 import { createTarPacker, createTarUnpacker, normalizeBody, transformHeader } from "../unpacker-DBTDVhe4.js";
-import { createReadStream, createWriteStream } from "node:fs";
 import * as fs from "node:fs/promises";
+import { cpus } from "node:os";
 import * as path from "node:path";
 import { PassThrough, Readable, Writable } from "node:stream";
-import {
+import { createWriteStream } from "node:fs";
 import { pipeline } from "node:stream/promises";
 
 //#region src/fs/pack.ts
-… (2 removed lines not captured in the source view)
+const packTarSources = packTar;
+function packTar(sources, options = {}) {
   const stream = new Readable({ read() {} });
-  const packer = createTarPacker((chunk) => stream.push(Buffer.from(chunk)), (error) => stream.destroy(error), () => stream.push(null));
   (async () => {
-… (45 removed lines not captured in the source view)
-      if (filter && !filter(source.source, stat)) break;
-      let header = {
-        name: target.endsWith("/") ? target : `${target}/`,
-        size: 0,
-        mode: stat.mode,
-        mtime: stat.mtime,
-        uid: stat.uid,
-        gid: stat.gid,
-        type: "directory"
-      };
-      if (map) header = map(header);
-      packer.add(header);
-      packer.endEntry();
-      const dirents = await fs.readdir(source.source, { withFileTypes: true });
-      for (let i = dirents.length - 1; i >= 0; i--) {
-        const dirent = dirents[i];
-        const childSourcePath = path.join(source.source, dirent.name);
-        const childTargetPath = `${target.replace(/\/$/, "")}/${dirent.name}`;
-        if (baseDir && dereference && await isSymlinkUnsafe(childSourcePath, baseDir)) continue;
-        stack.push({
-          type: dirent.isDirectory() ? "directory" : "file",
-          source: childSourcePath,
-          target: childTargetPath
-        });
+    const packer = createTarPacker((chunk) => stream.push(Buffer.from(chunk)), stream.destroy.bind(stream), () => stream.push(null));
+    const { dereference = false, filter, map, baseDir, concurrency = cpus().length || 8 } = options;
+    const isDir = typeof sources === "string";
+    const directoryPath = isDir ? path.resolve(sources) : null;
+    const jobs = isDir ? (await fs.readdir(directoryPath, { withFileTypes: true })).map((entry) => ({
+      type: entry.isDirectory() ? "directory" : "file",
+      source: path.join(directoryPath, entry.name),
+      target: entry.name
+    })) : sources;
+    const results = /* @__PURE__ */ new Map();
+    const resolvers = /* @__PURE__ */ new Map();
+    const seenInodes = /* @__PURE__ */ new Map();
+    let jobIndex = 0;
+    let writeIndex = 0;
+    let activeWorkers = 0;
+    let allJobsQueued = false;
+    const writer = async () => {
+      const readBuffer = Buffer.alloc(64 * 1024);
+      while (true) {
+        if (stream.destroyed) return;
+        if (allJobsQueued && writeIndex >= jobs.length) break;
+        if (!results.has(writeIndex)) {
+          await new Promise((resolve) => resolvers.set(writeIndex, resolve));
+          continue;
+        }
+        const result = results.get(writeIndex);
+        results.delete(writeIndex);
+        resolvers.delete(writeIndex);
+        if (!result) {
+          writeIndex++;
+          continue;
+        }
+        packer.add(result.header);
+        if (result.body) if (result.body instanceof Uint8Array) {
+          if (result.body.length > 0) packer.write(result.body);
+        } else {
+          const { handle, size } = result.body;
+          try {
+            let bytesLeft = size;
+            while (bytesLeft > 0 && !stream.destroyed) {
+              const toRead = Math.min(bytesLeft, readBuffer.length);
+              const { bytesRead } = await handle.read(readBuffer, 0, toRead, null);
+              if (bytesRead === 0) break;
+              packer.write(readBuffer.subarray(0, bytesRead));
+              bytesLeft -= bytesRead;
             }
-… (1 removed line not captured in the source view)
+          } catch (error) {
+            stream.destroy(error);
+            return;
+          } finally {
+            await handle.close();
           }
-… (7 removed lines not captured in the source view)
+        }
+        packer.endEntry();
+        writeIndex++;
+      }
+    };
+    const controller = () => {
+      if (stream.destroyed || allJobsQueued) return;
+      while (activeWorkers < concurrency && jobIndex < jobs.length) {
+        activeWorkers++;
+        const currentIndex = jobIndex++;
+        processJob(jobs[currentIndex], currentIndex).catch(stream.destroy.bind(stream)).finally(() => {
+          activeWorkers--;
+          controller();
+        });
+      }
+      if (activeWorkers === 0 && jobIndex >= jobs.length) {
+        allJobsQueued = true;
+        resolvers.get(writeIndex)?.();
+      }
+    };
+    const processJob = async (job, index) => {
+      let jobResult = null;
+      const target = job.target.replace(/\\/g, "/");
+      try {
+        if (job.type === "content") {
+          const data = await normalizeBody(job.content);
+          const stat$1 = {
+            size: data.length,
+            isFile: () => true,
+            isDirectory: () => false,
+            isSymbolicLink: () => false,
+            mode: job.mode ?? 420,
+            mtime: /* @__PURE__ */ new Date(),
+            uid: process.getuid?.() ?? 0,
+            gid: process.getgid?.() ?? 0
+          };
+          if (filter && !filter(target, stat$1)) return;
+          let header$1 = {
+            name: target,
+            type: "file",
+            size: stat$1.size,
+            mode: stat$1.mode,
+            mtime: stat$1.mtime,
+            uid: stat$1.uid,
+            gid: stat$1.gid
+          };
+          if (map) header$1 = map(header$1);
+          jobResult = {
+            header: header$1,
+            body: data
+          };
+          return;
+        }
+        let stat = await fs.lstat(job.source, { bigint: true });
+        if (dereference && stat.isSymbolicLink()) {
+          const linkTarget = await fs.readlink(job.source);
+          const resolved = path.resolve(path.dirname(job.source), linkTarget);
+          const resolvedBase = baseDir ?? directoryPath ?? process.cwd();
+          if (!resolved.startsWith(resolvedBase + path.sep) && resolved !== resolvedBase) return;
+          stat = await fs.stat(job.source, { bigint: true });
+        }
+        if (filter && !filter(target, stat)) return;
+        let header = {
+          name: target,
+          size: 0,
+          mode: Number(stat.mode),
+          mtime: stat.mtime,
+          uid: Number(stat.uid),
+          gid: Number(stat.gid),
+          type: "file"
+        };
+        let body;
+        if (stat.isDirectory()) {
+          header.type = "directory";
+          header.name = target.endsWith("/") ? target : `${target}/`;
+          try {
+            for (const d of await fs.readdir(job.source, { withFileTypes: true })) jobs.push({
+              type: d.isDirectory() ? "directory" : "file",
+              source: path.join(job.source, d.name),
+              target: `${header.name}${d.name}`
+            });
+          } catch {}
+        } else if (stat.isSymbolicLink()) {
+          header.type = "symlink";
+          header.linkname = await fs.readlink(job.source);
+        } else if (stat.isFile()) {
+          header.size = Number(stat.size);
+          if (stat.nlink > 1 && seenInodes.has(stat.ino)) {
+            header.type = "link";
+            header.linkname = seenInodes.get(stat.ino);
+            header.size = 0;
+          } else {
+            if (stat.nlink > 1) seenInodes.set(stat.ino, target);
+            if (header.size > 0) if (header.size < 32 * 1024) body = await fs.readFile(job.source);
+            else body = {
+              handle: await fs.open(job.source, "r"),
+              size: header.size
             };
-      if (map) header = map(header);
-      packer.add(header);
-      if (data.length > 0) packer.write(data);
-      packer.endEntry();
-      break;
           }
-      }
+        } else return;
+        if (map) header = map(header);
+        jobResult = {
+          header,
+          body
+        };
+      } finally {
+        results.set(index, jobResult);
+        resolvers.get(index)?.();
      }
-… (4 removed lines not captured in the source view)
-  })();
+    };
+    controller();
+    await writer();
+    if (!stream.destroyed) packer.finalize();
+  })().catch((error) => stream.destroy(error));
   return stream;
 }
-function packTar(directoryPath, options = {}) {
-  const stream = new Readable({ read() {} });
-  (async () => {
-    try {
-      const resolvedPath = path.resolve(directoryPath);
-      const allSources = (await fs.readdir(resolvedPath, { withFileTypes: true })).map((dirent) => ({
-        type: dirent.isDirectory() ? "directory" : "file",
-        source: path.join(resolvedPath, dirent.name),
-        target: dirent.name
-      }));
-      const sources = [];
-      for (const source of allSources) if (source.type === "content" || !options.dereference || !await isSymlinkUnsafe(source.source, resolvedPath)) sources.push(source);
-      const sourceStream = packTarSources(sources, {
-        ...options,
-        baseDir: resolvedPath
-      });
-      sourceStream.on("data", (chunk) => stream.push(chunk));
-      sourceStream.on("end", () => stream.push(null));
-      sourceStream.on("error", (err) => stream.destroy(err));
-    } catch (error) {
-      stream.destroy(error);
-    }
-  })();
-  return stream;
-}
-async function isSymlinkUnsafe(sourcePath, baseDir) {
-  try {
-    if ((await fs.lstat(sourcePath)).isSymbolicLink()) {
-      const linkTarget = await fs.readlink(sourcePath);
-      const resolvedTarget = path.resolve(path.dirname(sourcePath), linkTarget);
-      return !(resolvedTarget === baseDir || resolvedTarget.startsWith(baseDir + path.sep));
-    }
-  } catch {
-    return true;
-  }
-  return false;
-}
 
 //#endregion
 //#region src/fs/path.ts
@@ -337,7 +372,7 @@ function createFSHandler(directoryPath, options) {
   handler: {
     onHeader(header) {
       if (signal.aborted) return;
-      activeEntryStream = new PassThrough();
+      activeEntryStream = new PassThrough({ highWaterMark: header.size > 1048576 ? 524288 : void 0 });
       const entryStream = activeEntryStream;
      const startOperation = () => {
        let opPromise;
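The rewrite replaces the old sequential stack walk with a bounded worker pool: `processJob` stats and reads entries concurrently (up to `concurrency`), parks each result in the `results` map keyed by job index, and the single `writer` loop drains those results strictly in order so the archive layout stays deterministic. A minimal standalone sketch of that ordered-completion pattern (illustrative names only, not the library's API):

```typescript
// Run `work` over `jobs` with at most `limit` in flight, yielding
// results in job order even when jobs finish out of order.
async function* orderedPool<T, R>(
  jobs: T[],
  work: (job: T) => Promise<R>,
  limit: number,
): AsyncGenerator<R> {
  type Slot = { ok: true; value: R } | { ok: false; err: unknown };
  const results = new Map<number, Slot>();
  const resolvers = new Map<number, () => void>();
  let jobIndex = 0;

  const runWorker = async () => {
    while (jobIndex < jobs.length) {
      const i = jobIndex++;
      try {
        results.set(i, { ok: true, value: await work(jobs[i]) });
      } catch (err) {
        results.set(i, { ok: false, err });
      }
      resolvers.get(i)?.(); // wake the consumer if it is parked on index i
    }
  };
  for (let w = 0; w < Math.min(limit, jobs.length); w++) void runWorker();

  for (let writeIndex = 0; writeIndex < jobs.length; writeIndex++) {
    if (!results.has(writeIndex)) {
      // Park until the worker that owns writeIndex completes.
      await new Promise<void>((resolve) => resolvers.set(writeIndex, resolve));
    }
    const slot = results.get(writeIndex)!;
    results.delete(writeIndex);
    resolvers.delete(writeIndex);
    if (!slot.ok) throw slot.err;
    yield slot.value;
  }
}
```

The real implementation layers tar-specific concerns on top: hardlink detection via the `seenInodes` map (files with `nlink > 1` become `link` entries pointing at the first-seen target), small files under 32 KiB buffered whole while larger ones stream through a file handle, and directory jobs appending their children to the shared `jobs` queue. The second hunk also raises the extraction `PassThrough` high-water mark to 512 KiB for entries over 1 MiB, which should reduce backpressure churn on large files.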
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "modern-tar",
-  "version": "0.4.1",
+  "version": "0.4.2",
   "description": "Zero dependency streaming tar parser and writer for JavaScript.",
   "author": "Ayuhito <hello@ayuhito.com>",
   "license": "MIT",
@@ -16,7 +16,7 @@
   },
   "devDependencies": {
     "@biomejs/biome": "2.2.5",
-    "@types/node": "^24.
+    "@types/node": "^24.7.1",
     "@vitest/coverage-v8": "^3.2.4",
     "tsdown": "^0.15.6",
     "typescript": "^5.9.3",