tar-vern 0.3.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README_pack.md +40 -6
- package/dist/extractor.d.ts +21 -0
- package/dist/extractor.d.ts.map +1 -0
- package/dist/index.cjs +377 -42
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +365 -30
- package/dist/index.js.map +1 -1
- package/dist/packer.d.ts +2 -1
- package/dist/packer.d.ts.map +1 -1
- package/dist/types.d.ts +27 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/utils.d.ts +34 -8
- package/dist/utils.d.ts.map +1 -1
- package/package.json +19 -3
- package/LICENSE +0 -21
- package/README.md +0 -212
- package/dist/generated/packageMetadata.d.ts +0 -16
- package/dist/generated/packageMetadata.d.ts.map +0 -1
package/dist/index.cjs
CHANGED
@@ -1,10 +1,11 @@
 /*!
  * name: tar-vern
- * version: 0.3.0
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
@@ -12,15 +13,20 @@ const stream = require("stream");
 const zlib = require("zlib");
 const fs = require("fs");
 const promises = require("fs/promises");
+const promises$1 = require("stream/promises");
+const path = require("path");
+const MAX_NAME = 100;
+const MAX_PREFIX = 155;
 const getUName = (candidateName, candidateId, reflectStat) => {
   return candidateName ?? (reflectStat === "all" ? candidateId.toString() : "root");
 };
 const getBuffer = (data) => {
   return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
 };
-const createDirectoryItem = async (path, reflectStat, options) => {
+const createDirectoryItem = async (path2, reflectStat, options, signal) => {
   const rs = reflectStat ?? "none";
   if (rs !== "none" && options?.directoryPath) {
+    signal?.throwIfAborted();
     const stats = await promises.stat(options.directoryPath);
     const mode = options?.mode ?? stats.mode;
     const uid = options?.uid ?? stats.uid;
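The pattern above repeats through the whole file: every entry-item factory gains a trailing optional `AbortSignal` and calls `signal?.throwIfAborted()` before touching the filesystem. A minimal cancellation sketch in TypeScript; the `tar-vern` import specifier and the `AbortError` name check are assumptions based on standard Node.js `AbortSignal` behavior, not shown in this diff:

```typescript
import { createReadFileItem } from "tar-vern";

const controller = new AbortController();
// Watchdog: give the stat/read five seconds before aborting.
const timer = setTimeout(() => controller.abort(), 5000);

try {
  // The trailing argument is threaded into stat() and createReadStream().
  const item = await createReadFileItem(
    "docs/readme.txt", "/tmp/readme.txt", "exceptName", undefined, controller.signal);
  console.log(`packed entry: ${item.path}`);
} catch (err) {
  if ((err as Error).name === "AbortError") {
    console.warn("item creation cancelled");
  } else {
    throw err;
  }
} finally {
  clearTimeout(timer);
}
```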
@@ -30,7 +36,7 @@ const createDirectoryItem = async (path, reflectStat, options) => {
     const gname = getUName(options?.gname, stats.gid, rs);
     return {
       kind: "directory",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -47,7 +53,7 @@ const createDirectoryItem = async (path, reflectStat, options) => {
     const gname = getUName(options?.gname, void 0, rs);
     return {
       kind: "directory",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -57,7 +63,8 @@ const createDirectoryItem = async (path, reflectStat, options) => {
     };
   }
 };
-const createFileItem = async (path, content, options) => {
+const createFileItem = async (path2, content, options, signal) => {
+  signal?.throwIfAborted();
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -66,7 +73,7 @@ const createFileItem = async (path, content, options) => {
   const gname = options?.gname ?? "root";
   return {
     kind: "file",
-    path,
+    path: path2,
     mode,
     uname,
     gname,
@@ -76,7 +83,7 @@ const createFileItem = async (path, content, options) => {
     content
   };
 };
-const createReadableFileItem = async (path, readable, options) => {
+const createReadableFileItem = async (path2, readable, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -88,13 +95,14 @@ const createReadableFileItem = async (path, readable, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of readable) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
     }
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -104,13 +112,13 @@ const createReadableFileItem = async (path, readable, options) => {
       content: {
         kind: "readable",
         length,
-        readable: stream.Readable.from(chunks)
+        readable: stream.Readable.from(chunks, { signal })
       }
     };
   } else {
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -125,7 +133,7 @@ const createReadableFileItem = async (path, readable, options) => {
     };
   }
 };
-const createGeneratorFileItem = async (path, generator, options) => {
+const createGeneratorFileItem = async (path2, generator, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -137,13 +145,14 @@ const createGeneratorFileItem = async (path, generator, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of generator) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
     }
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -153,13 +162,13 @@ const createGeneratorFileItem = async (path, generator, options) => {
       content: {
         kind: "readable",
         length,
-        readable: stream.Readable.from(chunks)
+        readable: stream.Readable.from(chunks, { signal })
       }
     };
   } else {
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -174,17 +183,18 @@ const createGeneratorFileItem = async (path, generator, options) => {
     };
   }
 };
-const createReadFileItem = async (path, filePath, reflectStat, options) => {
+const createReadFileItem = async (path2, filePath, reflectStat, options, signal) => {
   const rs = reflectStat ?? "exceptName";
+  signal?.throwIfAborted();
   const stats = await promises.stat(filePath);
-  const reader = fs.createReadStream(filePath);
+  const reader = fs.createReadStream(filePath, { signal });
   const mode = options?.mode ?? (rs !== "none" ? stats.mode : void 0);
   const uid = options?.uid ?? (rs !== "none" ? stats.uid : void 0);
   const gid = options?.gid ?? (rs !== "none" ? stats.gid : void 0);
   const date = options?.date ?? (rs !== "none" ? stats.mtime : void 0);
   const uname = getUName(options?.uname, stats.uid, rs);
   const gname = getUName(options?.gname, stats.gid, rs);
-  return await createReadableFileItem(path, reader, {
+  return await createReadableFileItem(path2, reader, {
     length: stats.size,
     mode,
     uname,
@@ -192,16 +202,85 @@ const createReadFileItem = async (path, filePath, reflectStat, options) => {
     uid,
     gid,
     date
-  });
+  }, signal);
+};
+const storeReaderToFile = async (reader, path2, signal) => {
+  const writer = fs.createWriteStream(path2, { signal });
+  await promises$1.pipeline(reader, writer, { signal });
 };
-const …
-const …
-…
-…
-…
-…
-…
-…
+const getAllFilesInDirectory = async (baseDir, signal) => {
+  const collectFiles = async (currentDir, relativePath) => {
+    signal?.throwIfAborted();
+    try {
+      const entries = await promises.readdir(currentDir, { withFileTypes: true });
+      const result = [];
+      const tasks = entries.map(async (entry) => {
+        signal?.throwIfAborted();
+        const entryRelativePath = path.join(relativePath, entry.name);
+        if (entry.isDirectory()) {
+          const entryFullPath = path.join(currentDir, entry.name);
+          const directoryContents = await collectFiles(entryFullPath, entryRelativePath);
+          return [entryRelativePath, ...directoryContents];
+        } else {
+          return [entryRelativePath];
+        }
+      });
+      const allResults = await Promise.all(tasks);
+      for (const entryResults of allResults) {
+        result.push(...entryResults);
+      }
+      return result;
+    } catch (error) {
+      console.warn(`Warning: Could not read directory ${currentDir}:`, error);
+      return [];
+    }
+  };
+  return await collectFiles(baseDir, "");
+};
+const createEntryItemGenerator = async function* (baseDir, relativePaths, reflectStat, signal) {
+  const rs = reflectStat ?? "exceptName";
+  const pathsToProcess = relativePaths ?? await getAllFilesInDirectory(baseDir, signal);
+  for (const relativePath of pathsToProcess) {
+    signal?.throwIfAborted();
+    const fsPath = path.join(baseDir, relativePath);
+    try {
+      signal?.throwIfAborted();
+      const stats = await promises.stat(fsPath);
+      if (stats.isDirectory()) {
+        yield await createDirectoryItem(relativePath, rs, {
+          directoryPath: fsPath
+        }, signal);
+      } else if (stats.isFile()) {
+        yield await createReadFileItem(relativePath, fsPath, rs, void 0, signal);
+      }
+    } catch (error) {
+      console.warn(`Warning: Could not access ${fsPath}:`, error);
+      continue;
+    }
+  }
+};
+const extractTo = async (iterator, basePath, signal) => {
+  for await (const entry of iterator) {
+    signal?.throwIfAborted();
+    const targetPath = path.join(basePath, entry.path);
+    if (entry.kind === "directory") {
+      try {
+        signal?.throwIfAborted();
+        await promises.mkdir(targetPath, { recursive: true, mode: entry.mode });
+      } catch (error) {
+        if (error.code !== "EEXIST") {
+          throw error;
+        }
+      }
+    } else if (entry.kind === "file") {
+      const parentDir = path.dirname(targetPath);
+      signal?.throwIfAborted();
+      await promises.mkdir(parentDir, { recursive: true });
+      const fileEntry = entry;
+      const content = await fileEntry.getContent("buffer");
+      await promises.writeFile(targetPath, content, { mode: entry.mode, signal });
+    }
+  }
 };
 const utf8ByteLength = (str) => {
   return Buffer.byteLength(str, "utf8");
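The three additions above compose into a complete pack-a-directory pipeline: `createEntryItemGenerator` walks `baseDir` (falling back to `getAllFilesInDirectory` when no explicit path list is passed), `createTarPacker` serializes the yielded items, and `storeReaderToFile` pipes the result to disk via `stream/promises` `pipeline`. A usage sketch, assuming the public API mirrors the CJS exports in this diff:

```typescript
import { createEntryItemGenerator, createTarPacker, storeReaderToFile } from "tar-vern";

const controller = new AbortController();

// Walk ./project recursively (no explicit path list), reflecting file
// stats except owner names, and gzip the packed stream.
const entries = createEntryItemGenerator(
  "./project", undefined, "exceptName", controller.signal);
const packed = createTarPacker(entries, "gzip", controller.signal);

// One signal cancels the walk, the packer, and the file write alike.
await storeReaderToFile(packed, "./project.tar.gz", controller.signal);
```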
@@ -219,13 +298,11 @@ const truncateUtf8Safe = (str, maxBytes) => {
   }
   return str.slice(0, i);
 };
-const …
-…
-…
-  if (utf8ByteLength(path) <= MAX_NAME) {
-    return { prefix: "", name: path };
+const splitPath = (path2) => {
+  if (utf8ByteLength(path2) <= MAX_NAME) {
+    return { prefix: "", name: path2 };
   }
-  const parts = …
+  const parts = path2.split("/");
   let name = parts.pop() ?? "";
   let prefix = parts.join("/");
   if (utf8ByteLength(name) > MAX_NAME) {
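For context: `MAX_NAME` (100) and `MAX_PREFIX` (155) are the widths of the ustar `name` and `prefix` header fields, and `splitPath` moves leading directory components into the prefix when a path exceeds 100 UTF-8 bytes, letting long paths fit a plain ustar header. A self-contained sketch of the split as this diff shows it (the overflow-truncation branch that follows in the real code is elided, as it is in the diff):

```typescript
const MAX_NAME = 100;   // ustar "name" field width in bytes
const MAX_PREFIX = 155; // ustar "prefix" field width in bytes

const splitPath = (p: string): { prefix: string; name: string } => {
  // Short paths live entirely in the name field.
  if (Buffer.byteLength(p, "utf8") <= MAX_NAME) {
    return { prefix: "", name: p };
  }
  // Otherwise split at the last "/": the tail goes to name,
  // the leading directories go to the 155-byte prefix field.
  const parts = p.split("/");
  const name = parts.pop() ?? "";
  const prefix = parts.join("/");
  // (The real code then truncates name/prefix that still exceed the limits.)
  return { prefix, name };
};
```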
@@ -249,9 +326,9 @@ const getPaddedBytes = (buffer) => {
   }
 };
 const terminatorBytes = Buffer.alloc(1024, 0);
-const createTarHeader = (type, …
+const createTarHeader = (type, path2, size, mode, uname, gname, uid, gid, date) => {
   const buffer = Buffer.alloc(512, 0);
-  const { name, prefix } = splitPath(…
+  const { name, prefix } = splitPath(path2);
   buffer.write(name, 0, 100, "utf8");
   getOctalBytes(mode & 4095, 8).copy(buffer, 100);
   getOctalBytes(uid, 8).copy(buffer, 108);
@@ -301,10 +378,11 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
       const totalPaddedContentBytes = getPaddedBytes(contentBytes);
       yield totalPaddedContentBytes;
     } else {
+      const content = entryItemContent;
       const tarHeaderBytes = createTarHeader(
         "file",
         entryItem.path,
-        …
+        content.length,
         entryItem.mode,
         entryItem.uname,
         entryItem.gname,
@@ -314,10 +392,10 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
       );
       yield tarHeaderBytes;
       let position = 0;
-      switch (…
+      switch (content.kind) {
         // Content is a generator
         case "generator": {
-          for await (const contentBytes of …
+          for await (const contentBytes of content.generator) {
             signal?.throwIfAborted();
             yield contentBytes;
             position += contentBytes.length;
@@ -326,9 +404,9 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
         }
         // Content is a readable stream
         case "readable": {
-          for await (const …
+          for await (const chunk of content.readable) {
             signal?.throwIfAborted();
-            const contentBytes = getBuffer(…
+            const contentBytes = getBuffer(chunk);
             yield contentBytes;
             position += contentBytes.length;
           }
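The `switch (content.kind)` above shows the packer pulling file bytes from either an async generator or a Readable after emitting each 512-byte header. A sketch of feeding generator-backed content through the packer; the item shapes are read off this diff, and the buffering behavior noted in the comment is an inference from the branch shown earlier, not a documented guarantee:

```typescript
import { createGeneratorFileItem, createTarPacker } from "tar-vern";

// Content produced on demand, chunk by chunk.
async function* logLines(): AsyncGenerator<string> {
  for (let i = 0; i < 3; i++) {
    yield `line ${i}\n`;
  }
}

async function* items() {
  // With no length supplied, the diff shows the chunks being buffered
  // first so the tar header can record an exact size.
  yield await createGeneratorFileItem("log.txt", logLines());
}

// createTarPacker returns a Readable; pipe it wherever bytes should go.
const tarStream = createTarPacker(items(), "none");
tarStream.pipe(process.stdout);
```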
@@ -366,23 +444,280 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
   switch (ct) {
     // No compression
     case "none": {
-      return stream.Readable.from(entryItemIterator());
+      return stream.Readable.from(entryItemIterator(), { signal });
     }
     // Gzip compression
     case "gzip": {
       const gzipStream = zlib.createGzip({ level: 9 });
-      const entryItemStream = stream.Readable.from(entryItemIterator());
+      const entryItemStream = stream.Readable.from(entryItemIterator(), { signal });
       entryItemStream.pipe(gzipStream);
       return gzipStream;
     }
   }
 };
+const parseOctalBytes = (buffer, offset, length) => {
+  const str = buffer.subarray(offset, offset + length).toString("ascii").replace(/\0/g, "").trim();
+  return str ? parseInt(str, 8) : 0;
+};
+const parseString = (buffer, offset, length) => {
+  return buffer.subarray(offset, offset + length).toString("utf8").replace(/\0/g, "").trim();
+};
+const readExactBytes = async (iterator, size, signal) => {
+  const chunks = [];
+  let totalRead = 0;
+  while (totalRead < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      if (totalRead === 0) {
+        return void 0;
+      } else {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);
+      }
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalRead;
+    if (chunk.length <= needed) {
+      chunks.push(chunk);
+      totalRead += chunk.length;
+    } else {
+      chunks.push(chunk.subarray(0, needed));
+      await iterator.return?.(chunk.subarray(needed));
+      totalRead = size;
+    }
+  }
+  return Buffer.concat(chunks, size);
+};
+const skipExactBytes = async (iterator, size, signal) => {
+  let totalSkipped = 0;
+  while (totalSkipped < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalSkipped;
+    if (chunk.length <= needed) {
+      totalSkipped += chunk.length;
+    } else {
+      await iterator.return?.(chunk.subarray(needed));
+      totalSkipped = size;
+    }
+  }
+};
+const skipPaddingBytesTo512Boundary = async (iterator, contentSize, signal) => {
+  const padding = (512 - contentSize % 512) % 512;
+  if (padding > 0) {
+    await skipExactBytes(iterator, padding, signal);
+  }
+};
+const parseTarHeader = (buffer) => {
+  if (buffer.every((b) => b === 0)) {
+    return void 0;
+  }
+  const name = parseString(buffer, 0, 100);
+  const mode = parseOctalBytes(buffer, 100, 8);
+  const uid = parseOctalBytes(buffer, 108, 8);
+  const gid = parseOctalBytes(buffer, 116, 8);
+  const size = parseOctalBytes(buffer, 124, 12);
+  const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1e3);
+  const checksum = parseOctalBytes(buffer, 148, 8);
+  const typeflag = parseString(buffer, 156, 1);
+  const magic = parseString(buffer, 257, 6);
+  const uname = parseString(buffer, 265, 32);
+  const gname = parseString(buffer, 297, 32);
+  const prefix = parseString(buffer, 345, 155);
+  if (magic !== "ustar") {
+    throw new Error(`Invalid tar format: magic="${magic}"`);
+  }
+  let calculatedSum = 0;
+  for (let i = 0; i < 512; i++) {
+    if (i >= 148 && i < 156) {
+      calculatedSum += 32;
+    } else {
+      calculatedSum += buffer[i];
+    }
+  }
+  if (calculatedSum !== checksum) {
+    throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);
+  }
+  let path2 = prefix ? `${prefix}/${name}` : name;
+  if (path2.endsWith("/")) {
+    path2 = path2.slice(0, -1);
+  }
+  const kind = typeflag === "5" ? "directory" : "file";
+  return {
+    kind,
+    path: path2,
+    size,
+    mode,
+    uid,
+    gid,
+    mtime,
+    uname: uname || uid.toString(),
+    gname: gname || gid.toString(),
+    checksum,
+    consumed: false
+  };
+};
+const createBufferedAsyncIterator = (iterable, signal) => {
+  const buffer = [];
+  const iterator = iterable[Symbol.asyncIterator]();
+  return {
+    next: async () => {
+      signal?.throwIfAborted();
+      if (buffer.length > 0) {
+        return { value: buffer.shift(), done: false };
+      }
+      return iterator.next();
+    },
+    return: async (value) => {
+      if (value !== void 0) {
+        buffer.unshift(value);
+      }
+      return { value: void 0, done: false };
+    }
+  };
+};
+const createReadableFromIterator = (iterator, size, signal, consumedRef) => {
+  const generator = async function* () {
+    let remainingBytes = size;
+    while (remainingBytes > 0) {
+      signal?.throwIfAborted();
+      const { value, done } = await iterator.next();
+      if (done) {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);
+      }
+      const chunk = getBuffer(value);
+      if (chunk.length <= remainingBytes) {
+        remainingBytes -= chunk.length;
+        yield chunk;
+      } else {
+        const needed = chunk.subarray(0, remainingBytes);
+        const excess = chunk.subarray(remainingBytes);
+        remainingBytes = 0;
+        await iterator.return?.(excess);
+        yield needed;
+        break;
+      }
+    }
+    await skipPaddingBytesTo512Boundary(iterator, size, signal);
+    consumedRef.consumed = true;
+  };
+  return stream.Readable.from(generator(), { signal });
+};
+const createTarExtractor = async function* (readable, compressionType, signal) {
+  const ct = compressionType ?? "none";
+  let inputStream;
+  switch (ct) {
+    case "gzip":
+      const gunzip = zlib.createGunzip();
+      readable.pipe(gunzip);
+      inputStream = gunzip;
+      break;
+    case "none":
+    default:
+      inputStream = readable;
+      break;
+  }
+  const iterator = createBufferedAsyncIterator(inputStream, signal);
+  let header;
+  while (true) {
+    signal?.throwIfAborted();
+    if (header?.kind === "file" && !header.consumed) {
+      await skipExactBytes(iterator, header.size, signal);
+      await skipPaddingBytesTo512Boundary(iterator, header.size, signal);
+      header.consumed = true;
+    }
+    let headerBuffer;
+    try {
+      headerBuffer = await readExactBytes(iterator, 512, signal);
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("Unexpected end of stream")) {
+        throw new Error("Invalid tar format: incomplete header");
+      }
+      throw error;
+    }
+    if (headerBuffer === void 0) {
+      break;
+    }
+    header = parseTarHeader(headerBuffer);
+    if (!header) {
+      const secondBlock = await readExactBytes(iterator, 512, signal);
+      if (secondBlock === void 0 || secondBlock.every((b) => b === 0)) {
+        break;
+      }
+      throw new Error("Invalid tar format: expected terminator block");
+    }
+    if (header.kind === "directory") {
+      yield {
+        kind: "directory",
+        path: header.path,
+        mode: header.mode,
+        uid: header.uid,
+        gid: header.gid,
+        uname: header.uname,
+        gname: header.gname,
+        date: header.mtime
+      };
+    } else {
+      const currentHeader = header;
+      yield {
+        kind: "file",
+        path: currentHeader.path,
+        mode: currentHeader.mode,
+        uid: currentHeader.uid,
+        gid: currentHeader.gid,
+        uname: currentHeader.uname,
+        gname: currentHeader.gname,
+        date: currentHeader.mtime,
+        getContent: async (type) => {
+          if (currentHeader.consumed) {
+            throw new Error("Content has already been consumed. Multiple calls to getContent are not supported.");
+          }
+          switch (type) {
+            // For string
+            case "string": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer.toString("utf8");
+            }
+            // For buffer
+            case "buffer": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer;
+            }
+            // For Readble stream
+            case "readable": {
+              const readable2 = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);
+              return readable2;
+            }
+            default:
+              throw new Error(`Unsupported content type: ${type}`);
+          }
+        }
+      };
+    }
+  }
+};
 exports.createDirectoryItem = createDirectoryItem;
+exports.createEntryItemGenerator = createEntryItemGenerator;
 exports.createFileItem = createFileItem;
 exports.createGeneratorFileItem = createGeneratorFileItem;
 exports.createReadFileItem = createReadFileItem;
 exports.createReadableFileItem = createReadableFileItem;
+exports.createTarExtractor = createTarExtractor;
 exports.createTarPacker = createTarPacker;
-exports.…
+exports.extractTo = extractTo;
 exports.storeReaderToFile = storeReaderToFile;
 //# sourceMappingURL=index.cjs.map
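The extractor is the other headline addition: `createTarExtractor` reads 512-byte ustar headers from any Readable (optionally gunzipping first), verifies magic and checksum, and lazily yields directory and file entries whose content is pulled at most once via `getContent`; `extractTo` is the convenience loop that writes those entries to disk. A round-trip sketch, assuming the public TypeScript API matches these CJS exports:

```typescript
import { createReadStream } from "fs";
import { createTarExtractor, extractTo } from "tar-vern";

const controller = new AbortController();

// Unpack a gzipped tarball under ./out, honoring entry modes.
await extractTo(
  createTarExtractor(createReadStream("./project.tar.gz"), "gzip", controller.signal),
  "./out",
  controller.signal);

// Or walk entries manually; content may be fetched as a string, a
// Buffer, or a Readable, but only once per entry.
for await (const entry of createTarExtractor(createReadStream("./plain.tar"))) {
  if (entry.kind === "file") {
    const text = await entry.getContent("string");
    console.log(`${entry.path}: ${text.length} chars`);
  }
}
```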