tar-vern 0.2.0 → 1.0.0
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/README_pack.md +28 -6
- package/dist/extractor.d.ts +21 -0
- package/dist/extractor.d.ts.map +1 -0
- package/dist/index.cjs +347 -41
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +335 -29
- package/dist/index.js.map +1 -1
- package/dist/packer.d.ts +2 -1
- package/dist/packer.d.ts.map +1 -1
- package/dist/types.d.ts +27 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/utils.d.ts +34 -8
- package/dist/utils.d.ts.map +1 -1
- package/package.json +19 -3
- package/LICENSE +0 -21
- package/README.md +0 -212
- package/dist/generated/packageMetadata.d.ts +0 -16
- package/dist/generated/packageMetadata.d.ts.map +0 -1
package/dist/index.js
CHANGED
@@ -1,24 +1,30 @@
 /*!
  * name: tar-vern
- * version: 0.2.0
+ * version: 1.0.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
+ * git.commit.hash: def794cc361e3023c973a6dc7719d38fc08ac363
  */
 import { Readable } from "stream";
-import { createGzip } from "zlib";
+import { createGzip, createGunzip } from "zlib";
 import { createReadStream, createWriteStream } from "fs";
-import { stat } from "fs/promises";
+import { stat, mkdir, writeFile } from "fs/promises";
+import { pipeline } from "stream/promises";
+import { join, dirname } from "path";
+const MAX_NAME = 100;
+const MAX_PREFIX = 155;
 const getUName = (candidateName, candidateId, reflectStat) => {
   return candidateName ?? (reflectStat === "all" ? candidateId.toString() : "root");
 };
 const getBuffer = (data) => {
   return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
 };
-const createDirectoryItem = async (path, reflectStat, options) => {
+const createDirectoryItem = async (path, reflectStat, options, signal) => {
   const rs = reflectStat ?? "none";
   if (rs !== "none" && options?.directoryPath) {
+    signal?.throwIfAborted();
     const stats = await stat(options.directoryPath);
     const mode = options?.mode ?? stats.mode;
     const uid = options?.uid ?? stats.uid;
@@ -55,7 +61,8 @@ const createDirectoryItem = async (path, reflectStat, options) => {
     };
   }
 };
-const createFileItem = async (path, content, options) => {
+const createFileItem = async (path, content, options, signal) => {
+  signal?.throwIfAborted();
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -74,7 +81,7 @@ const createFileItem = async (path, content, options) => {
     content
   };
 };
-const createReadableFileItem = async (path, readable, options) => {
+const createReadableFileItem = async (path, readable, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -86,6 +93,7 @@ const createReadableFileItem = async (path, readable, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of readable) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
@@ -102,7 +110,7 @@ const createReadableFileItem = async (path, readable, options) => {
       content: {
         kind: "readable",
         length,
-        readable: Readable.from(chunks)
+        readable: Readable.from(chunks, { signal })
       }
     };
   } else {
@@ -123,7 +131,7 @@ const createReadableFileItem = async (path, readable, options) => {
     };
   }
 };
-const createGeneratorFileItem = async (path, generator, options) => {
+const createGeneratorFileItem = async (path, generator, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -135,6 +143,7 @@ const createGeneratorFileItem = async (path, generator, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of generator) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
@@ -151,7 +160,7 @@ const createGeneratorFileItem = async (path, generator, options) => {
       content: {
         kind: "readable",
         length,
-        readable: Readable.from(chunks)
+        readable: Readable.from(chunks, { signal })
       }
     };
   } else {
@@ -172,10 +181,11 @@ const createGeneratorFileItem = async (path, generator, options) => {
     };
   }
 };
-const createReadFileItem = async (path, filePath, reflectStat, options) => {
+const createReadFileItem = async (path, filePath, reflectStat, options, signal) => {
   const rs = reflectStat ?? "exceptName";
+  signal?.throwIfAborted();
   const stats = await stat(filePath);
-  const reader = createReadStream(filePath);
+  const reader = createReadStream(filePath, { signal });
   const mode = options?.mode ?? (rs !== "none" ? stats.mode : void 0);
   const uid = options?.uid ?? (rs !== "none" ? stats.uid : void 0);
   const gid = options?.gid ?? (rs !== "none" ? stats.gid : void 0);
@@ -190,15 +200,55 @@ const createReadFileItem = async (path, filePath, reflectStat, options) => {
     uid,
     gid,
     date
-  });
+  }, signal);
+};
+const storeReaderToFile = async (reader, path, signal) => {
+  const writer = createWriteStream(path, { signal });
+  await pipeline(reader, writer, { signal });
+};
+const createEntryItemGenerator = async function* (baseDir, relativePaths, reflectStat, signal) {
+  const rs = reflectStat ?? "exceptName";
+  for (const relativePath of relativePaths) {
+    signal?.throwIfAborted();
+    const fsPath = join(baseDir, relativePath);
+    try {
+      signal?.throwIfAborted();
+      const stats = await stat(fsPath);
+      if (stats.isDirectory()) {
+        yield await createDirectoryItem(relativePath, rs, {
+          directoryPath: fsPath
+        }, signal);
+      } else if (stats.isFile()) {
+        yield await createReadFileItem(relativePath, fsPath, rs, void 0, signal);
+      }
+    } catch (error) {
+      console.warn(`Warning: Could not access ${fsPath}:`, error);
+      continue;
+    }
+  }
 };
-const
-const
-
-
-
-
-
+const extractTo = async (iterator, basePath, signal) => {
+  for await (const entry of iterator) {
+    signal?.throwIfAborted();
+    const targetPath = join(basePath, entry.path);
+    if (entry.kind === "directory") {
+      try {
+        signal?.throwIfAborted();
+        await mkdir(targetPath, { recursive: true, mode: entry.mode });
+      } catch (error) {
+        if (error.code !== "EEXIST") {
+          throw error;
+        }
+      }
+    } else if (entry.kind === "file") {
+      const parentDir = dirname(targetPath);
+      signal?.throwIfAborted();
+      await mkdir(parentDir, { recursive: true });
+      const fileEntry = entry;
+      const content = await fileEntry.getContent("buffer");
+      await writeFile(targetPath, content, { mode: entry.mode, signal });
+    }
+  }
 };
 const utf8ByteLength = (str) => {
   return Buffer.byteLength(str, "utf8");
@@ -216,8 +266,6 @@ const truncateUtf8Safe = (str, maxBytes) => {
   }
   return str.slice(0, i);
 };
-const MAX_NAME = 100;
-const MAX_PREFIX = 155;
 const splitPath = (path) => {
   if (utf8ByteLength(path) <= MAX_NAME) {
     return { prefix: "", name: path };
@@ -298,10 +346,11 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
         const totalPaddedContentBytes = getPaddedBytes(contentBytes);
         yield totalPaddedContentBytes;
       } else {
+        const content = entryItemContent;
         const tarHeaderBytes = createTarHeader(
           "file",
           entryItem.path,
-
+          content.length,
           entryItem.mode,
           entryItem.uname,
           entryItem.gname,
@@ -311,10 +360,10 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
         );
         yield tarHeaderBytes;
         let position = 0;
-        switch (
+        switch (content.kind) {
           // Content is a generator
           case "generator": {
-            for await (const contentBytes of
+            for await (const contentBytes of content.generator) {
               signal?.throwIfAborted();
               yield contentBytes;
               position += contentBytes.length;
@@ -323,9 +372,9 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
           }
           // Content is a readable stream
           case "readable": {
-            for await (const
+            for await (const chunk of content.readable) {
               signal?.throwIfAborted();
-              const contentBytes = getBuffer(
+              const contentBytes = getBuffer(chunk);
               yield contentBytes;
               position += contentBytes.length;
             }
@@ -363,25 +412,282 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
   switch (ct) {
     // No compression
     case "none": {
-      return Readable.from(entryItemIterator());
+      return Readable.from(entryItemIterator(), { signal });
     }
     // Gzip compression
     case "gzip": {
       const gzipStream = createGzip({ level: 9 });
-      const entryItemStream = Readable.from(entryItemIterator());
+      const entryItemStream = Readable.from(entryItemIterator(), { signal });
       entryItemStream.pipe(gzipStream);
       return gzipStream;
     }
   }
 };
+const parseOctalBytes = (buffer, offset, length) => {
+  const str = buffer.subarray(offset, offset + length).toString("ascii").replace(/\0/g, "").trim();
+  return str ? parseInt(str, 8) : 0;
+};
+const parseString = (buffer, offset, length) => {
+  return buffer.subarray(offset, offset + length).toString("utf8").replace(/\0/g, "").trim();
+};
+const readExactBytes = async (iterator, size, signal) => {
+  const chunks = [];
+  let totalRead = 0;
+  while (totalRead < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      if (totalRead === 0) {
+        return void 0;
+      } else {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);
+      }
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalRead;
+    if (chunk.length <= needed) {
+      chunks.push(chunk);
+      totalRead += chunk.length;
+    } else {
+      chunks.push(chunk.subarray(0, needed));
+      await iterator.return?.(chunk.subarray(needed));
+      totalRead = size;
+    }
+  }
+  return Buffer.concat(chunks, size);
+};
+const skipExactBytes = async (iterator, size, signal) => {
+  let totalSkipped = 0;
+  while (totalSkipped < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalSkipped;
+    if (chunk.length <= needed) {
+      totalSkipped += chunk.length;
+    } else {
+      await iterator.return?.(chunk.subarray(needed));
+      totalSkipped = size;
+    }
+  }
+};
+const skipPaddingBytesTo512Boundary = async (iterator, contentSize, signal) => {
+  const padding = (512 - contentSize % 512) % 512;
+  if (padding > 0) {
+    await skipExactBytes(iterator, padding, signal);
+  }
+};
+const parseTarHeader = (buffer) => {
+  if (buffer.every((b) => b === 0)) {
+    return void 0;
+  }
+  const name = parseString(buffer, 0, 100);
+  const mode = parseOctalBytes(buffer, 100, 8);
+  const uid = parseOctalBytes(buffer, 108, 8);
+  const gid = parseOctalBytes(buffer, 116, 8);
+  const size = parseOctalBytes(buffer, 124, 12);
+  const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1e3);
+  const checksum = parseOctalBytes(buffer, 148, 8);
+  const typeflag = parseString(buffer, 156, 1);
+  const magic = parseString(buffer, 257, 6);
+  const uname = parseString(buffer, 265, 32);
+  const gname = parseString(buffer, 297, 32);
+  const prefix = parseString(buffer, 345, 155);
+  if (magic !== "ustar") {
+    throw new Error(`Invalid tar format: magic="${magic}"`);
+  }
+  let calculatedSum = 0;
+  for (let i = 0; i < 512; i++) {
+    if (i >= 148 && i < 156) {
+      calculatedSum += 32;
+    } else {
+      calculatedSum += buffer[i];
+    }
+  }
+  if (calculatedSum !== checksum) {
+    throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);
+  }
+  let path = prefix ? `${prefix}/${name}` : name;
+  if (path.endsWith("/")) {
+    path = path.slice(0, -1);
+  }
+  const kind = typeflag === "5" ? "directory" : "file";
+  return {
+    kind,
+    path,
+    size,
+    mode,
+    uid,
+    gid,
+    mtime,
+    uname: uname || uid.toString(),
+    gname: gname || gid.toString(),
+    checksum,
+    consumed: false
+  };
+};
+const createBufferedAsyncIterator = (iterable, signal) => {
+  const buffer = [];
+  const iterator = iterable[Symbol.asyncIterator]();
+  return {
+    next: async () => {
+      signal?.throwIfAborted();
+      if (buffer.length > 0) {
+        return { value: buffer.shift(), done: false };
+      }
+      return iterator.next();
+    },
+    return: async (value) => {
+      if (value !== void 0) {
+        buffer.unshift(value);
+      }
+      return { value: void 0, done: false };
+    }
+  };
+};
+const createReadableFromIterator = (iterator, size, signal, consumedRef) => {
+  const generator = async function* () {
+    let remainingBytes = size;
+    while (remainingBytes > 0) {
+      signal?.throwIfAborted();
+      const { value, done } = await iterator.next();
+      if (done) {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);
+      }
+      const chunk = getBuffer(value);
+      if (chunk.length <= remainingBytes) {
+        remainingBytes -= chunk.length;
+        yield chunk;
+      } else {
+        const needed = chunk.subarray(0, remainingBytes);
+        const excess = chunk.subarray(remainingBytes);
+        remainingBytes = 0;
+        await iterator.return?.(excess);
+        yield needed;
+        break;
+      }
+    }
+    await skipPaddingBytesTo512Boundary(iterator, size, signal);
+    consumedRef.consumed = true;
+  };
+  return Readable.from(generator(), { signal });
+};
+const createTarExtractor = async function* (readable, compressionType, signal) {
+  const ct = compressionType ?? "none";
+  let inputStream;
+  switch (ct) {
+    case "gzip":
+      const gunzip = createGunzip();
+      readable.pipe(gunzip);
+      inputStream = gunzip;
+      break;
+    case "none":
+    default:
+      inputStream = readable;
+      break;
+  }
+  const iterator = createBufferedAsyncIterator(inputStream, signal);
+  let header;
+  while (true) {
+    signal?.throwIfAborted();
+    if (header?.kind === "file" && !header.consumed) {
+      await skipExactBytes(iterator, header.size, signal);
+      await skipPaddingBytesTo512Boundary(iterator, header.size, signal);
+      header.consumed = true;
+    }
+    let headerBuffer;
+    try {
+      headerBuffer = await readExactBytes(iterator, 512, signal);
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("Unexpected end of stream")) {
+        throw new Error("Invalid tar format: incomplete header");
+      }
+      throw error;
+    }
+    if (headerBuffer === void 0) {
+      break;
+    }
+    header = parseTarHeader(headerBuffer);
+    if (!header) {
+      const secondBlock = await readExactBytes(iterator, 512, signal);
+      if (secondBlock === void 0 || secondBlock.every((b) => b === 0)) {
+        break;
+      }
+      throw new Error("Invalid tar format: expected terminator block");
+    }
+    if (header.kind === "directory") {
+      yield {
+        kind: "directory",
+        path: header.path,
+        mode: header.mode,
+        uid: header.uid,
+        gid: header.gid,
+        uname: header.uname,
+        gname: header.gname,
+        date: header.mtime
+      };
+    } else {
+      const currentHeader = header;
+      yield {
+        kind: "file",
+        path: currentHeader.path,
+        mode: currentHeader.mode,
+        uid: currentHeader.uid,
+        gid: currentHeader.gid,
+        uname: currentHeader.uname,
+        gname: currentHeader.gname,
+        date: currentHeader.mtime,
+        getContent: async (type) => {
+          if (currentHeader.consumed) {
+            throw new Error("Content has already been consumed. Multiple calls to getContent are not supported.");
+          }
+          switch (type) {
+            // For string
+            case "string": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer.toString("utf8");
+            }
+            // For buffer
+            case "buffer": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer;
+            }
+            // For Readable stream
+            case "readable": {
+              const readable2 = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);
+              return readable2;
+            }
+            default:
+              throw new Error(`Unsupported content type: ${type}`);
+          }
+        }
+      };
+    }
+  }
+};
 export {
   createDirectoryItem,
+  createEntryItemGenerator,
   createFileItem,
   createGeneratorFileItem,
   createReadFileItem,
   createReadableFileItem,
+  createTarExtractor,
   createTarPacker,
-
+  extractTo,
   storeReaderToFile
 };
 //# sourceMappingURL=index.js.map
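
The headline addition in 1.0.0 is the extraction path: `createTarExtractor` parses a tar stream (optionally gunzipping it first) into entry items, and `extractTo` writes those items to disk. A minimal usage sketch, based only on the signatures visible in this diff (`createTarExtractor(readable, compressionType, signal)` and `extractTo(iterator, basePath, signal)`); anything beyond those, such as exact TypeScript types, is an assumption:

```ts
import { createReadStream } from "fs";
// Exports taken from the new export list in this diff.
import { createTarExtractor, extractTo } from "tar-vern";

const controller = new AbortController();

// Stream a gzipped archive and materialize every entry under ./out.
// Every loop in the new code checks signal?.throwIfAborted(), so
// controller.abort() cancels mid-archive.
const reader = createReadStream("archive.tar.gz");
await extractTo(
  createTarExtractor(reader, "gzip", controller.signal),
  "./out",
  controller.signal
);
```

As the diff shows, file entries hand out their bytes exactly once: `getContent` throws on a second call, and an entry whose content is never requested is drained automatically before the next header is read, so entries must be consumed in stream order.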
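The other change threaded through the whole file is the trailing `AbortSignal` parameter on `createTarPacker` and every item factory. A sketch of cancellable packing under the same caveat: the argument order comes from the hunk headers, but whether the packer expects an async iterable of entry items (as assumed here) or some other entry source is not visible in this diff.

```ts
import { createTarPacker, createFileItem, storeReaderToFile } from "tar-vern";

const controller = new AbortController();

// Assumed entry source: an async generator of entry items. Each factory
// now accepts a trailing signal and checks it while buffering content.
const entries = (async function* () {
  yield await createFileItem("hello.txt", "Hello, tar-vern!", undefined, controller.signal);
})();

// createTarPacker returns a readable stream ("none" or "gzip" compression);
// storeReaderToFile pipes it to disk via the stream/promises pipeline(),
// which also honors the signal.
const packed = createTarPacker(entries, "gzip", controller.signal);
await storeReaderToFile(packed, "out.tar.gz", controller.signal);
```

Because the signal is passed to `Readable.from`, `createReadStream`, `createWriteStream`, and `pipeline` alike, a single `controller.abort()` tears the whole chain down with an `AbortError`.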
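One detail of the new parser worth calling out: `parseTarHeader` validates the POSIX ustar checksum, which is computed over all 512 header bytes with the 8-byte checksum field itself counted as ASCII spaces (0x20). Restated on its own, matching the `calculatedSum` loop in the diff:

```ts
import { Buffer } from "node:buffer";

// Bytes 148-155 (the checksum field) are treated as spaces.
const ustarChecksum = (header: Buffer): number => {
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += i >= 148 && i < 156 ? 0x20 : header[i];
  }
  return sum;
};
```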