tar-vern 0.3.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README_pack.md +28 -6
- package/dist/extractor.d.ts +21 -0
- package/dist/extractor.d.ts.map +1 -0
- package/dist/index.cjs +347 -42
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +335 -30
- package/dist/index.js.map +1 -1
- package/dist/packer.d.ts +2 -1
- package/dist/packer.d.ts.map +1 -1
- package/dist/types.d.ts +27 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/utils.d.ts +34 -8
- package/dist/utils.d.ts.map +1 -1
- package/package.json +19 -3
- package/LICENSE +0 -21
- package/README.md +0 -212
- package/dist/generated/packageMetadata.d.ts +0 -16
- package/dist/generated/packageMetadata.d.ts.map +0 -1
package/README_pack.md
CHANGED
@@ -2,7 +2,7 @@
 
 Tape archiver (tar) library implementation for TypeScript.
 
-[](https://www.repostatus.org/#active)
 [](https://opensource.org/licenses/MIT)
 
 ----
@@ -27,15 +27,15 @@ npm install tar-vern
 
 ----
 
-## …
-
-### Minimum example
+## Minimal sample code
 
 tar-vern supplies file and directory information to pack through a TypeScript async generator.
 This allows you to specify pack data with very concise code.
 
 ```typescript
-import { …
+import {
+  createTarPacker, storeReaderToFile,
+  createFileItem, createDirectoryItem } from 'tar-vern';
 
 // Create an async generator for tar entries
 const itemGenerator = async function*() {
@@ -51,11 +51,33 @@ const itemGenerator = async function*() {
   // (Make your own entries with yield expression...)
 };
 
-// Create …
+// Create GZipped tar stream and write to file
 const packer = createTarPacker(itemGenerator(), 'gzip');
 await storeReaderToFile(packer, 'archive.tar.gz'); // Use helper to await completion
 ```
 
+tar-vern provides tar extraction through an async generator too, allowing you to process entries as they are extracted from the tar archive.
+
+```typescript
+import { createReadStream } from 'fs';
+import { createTarExtractor } from 'tar-vern';
+
+// Read GZipped tar file and extract entries
+const readableStream = createReadStream('archive.tar.gz');
+
+for await (const extractedItem of createTarExtractor(readableStream, 'gzip')) {
+  if (extractedItem.kind === 'file') {
+    console.log(`File: ${extractedItem.path}`);
+
+    // Get content as string or buffer
+    const content = await extractedItem.getContent('string');
+    console.log(`Content: ${content}`);
+  } else {
+    console.log(`Directory: ${extractedItem.path}`);
+  }
+}
+```
+
 ----
 
 For more information, [see repository documents](http://github.com/kekyo/tar-vern/).
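The README's packing example elides the generator body. Below is a minimal sketch of what such a generator can yield, using the createFileItem and createDirectoryItem helpers named in the import above; the entry paths and contents are illustrative, not taken from the package docs, and top-level await assumes an ES module.

```typescript
import {
  createTarPacker, storeReaderToFile,
  createFileItem, createDirectoryItem } from 'tar-vern';

// Yield one directory entry, then one file entry inside it.
const itemGenerator = async function*() {
  yield await createDirectoryItem('docs');
  yield await createFileItem('docs/hello.txt', 'Hello, tar-vern!');
};

const packer = createTarPacker(itemGenerator(), 'gzip');
await storeReaderToFile(packer, 'docs.tar.gz');
```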
package/dist/extractor.d.ts
ADDED
@@ -0,0 +1,21 @@
+/*!
+ * name: tar-vern
+ * version: 1.0.0
+ * description: Tape archiver library for Typescript
+ * author: Kouji Matsui (@kekyo@mi.kekyo.net)
+ * license: MIT
+ * repository.url: https://github.com/kekyo/tar-vern.git
+ * git.commit.hash: def794cc361e3023c973a6dc7719d38fc08ac363
+ */
+
+import { Readable } from 'stream';
+import { CompressionTypes, ExtractedEntryItem } from './types';
+/**
+ * Create a tar extractor
+ * @param readable - The readable stream containing tar data
+ * @param compressionType - The compression type (default: 'none')
+ * @param signal - The abort signal
+ * @returns Async generator of entry items
+ */
+export declare const createTarExtractor: (readable: Readable, compressionType?: CompressionTypes, signal?: AbortSignal) => AsyncGenerator<ExtractedEntryItem, void, unknown>;
+//# sourceMappingURL=extractor.d.ts.map
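The declaration above shows that createTarExtractor takes an optional AbortSignal as its third parameter. A usage sketch under that signature; the timeout value and archive name are illustrative:

```typescript
import { createReadStream } from 'fs';
import { createTarExtractor } from 'tar-vern';

// Abort extraction if it runs longer than 30 seconds.
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 30_000);

try {
  const input = createReadStream('archive.tar.gz');
  for await (const item of createTarExtractor(input, 'gzip', controller.signal)) {
    console.log(`${item.kind}: ${item.path}`);
  }
} finally {
  clearTimeout(timeout);
}
```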
package/dist/extractor.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"extractor.d.ts","sourceRoot":"","sources":["../src/extractor.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,EAAE,gBAAgB,EAA0B,kBAAkB,EAAqB,MAAM,SAAS,CAAC;AAmS1G;;;;;;GAMG;AACH,eAAO,MAAM,kBAAkB,GAC7B,UAAU,QAAQ,EAClB,kBAAkB,gBAAgB,EAClC,SAAS,WAAW,KAAG,cAAc,CAAC,kBAAkB,EAAE,IAAI,EAAE,OAAO,CAyIxE,CAAC"}
package/dist/index.cjs
CHANGED
@@ -1,10 +1,11 @@
 /*!
  * name: tar-vern
- * version: 0.3.0
+ * version: 1.0.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
+ * git.commit.hash: def794cc361e3023c973a6dc7719d38fc08ac363
  */
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
@@ -12,15 +13,20 @@ const stream = require("stream");
 const zlib = require("zlib");
 const fs = require("fs");
 const promises = require("fs/promises");
+const promises$1 = require("stream/promises");
+const path = require("path");
+const MAX_NAME = 100;
+const MAX_PREFIX = 155;
 const getUName = (candidateName, candidateId, reflectStat) => {
   return candidateName ?? (reflectStat === "all" ? candidateId.toString() : "root");
 };
 const getBuffer = (data) => {
   return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
 };
-const createDirectoryItem = async (path, reflectStat, options) => {
+const createDirectoryItem = async (path2, reflectStat, options, signal) => {
   const rs = reflectStat ?? "none";
   if (rs !== "none" && options?.directoryPath) {
+    signal?.throwIfAborted();
     const stats = await promises.stat(options.directoryPath);
     const mode = options?.mode ?? stats.mode;
     const uid = options?.uid ?? stats.uid;
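A recurring change in this hunk and the ones below is the new optional `signal` parameter threaded through every helper, with a `signal?.throwIfAborted()` check placed before each await point. A minimal sketch of the same cooperative-cancellation pattern; the `copyChunks` helper here is hypothetical and not part of tar-vern:

```typescript
// Hypothetical helper illustrating the pattern used throughout 1.0.0:
// check the signal before each expensive await so a pending operation
// stops promptly once abort() is called.
const copyChunks = async (
  source: AsyncIterable<Buffer>,
  sink: (chunk: Buffer) => Promise<void>,
  signal?: AbortSignal
): Promise<void> => {
  for await (const chunk of source) {
    // Throws the signal's abort reason if aborted, so cancellation
    // surfaces as a rejected promise at the caller.
    signal?.throwIfAborted();
    await sink(chunk);
  }
};
```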
@@ -30,7 +36,7 @@ const createDirectoryItem = async (path, reflectStat, options) => {
   const gname = getUName(options?.gname, stats.gid, rs);
   return {
     kind: "directory",
-    path,
+    path: path2,
     mode,
     uname,
     gname,
@@ -47,7 +53,7 @@ const createDirectoryItem = async (path, reflectStat, options) => {
   const gname = getUName(options?.gname, void 0, rs);
   return {
     kind: "directory",
-    path,
+    path: path2,
     mode,
     uname,
     gname,
@@ -57,7 +63,8 @@ const createDirectoryItem = async (path, reflectStat, options) => {
     };
   }
 };
-const createFileItem = async (path, content, options) => {
+const createFileItem = async (path2, content, options, signal) => {
+  signal?.throwIfAborted();
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -66,7 +73,7 @@ const createFileItem = async (path, content, options) => {
   const gname = options?.gname ?? "root";
   return {
     kind: "file",
-    path,
+    path: path2,
     mode,
     uname,
     gname,
@@ -76,7 +83,7 @@ const createFileItem = async (path, content, options) => {
     content
   };
 };
-const createReadableFileItem = async (path, readable, options) => {
+const createReadableFileItem = async (path2, readable, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -88,13 +95,14 @@ const createReadableFileItem = async (path, readable, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of readable) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
     }
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -104,13 +112,13 @@ const createReadableFileItem = async (path, readable, options) => {
       content: {
         kind: "readable",
         length,
-        readable: stream.Readable.from(chunks)
+        readable: stream.Readable.from(chunks, { signal })
       }
     };
   } else {
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -125,7 +133,7 @@ const createReadableFileItem = async (path, readable, options) => {
     };
   }
 };
-const createGeneratorFileItem = async (path, generator, options) => {
+const createGeneratorFileItem = async (path2, generator, options, signal) => {
   const mode = options?.mode ?? 420;
   const uid = options?.uid ?? 0;
   const gid = options?.gid ?? 0;
@@ -137,13 +145,14 @@ const createGeneratorFileItem = async (path, generator, options) => {
     const chunks = [];
     length = 0;
     for await (const chunk of generator) {
+      signal?.throwIfAborted();
       const buffer = getBuffer(chunk);
       chunks.push(buffer);
       length += buffer.length;
     }
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -153,13 +162,13 @@ const createGeneratorFileItem = async (path, generator, options) => {
       content: {
         kind: "readable",
         length,
-        readable: stream.Readable.from(chunks)
+        readable: stream.Readable.from(chunks, { signal })
       }
     };
   } else {
     return {
       kind: "file",
-      path,
+      path: path2,
       mode,
       uname,
       gname,
@@ -174,17 +183,18 @@ const createGeneratorFileItem = async (path, generator, options) => {
     };
   }
 };
-const createReadFileItem = async (path, filePath, reflectStat, options) => {
+const createReadFileItem = async (path2, filePath, reflectStat, options, signal) => {
   const rs = reflectStat ?? "exceptName";
+  signal?.throwIfAborted();
   const stats = await promises.stat(filePath);
-  const reader = fs.createReadStream(filePath);
+  const reader = fs.createReadStream(filePath, { signal });
   const mode = options?.mode ?? (rs !== "none" ? stats.mode : void 0);
   const uid = options?.uid ?? (rs !== "none" ? stats.uid : void 0);
   const gid = options?.gid ?? (rs !== "none" ? stats.gid : void 0);
   const date = options?.date ?? (rs !== "none" ? stats.mtime : void 0);
   const uname = getUName(options?.uname, stats.uid, rs);
   const gname = getUName(options?.gname, stats.gid, rs);
-  return await createReadableFileItem(path, reader, {
+  return await createReadableFileItem(path2, reader, {
     length: stats.size,
     mode,
     uname,
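A usage sketch for the createReadFileItem signature shown above (archive path, source file path, reflectStat mode); the file names are illustrative:

```typescript
import { createReadFileItem } from 'tar-vern';

// Store an on-disk file under a different path inside the archive.
// With reflectStat 'exceptName' (the default), mode/uid/gid/mtime are
// copied from stat(), while uname/gname fall back to 'root'; passing
// 'all' would use the numeric ids as the names instead.
const item = await createReadFileItem('logs/app.log', '/var/log/app.log', 'exceptName');
```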
@@ -192,16 +202,55 @@ const createReadFileItem = async (path, filePath, reflectStat, options) => {
     uid,
     gid,
     date
-  });
+  }, signal);
+};
+const storeReaderToFile = async (reader, path2, signal) => {
+  const writer = fs.createWriteStream(path2, { signal });
+  await promises$1.pipeline(reader, writer, { signal });
+};
+const createEntryItemGenerator = async function* (baseDir, relativePaths, reflectStat, signal) {
+  const rs = reflectStat ?? "exceptName";
+  for (const relativePath of relativePaths) {
+    signal?.throwIfAborted();
+    const fsPath = path.join(baseDir, relativePath);
+    try {
+      signal?.throwIfAborted();
+      const stats = await promises.stat(fsPath);
+      if (stats.isDirectory()) {
+        yield await createDirectoryItem(relativePath, rs, {
+          directoryPath: fsPath
+        }, signal);
+      } else if (stats.isFile()) {
+        yield await createReadFileItem(relativePath, fsPath, rs, void 0, signal);
+      }
+    } catch (error) {
+      console.warn(`Warning: Could not access ${fsPath}:`, error);
+      continue;
+    }
+  }
 };
-const …
-const …
-…
-…
-…
-…
-…
-…
+const extractTo = async (iterator, basePath, signal) => {
+  for await (const entry of iterator) {
+    signal?.throwIfAborted();
+    const targetPath = path.join(basePath, entry.path);
+    if (entry.kind === "directory") {
+      try {
+        signal?.throwIfAborted();
+        await promises.mkdir(targetPath, { recursive: true, mode: entry.mode });
+      } catch (error) {
+        if (error.code !== "EEXIST") {
+          throw error;
+        }
+      }
+    } else if (entry.kind === "file") {
+      const parentDir = path.dirname(targetPath);
+      signal?.throwIfAborted();
+      await promises.mkdir(parentDir, { recursive: true });
+      const fileEntry = entry;
+      const content = await fileEntry.getContent("buffer");
+      await promises.writeFile(targetPath, content, { mode: entry.mode, signal });
+    }
+  }
 };
 const utf8ByteLength = (str) => {
   return Buffer.byteLength(str, "utf8");
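The hunk above adds three convenience APIs: storeReaderToFile (pipe a packed stream to disk), createEntryItemGenerator (walk relative paths under a base directory and yield entry items), and extractTo (write extracted entries to a directory tree). A round-trip sketch combining them; the directory layout and file names are illustrative:

```typescript
import { createReadStream } from 'fs';
import {
  createTarPacker, createEntryItemGenerator,
  createTarExtractor, extractTo, storeReaderToFile } from 'tar-vern';

// Pack two entries found under ./src, then unpack the archive into ./out.
const items = createEntryItemGenerator('src', ['index.ts', 'lib/util.ts']);
await storeReaderToFile(createTarPacker(items, 'gzip'), 'src.tar.gz');

const extractor = createTarExtractor(createReadStream('src.tar.gz'), 'gzip');
await extractTo(extractor, 'out');
```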
@@ -219,13 +268,11 @@ const truncateUtf8Safe = (str, maxBytes) => {
   }
   return str.slice(0, i);
 };
-const …
-…
-…
-  if (utf8ByteLength(path) <= MAX_NAME) {
-    return { prefix: "", name: path };
+const splitPath = (path2) => {
+  if (utf8ByteLength(path2) <= MAX_NAME) {
+    return { prefix: "", name: path2 };
   }
-  const parts = path.split("/");
+  const parts = path2.split("/");
   let name = parts.pop() ?? "";
   let prefix = parts.join("/");
   if (utf8ByteLength(name) > MAX_NAME) {
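splitPath implements the ustar rule for long paths: a path of at most MAX_NAME (100) UTF-8 bytes goes entirely into the header's name field; otherwise the last component becomes the name and the leading directories go into the 155-byte prefix field, rejoined on read as `${prefix}/${name}`. A simplified sketch of that rule; the real code also trims multi-byte UTF-8 safely via truncateUtf8Safe:

```typescript
// Minimal sketch of the ustar name/prefix split (mirrors splitPath above).
const splitForUstar = (p: string) => {
  if (Buffer.byteLength(p, 'utf8') <= 100) return { prefix: '', name: p };
  const parts = p.split('/');
  const name = parts.pop() ?? '';
  return { prefix: parts.join('/'), name };
};

const longPath = 'component/'.repeat(12) + 'file.txt'; // 128 bytes total
console.log(splitForUstar(longPath));
// -> name: 'file.txt' (8 bytes), prefix: the 12 leading directories (119 bytes)
```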
@@ -249,9 +296,9 @@ const getPaddedBytes = (buffer) => {
   }
 };
 const terminatorBytes = Buffer.alloc(1024, 0);
-const createTarHeader = (type, path, size, mode, uname, gname, uid, gid, date) => {
+const createTarHeader = (type, path2, size, mode, uname, gname, uid, gid, date) => {
   const buffer = Buffer.alloc(512, 0);
-  const { name, prefix } = splitPath(path);
+  const { name, prefix } = splitPath(path2);
   buffer.write(name, 0, 100, "utf8");
   getOctalBytes(mode & 4095, 8).copy(buffer, 100);
   getOctalBytes(uid, 8).copy(buffer, 108);
@@ -301,10 +348,11 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
       const totalPaddedContentBytes = getPaddedBytes(contentBytes);
       yield totalPaddedContentBytes;
     } else {
+      const content = entryItemContent;
       const tarHeaderBytes = createTarHeader(
         "file",
         entryItem.path,
-        …
+        content.length,
         entryItem.mode,
         entryItem.uname,
         entryItem.gname,
@@ -314,10 +362,10 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
       );
       yield tarHeaderBytes;
       let position = 0;
-      switch (…) {
+      switch (content.kind) {
         // Content is a generator
         case "generator": {
-          for await (const contentBytes of …) {
+          for await (const contentBytes of content.generator) {
            signal?.throwIfAborted();
            yield contentBytes;
            position += contentBytes.length;
@@ -326,9 +374,9 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
         }
         // Content is a readable stream
         case "readable": {
-          for await (const …) {
+          for await (const chunk of content.readable) {
            signal?.throwIfAborted();
-            const contentBytes = getBuffer(…);
+            const contentBytes = getBuffer(chunk);
            yield contentBytes;
            position += contentBytes.length;
          }
@@ -366,23 +414,280 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
   switch (ct) {
     // No compression
     case "none": {
-      return stream.Readable.from(entryItemIterator());
+      return stream.Readable.from(entryItemIterator(), { signal });
     }
     // Gzip compression
     case "gzip": {
       const gzipStream = zlib.createGzip({ level: 9 });
-      const entryItemStream = stream.Readable.from(entryItemIterator());
+      const entryItemStream = stream.Readable.from(entryItemIterator(), { signal });
       entryItemStream.pipe(gzipStream);
       return gzipStream;
     }
   }
 };
+const parseOctalBytes = (buffer, offset, length) => {
+  const str = buffer.subarray(offset, offset + length).toString("ascii").replace(/\0/g, "").trim();
+  return str ? parseInt(str, 8) : 0;
+};
+const parseString = (buffer, offset, length) => {
+  return buffer.subarray(offset, offset + length).toString("utf8").replace(/\0/g, "").trim();
+};
+const readExactBytes = async (iterator, size, signal) => {
+  const chunks = [];
+  let totalRead = 0;
+  while (totalRead < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      if (totalRead === 0) {
+        return void 0;
+      } else {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);
+      }
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalRead;
+    if (chunk.length <= needed) {
+      chunks.push(chunk);
+      totalRead += chunk.length;
+    } else {
+      chunks.push(chunk.subarray(0, needed));
+      await iterator.return?.(chunk.subarray(needed));
+      totalRead = size;
+    }
+  }
+  return Buffer.concat(chunks, size);
+};
+const skipExactBytes = async (iterator, size, signal) => {
+  let totalSkipped = 0;
+  while (totalSkipped < size) {
+    signal?.throwIfAborted();
+    const { value, done } = await iterator.next();
+    if (done) {
+      throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);
+    }
+    const chunk = getBuffer(value);
+    const needed = size - totalSkipped;
+    if (chunk.length <= needed) {
+      totalSkipped += chunk.length;
+    } else {
+      await iterator.return?.(chunk.subarray(needed));
+      totalSkipped = size;
+    }
+  }
+};
+const skipPaddingBytesTo512Boundary = async (iterator, contentSize, signal) => {
+  const padding = (512 - contentSize % 512) % 512;
+  if (padding > 0) {
+    await skipExactBytes(iterator, padding, signal);
+  }
+};
+const parseTarHeader = (buffer) => {
+  if (buffer.every((b) => b === 0)) {
+    return void 0;
+  }
+  const name = parseString(buffer, 0, 100);
+  const mode = parseOctalBytes(buffer, 100, 8);
+  const uid = parseOctalBytes(buffer, 108, 8);
+  const gid = parseOctalBytes(buffer, 116, 8);
+  const size = parseOctalBytes(buffer, 124, 12);
+  const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1e3);
+  const checksum = parseOctalBytes(buffer, 148, 8);
+  const typeflag = parseString(buffer, 156, 1);
+  const magic = parseString(buffer, 257, 6);
+  const uname = parseString(buffer, 265, 32);
+  const gname = parseString(buffer, 297, 32);
+  const prefix = parseString(buffer, 345, 155);
+  if (magic !== "ustar") {
+    throw new Error(`Invalid tar format: magic="${magic}"`);
+  }
+  let calculatedSum = 0;
+  for (let i = 0; i < 512; i++) {
+    if (i >= 148 && i < 156) {
+      calculatedSum += 32;
+    } else {
+      calculatedSum += buffer[i];
+    }
+  }
+  if (calculatedSum !== checksum) {
+    throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);
+  }
+  let path2 = prefix ? `${prefix}/${name}` : name;
+  if (path2.endsWith("/")) {
+    path2 = path2.slice(0, -1);
+  }
+  const kind = typeflag === "5" ? "directory" : "file";
+  return {
+    kind,
+    path: path2,
+    size,
+    mode,
+    uid,
+    gid,
+    mtime,
+    uname: uname || uid.toString(),
+    gname: gname || gid.toString(),
+    checksum,
+    consumed: false
+  };
+};
+const createBufferedAsyncIterator = (iterable, signal) => {
+  const buffer = [];
+  const iterator = iterable[Symbol.asyncIterator]();
+  return {
+    next: async () => {
+      signal?.throwIfAborted();
+      if (buffer.length > 0) {
+        return { value: buffer.shift(), done: false };
+      }
+      return iterator.next();
+    },
+    return: async (value) => {
+      if (value !== void 0) {
+        buffer.unshift(value);
+      }
+      return { value: void 0, done: false };
+    }
+  };
+};
+const createReadableFromIterator = (iterator, size, signal, consumedRef) => {
+  const generator = async function* () {
+    let remainingBytes = size;
+    while (remainingBytes > 0) {
+      signal?.throwIfAborted();
+      const { value, done } = await iterator.next();
+      if (done) {
+        throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);
+      }
+      const chunk = getBuffer(value);
+      if (chunk.length <= remainingBytes) {
+        remainingBytes -= chunk.length;
+        yield chunk;
+      } else {
+        const needed = chunk.subarray(0, remainingBytes);
+        const excess = chunk.subarray(remainingBytes);
+        remainingBytes = 0;
+        await iterator.return?.(excess);
+        yield needed;
+        break;
+      }
+    }
+    await skipPaddingBytesTo512Boundary(iterator, size, signal);
+    consumedRef.consumed = true;
+  };
+  return stream.Readable.from(generator(), { signal });
+};
+const createTarExtractor = async function* (readable, compressionType, signal) {
+  const ct = compressionType ?? "none";
+  let inputStream;
+  switch (ct) {
+    case "gzip":
+      const gunzip = zlib.createGunzip();
+      readable.pipe(gunzip);
+      inputStream = gunzip;
+      break;
+    case "none":
+    default:
+      inputStream = readable;
+      break;
+  }
+  const iterator = createBufferedAsyncIterator(inputStream, signal);
+  let header;
+  while (true) {
+    signal?.throwIfAborted();
+    if (header?.kind === "file" && !header.consumed) {
+      await skipExactBytes(iterator, header.size, signal);
+      await skipPaddingBytesTo512Boundary(iterator, header.size, signal);
+      header.consumed = true;
+    }
+    let headerBuffer;
+    try {
+      headerBuffer = await readExactBytes(iterator, 512, signal);
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("Unexpected end of stream")) {
+        throw new Error("Invalid tar format: incomplete header");
+      }
+      throw error;
+    }
+    if (headerBuffer === void 0) {
+      break;
+    }
+    header = parseTarHeader(headerBuffer);
+    if (!header) {
+      const secondBlock = await readExactBytes(iterator, 512, signal);
+      if (secondBlock === void 0 || secondBlock.every((b) => b === 0)) {
+        break;
+      }
+      throw new Error("Invalid tar format: expected terminator block");
+    }
+    if (header.kind === "directory") {
+      yield {
+        kind: "directory",
+        path: header.path,
+        mode: header.mode,
+        uid: header.uid,
+        gid: header.gid,
+        uname: header.uname,
+        gname: header.gname,
+        date: header.mtime
+      };
+    } else {
+      const currentHeader = header;
+      yield {
+        kind: "file",
+        path: currentHeader.path,
+        mode: currentHeader.mode,
+        uid: currentHeader.uid,
+        gid: currentHeader.gid,
+        uname: currentHeader.uname,
+        gname: currentHeader.gname,
+        date: currentHeader.mtime,
+        getContent: async (type) => {
+          if (currentHeader.consumed) {
+            throw new Error("Content has already been consumed. Multiple calls to getContent are not supported.");
+          }
+          switch (type) {
+            // For string
+            case "string": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer.toString("utf8");
+            }
+            // For buffer
+            case "buffer": {
+              const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);
+              if (dataBuffer === void 0) {
+                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+              }
+              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);
+              currentHeader.consumed = true;
+              return dataBuffer;
+            }
+            // For Readable stream
+            case "readable": {
+              const readable2 = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);
+              return readable2;
+            }
+            default:
+              throw new Error(`Unsupported content type: ${type}`);
+          }
+        }
+      };
+    }
+  }
+};
 exports.createDirectoryItem = createDirectoryItem;
+exports.createEntryItemGenerator = createEntryItemGenerator;
 exports.createFileItem = createFileItem;
 exports.createGeneratorFileItem = createGeneratorFileItem;
 exports.createReadFileItem = createReadFileItem;
 exports.createReadableFileItem = createReadableFileItem;
+exports.createTarExtractor = createTarExtractor;
 exports.createTarPacker = createTarPacker;
-exports.…
+exports.extractTo = extractTo;
 exports.storeReaderToFile = storeReaderToFile;
 //# sourceMappingURL=index.cjs.map
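The parseTarHeader function added above validates the header checksum by summing all 512 header bytes while counting the checksum field itself (offsets 148..155) as ASCII spaces (0x20). The same rule applies on the write side; a self-contained sketch of computing and storing the field, assuming the conventional six-octal-digits-NUL-space encoding:

```typescript
// Compute the ustar header checksum the same way parseTarHeader verifies
// it: sum all 512 bytes, treating bytes 148..155 as spaces.
const computeChecksum = (header: Buffer): number => {
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += (i >= 148 && i < 156) ? 0x20 : header[i];
  }
  return sum;
};

const header = Buffer.alloc(512, 0);
header.write('hello.txt', 0, 100, 'utf8');   // name field
header.fill(0x20, 148, 156);                 // checksum field as spaces
const sum = computeChecksum(header);
// Store as six octal digits, a NUL, and a space.
header.write(sum.toString(8).padStart(6, '0') + '\0 ', 148, 'ascii');
```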