tar-vern 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +219 -0
- package/README_pack.md +79 -0
- package/dist/generated/packageMetadata.d.ts +16 -0
- package/dist/generated/packageMetadata.d.ts.map +1 -0
- package/dist/index.cjs +324 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +324 -0
- package/dist/index.js.map +1 -0
- package/dist/packer.d.ts +20 -0
- package/dist/packer.d.ts.map +1 -0
- package/dist/types.d.ts +159 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils.d.ts +45 -0
- package/dist/utils.d.ts.map +1 -0
- package/package.json +48 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
MIT License
|
2
|
+
|
3
|
+
Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
13
|
+
copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
@@ -0,0 +1,219 @@
|
|
1
|
+
# tar-vern
|
2
|
+
|
3
|
+
Streaming tape archiver (tar) library for TypeScript/JavaScript.
|
4
|
+
|
5
|
+
[](https://www.repostatus.org/#wip)
|
6
|
+
[](https://opensource.org/licenses/MIT)
|
7
|
+
[](https://www.npmjs.com/package/tar-vern)
|
8
|
+
|
9
|
+
----
|
10
|
+
|
11
|
+
## What is this?
|
12
|
+
|
13
|
+
A modern TypeScript library for creating tape archives (tar/ustar format) using streaming API. Supports both files and directories with metadata preservation, GZip compression, readable streaming, and flexible content sources.
|
14
|
+
|
15
|
+
```mermaid
|
16
|
+
graph LR
|
17
|
+
A[Content Items<br/>* Files<br/>* Directories<br/>* String content<br/>* Buffer content<br/>* Async generators] -->|Stream pipe| B[Writer Stream<br/>'createWriteStream']
|
18
|
+
B --> C[TAR File<br/>'foobar.tar']
|
19
|
+
```
|
20
|
+
|
21
|
+
## Features
|
22
|
+
|
23
|
+
- Streaming API: Memory-efficient processing of large files
|
24
|
+
- Multiple content sources: String, Buffer, ReadableStream, file paths and async generators
|
25
|
+
- Metadata preservation: File permissions, ownership, timestamps
|
26
|
+
- Built-in compression: GZip compression support (`tar.gz` format)
|
27
|
+
- No other dependencies.
|
28
|
+
|
29
|
+
## Installation
|
30
|
+
|
31
|
+
```bash
|
32
|
+
npm install tar-vern
|
33
|
+
```
|
34
|
+
|
35
|
+
----
|
36
|
+
|
37
|
+
## Usage for tar packing
|
38
|
+
|
39
|
+
### Basic example
|
40
|
+
|
41
|
+
```typescript
|
42
|
+
import { createTarPacker, storeReaderToFile } from 'tar-vern';
|
43
|
+
|
44
|
+
// Create an async generator for tar entries
|
45
|
+
const generator = async function*() {
|
46
|
+
// Add a simple text file
|
47
|
+
yield {
|
48
|
+
kind: 'file',
|
49
|
+
path: 'hello.txt',
|
50
|
+
mode: 0o644,
|
51
|
+
uname: 'user',
|
52
|
+
gname: 'group',
|
53
|
+
uid: 1000,
|
54
|
+
gid: 1000,
|
55
|
+
date: new Date(),
|
56
|
+
content: 'Hello, world!' // text contents
|
57
|
+
};
|
58
|
+
|
59
|
+
// Add a directory
|
60
|
+
yield {
|
61
|
+
kind: 'directory',
|
62
|
+
path: 'mydir',
|
63
|
+
mode: 0o755,
|
64
|
+
uname: 'user',
|
65
|
+
gname: 'group',
|
66
|
+
uid: 1000,
|
67
|
+
gid: 1000,
|
68
|
+
date: new Date()
|
69
|
+
};
|
70
|
+
};
|
71
|
+
|
72
|
+
// Create tar stream and write to file
|
73
|
+
const packer = createTarPacker(generator());
|
74
|
+
await storeReaderToFile(packer, 'archive.tar'); // Use helper to make the write awaitable
|
75
|
+
```
|
76
|
+
|
77
|
+
### With GZip compression
|
78
|
+
|
79
|
+
Supported `CompressionTypes`:
|
80
|
+
|
81
|
+
|`CompressionTypes`|Details|
|
82
|
+
|:----|:----|
|
83
|
+
|`none`|No compression (default)|
|
84
|
+
|`gzip`|Combined GZip compression stream|
|
85
|
+
|
86
|
+
```typescript
|
87
|
+
import { createTarPacker, storeReaderToFile, CompressionTypes } from 'tar-vern';
|
88
|
+
|
89
|
+
const generator = async function*() {
|
90
|
+
yield {
|
91
|
+
kind: 'file',
|
92
|
+
path: 'data.txt',
|
93
|
+
mode: 0o644,
|
94
|
+
uname: 'user',
|
95
|
+
gname: 'group',
|
96
|
+
uid: 1000,
|
97
|
+
gid: 1000,
|
98
|
+
date: new Date(),
|
99
|
+
content: 'Large amount of data...'
|
100
|
+
};
|
101
|
+
};
|
102
|
+
|
103
|
+
// Create compressed tar stream
|
104
|
+
const packer = createTarPacker(generator(), 'gzip');
|
105
|
+
await storeReaderToFile(packer, 'archive.tar.gz');
|
106
|
+
```
|
107
|
+
|
108
|
+
### Helper functions
|
109
|
+
|
110
|
+
```typescript
|
111
|
+
import {
|
112
|
+
createReadFileItem,
|
113
|
+
createDirectoryItem,
|
114
|
+
createReadableItem,
|
115
|
+
storeReaderToFile
|
116
|
+
} from 'tar-vern';
|
117
|
+
import { createReadStream } from 'fs';
|
118
|
+
|
119
|
+
// Easier configuration with the item creation helper functions
|
120
|
+
const generator = async function*() {
|
121
|
+
// Add file from filesystem (auto-detects metadata)
|
122
|
+
yield await createReadFileItem('archived-name.txt', '/path/to/real/source.txt');
|
123
|
+
|
124
|
+
// Add directory from filesystem
|
125
|
+
yield await createDirectoryItem('dir/sub/name', 'exceptName', {
|
126
|
+
directoryPath: '/path/to/real/dir'
|
127
|
+
});
|
128
|
+
|
129
|
+
// Add from readable stream
|
130
|
+
const stream = createReadStream('/path/to/large-file.bin');
|
131
|
+
yield await createReadableItem('large-file.bin', stream);
|
132
|
+
};
|
133
|
+
|
134
|
+
// The `packer` is an ordinary `stream.Readable`
|
135
|
+
const packer = createTarPacker(generator());
|
136
|
+
|
137
|
+
// Safely store the `stream.Readable` to a file with an awaitable helper
|
138
|
+
await storeReaderToFile(packer, 'output.tar');
|
139
|
+
```
|
140
|
+
|
141
|
+
### Content types
|
142
|
+
|
143
|
+
```typescript
|
144
|
+
const generator = async function*() {
|
145
|
+
// String content
|
146
|
+
yield {
|
147
|
+
kind: 'file',
|
148
|
+
path: 'text.txt',
|
149
|
+
content: 'Text content' // Store with utf8 encoding
|
150
|
+
// ... other properties
|
151
|
+
};
|
152
|
+
|
153
|
+
// Buffer content
|
154
|
+
yield {
|
155
|
+
kind: 'file',
|
156
|
+
path: 'binary.bin',
|
157
|
+
content: Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f])
|
158
|
+
// ... other properties
|
159
|
+
};
|
160
|
+
|
161
|
+
// Readable stream content
|
162
|
+
yield {
|
163
|
+
kind: 'file',
|
164
|
+
path: 'stream.dat',
|
165
|
+
content: {
|
166
|
+
kind: 'readable',
|
167
|
+
length: 1024,
|
168
|
+
readable: myReadableStream
|
169
|
+
}
|
170
|
+
// ... other properties
|
171
|
+
};
|
172
|
+
|
173
|
+
// Async generator content
|
174
|
+
yield {
|
175
|
+
kind: 'file',
|
176
|
+
path: 'generated.dat',
|
177
|
+
content: {
|
178
|
+
kind: 'generator',
|
179
|
+
length: 2048,
|
180
|
+
generator: myAsyncGenerator // (each yielding `Buffer` instance)
|
181
|
+
}
|
182
|
+
// ... other properties
|
183
|
+
};
|
184
|
+
};
|
185
|
+
```
|
186
|
+
|
187
|
+
### Stat reflection options
|
188
|
+
|
189
|
+
```typescript
|
190
|
+
import { createReadFileItem, ReflectStats } from 'tar-vern';
|
191
|
+
|
192
|
+
// Don't reflect any file stats (use provided options only)
|
193
|
+
yield await createReadFileItem('file.txt', '/source.txt',
|
194
|
+
'none', { // Don't reflect
|
195
|
+
mode: 0o644,
|
196
|
+
uid: 1000,
|
197
|
+
gid: 1000
|
198
|
+
});
|
199
|
+
|
200
|
+
// Reflect all stats except username/groupname
|
201
|
+
yield await createReadFileItem('file.txt', '/source.txt',
|
202
|
+
'exceptName'); // except names
|
203
|
+
|
204
|
+
// Reflect all stats including numeric uid/gid as names
|
205
|
+
yield await createReadFileItem('file.txt', '/source.txt',
|
206
|
+
'all'); // reflect all stats
|
207
|
+
```
|
208
|
+
|
209
|
+
----
|
210
|
+
|
211
|
+
## Usage for tar unpacking
|
212
|
+
|
213
|
+
TODO:
|
214
|
+
|
215
|
+
----
|
216
|
+
|
217
|
+
## License
|
218
|
+
|
219
|
+
Under MIT.
|
package/README_pack.md
ADDED
@@ -0,0 +1,79 @@
|
|
1
|
+
# Tape archiver library for Typescript
|
2
|
+
|
3
|
+
A tape archiver (tar) library implemented in TypeScript.
|
4
|
+
|
5
|
+
[](https://www.repostatus.org/#wip)
|
6
|
+
[](https://opensource.org/licenses/MIT)
|
7
|
+
|
8
|
+
----
|
9
|
+
|
10
|
+
## What is this?
|
11
|
+
|
12
|
+
A modern TypeScript library for creating tape archives (tar/ustar format) using streaming API. Supports both files and directories with metadata preservation, GZip compression, readable streaming, and flexible content sources.
|
13
|
+
|
14
|
+
## Features
|
15
|
+
|
16
|
+
- Streaming API: Memory-efficient processing of large files
|
17
|
+
- Multiple content sources: String, Buffer, ReadableStream, file paths and async generators
|
18
|
+
- Metadata preservation: File permissions, ownership, timestamps
|
19
|
+
- Built-in compression: GZip compression support (`tar.gz` format)
|
20
|
+
- No other dependencies.
|
21
|
+
|
22
|
+
## Installation
|
23
|
+
|
24
|
+
```bash
|
25
|
+
npm install tar-vern
|
26
|
+
```
|
27
|
+
|
28
|
+
----
|
29
|
+
|
30
|
+
## Usage for tar packing
|
31
|
+
|
32
|
+
### Basic example
|
33
|
+
|
34
|
+
```typescript
|
35
|
+
import { createTarPacker, storeReaderToFile } from 'tar-vern';
|
36
|
+
import { createWriteStream } from 'fs';
|
37
|
+
|
38
|
+
// Create an async generator for tar entries
|
39
|
+
const generator = async function*() {
|
40
|
+
// Add a simple text file
|
41
|
+
yield {
|
42
|
+
kind: 'file',
|
43
|
+
path: 'hello.txt',
|
44
|
+
mode: 0o644,
|
45
|
+
uname: 'user',
|
46
|
+
gname: 'group',
|
47
|
+
uid: 1000,
|
48
|
+
gid: 1000,
|
49
|
+
date: new Date(),
|
50
|
+
content: 'Hello, world!' // text contents
|
51
|
+
};
|
52
|
+
|
53
|
+
// Add a directory
|
54
|
+
yield {
|
55
|
+
kind: 'directory',
|
56
|
+
path: 'mydir',
|
57
|
+
mode: 0o755,
|
58
|
+
uname: 'user',
|
59
|
+
gname: 'group',
|
60
|
+
uid: 1000,
|
61
|
+
gid: 1000,
|
62
|
+
date: new Date()
|
63
|
+
};
|
64
|
+
};
|
65
|
+
|
66
|
+
// Create tar stream and write to file
|
67
|
+
const packer = createTarPacker(generator());
|
68
|
+
await storeReaderToFile(packer, 'archive.tar'); // Use helper to make the write awaitable
|
69
|
+
```
|
70
|
+
|
71
|
+
----
|
72
|
+
|
73
|
+
For more information, [see repository documents](http://github.com/kekyo/tar-vern/).
|
74
|
+
|
75
|
+
----
|
76
|
+
|
77
|
+
## License
|
78
|
+
|
79
|
+
Under MIT.
|
@@ -0,0 +1,16 @@
|
|
1
|
+
/*!
|
2
|
+
* name: tar-vern
|
3
|
+
* version: 0.1.0
|
4
|
+
* description: Tape archiver library for Typescript
|
5
|
+
* author: Kouji Matsui (@kekyo@mi.kekyo.net)
|
6
|
+
* license: MIT
|
7
|
+
* repository.url: https://github.com/kekyo/tar-vern.git
|
8
|
+
*/
|
9
|
+
|
10
|
+
export declare const name = "tar-vern";
|
11
|
+
export declare const version = "0.1.0";
|
12
|
+
export declare const description = "Tape archiver library for Typescript";
|
13
|
+
export declare const author = "Kouji Matsui (@kekyo@mi.kekyo.net)";
|
14
|
+
export declare const license = "MIT";
|
15
|
+
export declare const repository_url = "https://github.com/kekyo/tar-vern.git";
|
16
|
+
//# sourceMappingURL=packageMetadata.d.ts.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"packageMetadata.d.ts","sourceRoot":"","sources":["../../src/generated/packageMetadata.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,IAAI,aAAa,CAAC;AAC/B,eAAO,MAAM,OAAO,UAAU,CAAC;AAC/B,eAAO,MAAM,WAAW,yCAAyC,CAAC;AAClE,eAAO,MAAM,MAAM,uCAAuC,CAAC;AAC3D,eAAO,MAAM,OAAO,QAAQ,CAAC;AAC7B,eAAO,MAAM,cAAc,0CAA0C,CAAC"}
|
package/dist/index.cjs
ADDED
@@ -0,0 +1,324 @@
|
|
1
|
+
/*!
|
2
|
+
* name: tar-vern
|
3
|
+
* version: 0.1.0
|
4
|
+
* description: Tape archiver library for Typescript
|
5
|
+
* author: Kouji Matsui (@kekyo@mi.kekyo.net)
|
6
|
+
* license: MIT
|
7
|
+
* repository.url: https://github.com/kekyo/tar-vern.git
|
8
|
+
*/
|
9
|
+
"use strict";
|
10
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
11
|
+
const stream = require("stream");
|
12
|
+
const zlib = require("zlib");
|
13
|
+
const fs = require("fs");
|
14
|
+
const promises = require("fs/promises");
|
15
|
+
// Returns the number of bytes the given string occupies when encoded as UTF-8.
const utf8ByteLength = (str) => Buffer.byteLength(str, "utf8");
|
18
|
+
/**
 * Truncates a string so that its UTF-8 encoding fits within `maxBytes`,
 * never cutting a code point in the middle.
 * @param {string} str - Input string.
 * @param {number} maxBytes - Maximum allowed UTF-8 byte length.
 * @returns {string} Longest prefix of `str` whose UTF-8 size <= maxBytes.
 */
const truncateUtf8Safe = (str, maxBytes) => {
  let usedBytes = 0;
  let endIndex = 0;
  // for...of iterates whole code points, so surrogate pairs stay intact.
  for (const ch of str) {
    const chBytes = Buffer.byteLength(ch, "utf8");
    if (usedBytes + chBytes > maxBytes) {
      break;
    }
    usedBytes += chBytes;
    endIndex += ch.length; // UTF-16 code units consumed by this code point
  }
  return str.slice(0, endIndex);
};
|
31
|
+
// ustar header field widths in bytes.
const MAX_NAME = 100;
const MAX_PREFIX = 155;
/**
 * Splits a path into the ustar `prefix` and `name` header fields.
 * A path that fits in the 100-byte name field is returned unchanged with
 * an empty prefix. Otherwise the last path segment becomes `name` and the
 * rest becomes `prefix`, each truncated UTF-8-safely to its field limit.
 * @param {string} path - Entry path inside the archive.
 * @returns {{prefix: string, name: string}}
 */
const splitPath = (path) => {
  // Fast path: the whole thing fits into the name field.
  if (utf8ByteLength(path) <= MAX_NAME) {
    return { prefix: "", name: path };
  }
  const segments = path.split("/");
  const lastSegment = segments.pop() ?? "";
  const name = utf8ByteLength(lastSegment) > MAX_NAME
    ? truncateUtf8Safe(lastSegment, MAX_NAME)
    : lastSegment;
  let prefix = segments.join("/");
  while (utf8ByteLength(prefix) > MAX_PREFIX) {
    prefix = truncateUtf8Safe(prefix, MAX_PREFIX);
  }
  return { prefix, name };
};
|
48
|
+
/**
 * Renders a numeric header field as zero-padded octal ASCII, terminated
 * with a NUL byte, exactly `length` bytes wide.
 * NOTE(review): values >= 8^(length-1) would overflow the field width
 * (e.g. file sizes over 8 GiB in a 12-byte field) — confirm callers never
 * exceed it.
 * @param {number} value - Non-negative integer to encode.
 * @param {number} length - Total field width in bytes, including the NUL.
 * @returns {Buffer}
 */
const getOctalBytes = (value, length) => {
  const digits = value.toString(8);
  const field = `${digits.padStart(length - 1, "0")}\0`;
  return Buffer.from(field, "ascii");
};
|
52
|
+
/**
 * Pads a buffer with trailing zero bytes up to the next 512-byte boundary
 * (the tar record size). Returns the input buffer unchanged when it is
 * already record-aligned (including the empty buffer).
 * @param {Buffer} buffer - Content bytes to align.
 * @returns {Buffer}
 */
const getPaddedBytes = (buffer) => {
  const remainder = buffer.length % 512;
  if (remainder === 0) {
    return buffer;
  }
  const padding = Buffer.alloc(512 - remainder, 0);
  return Buffer.concat([buffer, padding]);
};
|
60
|
+
// Two 512-byte zero records marking the end of a tar archive.
const terminatorBytes = Buffer.alloc(1024, 0);
// Builds one 512-byte ustar header record for a file or directory entry.
// Field offsets follow the POSIX ustar layout: name@0, mode@100, uid@108,
// gid@116, size@124, mtime@136, chksum@148, typeflag@156, magic@257,
// version@263, uname@265, gname@297, prefix@345.
const createTarHeader = (type, path, size, mode, uname, gname, uid, gid, date) => {
  const buffer = Buffer.alloc(512, 0);
  const { name, prefix } = splitPath(path);
  buffer.write(name, 0, 100, "utf8");
  // 4095 === 0o7777: keep only the permission/setuid/setgid/sticky bits.
  getOctalBytes(mode & 4095, 8).copy(buffer, 100);
  getOctalBytes(uid, 8).copy(buffer, 108);
  getOctalBytes(gid, 8).copy(buffer, 116);
  getOctalBytes(size, 12).copy(buffer, 124);
  // mtime is stored as whole seconds since the Unix epoch, in octal.
  getOctalBytes(Math.floor(date.getTime() / 1e3), 12).copy(buffer, 136);
  // The checksum field must hold ASCII spaces while the checksum is computed.
  // NOTE(review): the ustar checksum field is 8 bytes wide; confirm this
  // literal is exactly 8 spaces (the diff rendering may have collapsed
  // consecutive whitespace).
  Buffer.from(" ", "ascii").copy(buffer, 148);
  // typeflag: '0' = regular file, '5' = directory.
  if (type === "file") {
    buffer.write("0", 156, 1, "ascii");
  } else {
    buffer.write("5", 156, 1, "ascii");
  }
  // ustar magic + version "00".
  buffer.write("ustar\0", 257, 6, "ascii");
  buffer.write("00", 263, 2, "ascii");
  buffer.write(uname, 265, 32, "utf8");
  buffer.write(gname, 297, 32, "utf8");
  buffer.write(prefix, 345, 155, "utf8");
  // Sum every byte of the record (checksum field counted as spaces), then
  // overwrite the checksum field with the octal total.
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += buffer[i];
  }
  getOctalBytes(sum, 8).copy(buffer, 148);
  return buffer;
};
|
88
|
+
// Creates a readable stream producing a tar (optionally gzip-compressed)
// archive from an async generator of entry items.
// - entryItemGenerator: async iterable yielding file/directory entry objects.
// - compressionType: "none" (default) or "gzip".
// - signal: optional AbortSignal; aborts between chunks via throwIfAborted().
// Returns a stream.Readable emitting the archive bytes.
const createTarPacker = (entryItemGenerator, compressionType, signal) => {
  // Inner generator yields header records, content, padding, and finally the
  // 1024-byte end-of-archive terminator.
  const entryItemIterator = async function* () {
    for await (const entryItem of entryItemGenerator) {
      signal?.throwIfAborted();
      switch (entryItem.kind) {
        // Entry is a file
        case "file": {
          const entryItemContent = entryItem.content;
          if (typeof entryItemContent === "string") {
            // String content: encode as UTF-8, size is the byte length.
            const contentBytes = Buffer.from(entryItemContent, "utf8");
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              contentBytes.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            // Content is padded to the 512-byte record boundary.
            const paddedContentBytes = getPaddedBytes(contentBytes);
            yield paddedContentBytes;
          } else if (Buffer.isBuffer(entryItemContent)) {
            // Buffer content: size is taken directly from the buffer.
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              entryItemContent.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            const paddedContentBytes = getPaddedBytes(entryItemContent);
            yield paddedContentBytes;
          } else {
            // Streaming content ({kind:"generator"|"readable", length, ...}).
            // NOTE(review): the header size comes from the DECLARED
            // `entryItemContent.length`, but padding below is computed from the
            // bytes actually streamed; a mismatch would corrupt the archive —
            // confirm callers always declare the exact length.
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              entryItemContent.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            switch (entryItemContent.kind) {
              // Content is a generator
              case "generator": {
                let position = 0;
                // Pass each Buffer fragment through, tracking total bytes.
                for await (const contentFragmentBytes of entryItemContent.generator) {
                  signal?.throwIfAborted();
                  yield contentFragmentBytes;
                  position += contentFragmentBytes.length;
                }
                // Zero-pad to the next 512-byte record boundary.
                if (position % 512 !== 0) {
                  signal?.throwIfAborted();
                  yield Buffer.alloc(512 - position % 512, 0);
                }
                break;
              }
              // Content is a readable stream
              case "readable": {
                let position = 0;
                for await (const content of entryItemContent.readable) {
                  signal?.throwIfAborted();
                  if (typeof content === "string") {
                    // String chunks are encoded as UTF-8 before emission.
                    const stringBytes = Buffer.from(content, "utf8");
                    yield stringBytes;
                    position += stringBytes.length;
                  } else if (Buffer.isBuffer(content)) {
                    yield content;
                    position += content.length;
                  }
                  // Chunks that are neither string nor Buffer are silently
                  // dropped (no else branch).
                }
                // Zero-pad to the next 512-byte record boundary.
                if (position % 512 !== 0) {
                  signal?.throwIfAborted();
                  yield Buffer.alloc(512 - position % 512, 0);
                }
                break;
              }
            }
          }
          break;
        }
        // Entry is a directory
        case "directory": {
          // Directories carry a header only; size field is 0.
          const tarHeaderBytes = createTarHeader(
            "directory",
            entryItem.path,
            0,
            entryItem.mode,
            entryItem.uname,
            entryItem.gname,
            entryItem.uid,
            entryItem.gid,
            entryItem.date
          );
          yield tarHeaderBytes;
          break;
        }
      }
    }
    // End-of-archive marker: two zero-filled 512-byte records.
    yield terminatorBytes;
  };
  const ct = compressionType ?? "none";
  switch (ct) {
    // No compression
    case "none": {
      return stream.Readable.from(entryItemIterator());
    }
    // Gzip compression
    case "gzip": {
      const gzipStream = zlib.createGzip({ level: 9 });
      const entryItemStream = stream.Readable.from(entryItemIterator());
      // NOTE(review): pipe() does not forward errors from entryItemStream to
      // gzipStream, so a generator failure may leave the returned stream
      // hanging — consider stream.pipeline; verify with an erroring generator.
      entryItemStream.pipe(gzipStream);
      return gzipStream;
    }
  }
};
|
214
|
+
/**
 * Resolves a user/group name field. An explicitly supplied name always
 * wins; otherwise "all" reflection renders the numeric id as a decimal
 * string, and any other mode falls back to "root".
 */
const getUName = (candidateName, candidateId, reflectStat) => {
  if (candidateName != null) {
    return candidateName;
  }
  if (reflectStat === "all") {
    return candidateId.toString();
  }
  return "root";
};
/**
 * Builds a directory entry item for the packer.
 * When `reflectStat` is enabled and `options.directoryPath` points at a real
 * directory, metadata is read from the filesystem (explicit options still
 * take precedence); otherwise defaults are used: mode 0o755, uid/gid 0,
 * uname/gname per getUName, date "now".
 * @param {string} path - Entry path inside the archive.
 * @param {string} [reflectStat] - "none" (default), "exceptName", or "all".
 * @param {object} [options] - Metadata overrides and optional directoryPath.
 * @returns {Promise<object>} Directory entry item.
 */
const createDirectoryItem = async (path, reflectStat, options) => {
  const rs = reflectStat ?? "none";
  if (rs !== "none" && options?.directoryPath) {
    // Reflect filesystem metadata, letting explicit options override.
    const stats = await promises.stat(options.directoryPath);
    return {
      kind: "directory",
      path,
      mode: options?.mode ?? stats.mode,
      uname: getUName(options?.uname, stats.uid, rs),
      gname: getUName(options?.gname, stats.gid, rs),
      uid: options?.uid ?? stats.uid,
      gid: options?.gid ?? stats.gid,
      date: options?.date ?? stats.mtime
    };
  }
  // No filesystem reflection: synthesize conventional defaults.
  return {
    kind: "directory",
    path,
    mode: options?.mode ?? 0o755,
    uname: getUName(options?.uname, void 0, rs),
    gname: getUName(options?.gname, void 0, rs),
    uid: options?.uid ?? 0,
    gid: options?.gid ?? 0,
    date: options?.date ?? new Date()
  };
};
|
256
|
+
/**
 * Builds a file entry item from a readable (or async-iterable) source.
 * Tar headers need the content size up front, so when no usable length is
 * supplied the source is fully drained into memory to measure it, and a
 * fresh Readable over the buffered chunks is used instead.
 * @param {string} path - Entry path inside the archive.
 * @param {stream.Readable|AsyncIterable} reader - Content source.
 * @param {object} [options] - length plus metadata overrides.
 * @returns {Promise<object>} File entry item with {kind:"readable"} content.
 */
const createReadableItem = async (path, reader, options) => {
  let length = options?.length;
  let readable = reader;
  // No length (or zero): drain the source and count the bytes ourselves.
  if (!length) {
    length = 0;
    const collected = [];
    for await (const chunk of reader) {
      const chunkBytes = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, "utf8");
      collected.push(chunkBytes);
      length += chunkBytes.length;
    }
    readable = stream.Readable.from(collected);
  }
  return {
    kind: "file",
    path,
    mode: options?.mode ?? 0o644,
    uname: options?.uname ?? "root",
    gname: options?.gname ?? "root",
    uid: options?.uid ?? 0,
    gid: options?.gid ?? 0,
    date: options?.date ?? new Date(),
    content: {
      kind: "readable",
      length,
      readable
    }
  };
};
|
291
|
+
/**
 * Builds a file entry item backed by a file on disk.
 * File stats are reflected into the entry metadata according to
 * `reflectStat` (default "exceptName": everything but user/group names);
 * explicit options always take precedence over reflected values.
 * @param {string} path - Entry path inside the archive.
 * @param {string} filePath - Real filesystem path to read from.
 * @param {string} [reflectStat] - "none", "exceptName" (default), or "all".
 * @param {object} [options] - Metadata overrides.
 * @returns {Promise<object>} File entry item.
 */
const createReadFileItem = async (path, filePath, reflectStat, options) => {
  const rs = reflectStat ?? "exceptName";
  const stats = await promises.stat(filePath);
  const reader = fs.createReadStream(filePath);
  const reflect = rs !== "none";
  // Undefined fields fall through to createReadableItem's own defaults.
  return await createReadableItem(path, reader, {
    length: stats.size,
    mode: options?.mode ?? (reflect ? stats.mode : void 0),
    uname: getUName(options?.uname, stats.uid, rs),
    gname: getUName(options?.gname, stats.gid, rs),
    uid: options?.uid ?? (reflect ? stats.uid : void 0),
    gid: options?.gid ?? (reflect ? stats.gid : void 0),
    date: options?.date ?? (reflect ? stats.mtime : void 0)
  });
};
|
311
|
+
/**
 * Pipes a readable stream into a file and resolves when the file has been
 * fully written.
 *
 * Fix: the original listened only for writer events, but `pipe()` does not
 * forward source errors to the destination — an error on the READER left
 * the promise pending forever with a dangling write stream. Reader errors
 * now destroy the writer and reject the promise.
 *
 * @param {stream.Readable} reader - Source stream (e.g. a tar packer).
 * @param {string} path - Destination file path.
 * @returns {Promise<void>} Resolves on 'finish'; rejects on reader or writer error.
 */
const storeReaderToFile = (reader, path) => {
  const writer = fs.createWriteStream(path);
  reader.pipe(writer);
  return new Promise((res, rej) => {
    writer.on("finish", res);
    writer.on("error", rej);
    // pipe() does not propagate source errors to the destination stream.
    reader.on("error", (err) => {
      writer.destroy();
      rej(err);
    });
  });
};
|
319
|
+
// CommonJS public API surface of the package build.
exports.createDirectoryItem = createDirectoryItem;
exports.createReadFileItem = createReadFileItem;
exports.createReadableItem = createReadableItem;
exports.createTarPacker = createTarPacker;
exports.storeReaderToFile = storeReaderToFile;
//# sourceMappingURL=index.cjs.map
|