tar-vern 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +98 -105
- package/README_pack.md +16 -28
- package/dist/generated/packageMetadata.d.ts +2 -2
- package/dist/index.cjs +211 -147
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +211 -147
- package/dist/index.js.map +1 -1
- package/dist/packer.d.ts +1 -1
- package/dist/packer.d.ts.map +1 -1
- package/dist/types.d.ts +6 -3
- package/dist/types.d.ts.map +1 -1
- package/dist/utils.d.ts +26 -4
- package/dist/utils.d.ts.map +1 -1
- package/package.json +1 -2
package/dist/index.cjs
CHANGED
@@ -1,6 +1,6 @@
 /*!
  * name: tar-vern
- * version: 0.1.0
+ * version: 0.3.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
@@ -12,6 +12,197 @@ const stream = require("stream");
 const zlib = require("zlib");
 const fs = require("fs");
 const promises = require("fs/promises");
+const getUName = (candidateName, candidateId, reflectStat) => {
+  return candidateName ?? (reflectStat === "all" ? candidateId.toString() : "root");
+};
+const getBuffer = (data) => {
+  return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
+};
+const createDirectoryItem = async (path, reflectStat, options) => {
+  const rs = reflectStat ?? "none";
+  if (rs !== "none" && options?.directoryPath) {
+    const stats = await promises.stat(options.directoryPath);
+    const mode = options?.mode ?? stats.mode;
+    const uid = options?.uid ?? stats.uid;
+    const gid = options?.gid ?? stats.gid;
+    const date = options?.date ?? stats.mtime;
+    const uname = getUName(options?.uname, stats.uid, rs);
+    const gname = getUName(options?.gname, stats.gid, rs);
+    return {
+      kind: "directory",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date
+    };
+  } else {
+    const mode = options?.mode ?? 493;
+    const uid = options?.uid ?? 0;
+    const gid = options?.gid ?? 0;
+    const date = options?.date ?? /* @__PURE__ */ new Date();
+    const uname = getUName(options?.uname, void 0, rs);
+    const gname = getUName(options?.gname, void 0, rs);
+    return {
+      kind: "directory",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date
+    };
+  }
+};
+const createFileItem = async (path, content, options) => {
+  const mode = options?.mode ?? 420;
+  const uid = options?.uid ?? 0;
+  const gid = options?.gid ?? 0;
+  const date = options?.date ?? /* @__PURE__ */ new Date();
+  const uname = options?.uname ?? "root";
+  const gname = options?.gname ?? "root";
+  return {
+    kind: "file",
+    path,
+    mode,
+    uname,
+    gname,
+    uid,
+    gid,
+    date,
+    content
+  };
+};
+const createReadableFileItem = async (path, readable, options) => {
+  const mode = options?.mode ?? 420;
+  const uid = options?.uid ?? 0;
+  const gid = options?.gid ?? 0;
+  const date = options?.date ?? /* @__PURE__ */ new Date();
+  const uname = options?.uname ?? "root";
+  const gname = options?.gname ?? "root";
+  let length = options?.length;
+  if (!length) {
+    const chunks = [];
+    length = 0;
+    for await (const chunk of readable) {
+      const buffer = getBuffer(chunk);
+      chunks.push(buffer);
+      length += buffer.length;
+    }
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "readable",
+        length,
+        readable: stream.Readable.from(chunks)
+      }
+    };
+  } else {
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "readable",
+        length,
+        readable
+      }
+    };
+  }
+};
+const createGeneratorFileItem = async (path, generator, options) => {
+  const mode = options?.mode ?? 420;
+  const uid = options?.uid ?? 0;
+  const gid = options?.gid ?? 0;
+  const date = options?.date ?? /* @__PURE__ */ new Date();
+  const uname = options?.uname ?? "root";
+  const gname = options?.gname ?? "root";
+  let length = options?.length;
+  if (!length) {
+    const chunks = [];
+    length = 0;
+    for await (const chunk of generator) {
+      const buffer = getBuffer(chunk);
+      chunks.push(buffer);
+      length += buffer.length;
+    }
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "readable",
+        length,
+        readable: stream.Readable.from(chunks)
+      }
+    };
+  } else {
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "generator",
+        length,
+        generator
+      }
+    };
+  }
+};
+const createReadFileItem = async (path, filePath, reflectStat, options) => {
+  const rs = reflectStat ?? "exceptName";
+  const stats = await promises.stat(filePath);
+  const reader = fs.createReadStream(filePath);
+  const mode = options?.mode ?? (rs !== "none" ? stats.mode : void 0);
+  const uid = options?.uid ?? (rs !== "none" ? stats.uid : void 0);
+  const gid = options?.gid ?? (rs !== "none" ? stats.gid : void 0);
+  const date = options?.date ?? (rs !== "none" ? stats.mtime : void 0);
+  const uname = getUName(options?.uname, stats.uid, rs);
+  const gname = getUName(options?.gname, stats.gid, rs);
+  return await createReadableFileItem(path, reader, {
+    length: stats.size,
+    mode,
+    uname,
+    gname,
+    uid,
+    gid,
+    date
+  });
+};
+const storeReaderToFile = (reader, path) => {
+  const writer = fs.createWriteStream(path);
+  reader.pipe(writer);
+  return new Promise((res, rej) => {
+    writer.on("finish", res);
+    writer.on("error", rej);
+    reader.on("error", rej);
+  });
+};
 const utf8ByteLength = (str) => {
   return Buffer.byteLength(str, "utf8");
 };
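The helpers added in this hunk form the public item-construction API of 0.3.0. A minimal usage sketch, assuming the `tar-vern` import specifier and the typed signatures embedded in the updated source map; the entry names and output path are invented for illustration:

```typescript
import {
  createDirectoryItem, createFileItem, createGeneratorFileItem,
  createTarPacker, storeReaderToFile
} from "tar-vern";

const entries = async function* () {
  // Directory entry with default metadata (mode 0o755, root:root, current date).
  yield await createDirectoryItem("project");
  // String or Buffer content is stored directly on the entry.
  yield await createFileItem("project/hello.txt", "Hello, tar-vern!");
  // Generator-backed content; length is computed by buffering the chunks
  // because options.length is omitted here.
  yield await createGeneratorFileItem("project/data.bin", (async function* () {
    yield Buffer.from("chunk-1");
    yield Buffer.from("chunk-2");
  })());
};

// Pack with gzip compression and write the resulting stream to disk.
await storeReaderToFile(createTarPacker(entries(), "gzip"), "project.tar.gz");
```

The default metadata values (mode 420/493, i.e. `0o644`/`0o755`, owner `root:root`, current date) come from the fallbacks visible in the added code.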
@@ -93,8 +284,8 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
         // Entry is a file
         case "file": {
           const entryItemContent = entryItem.content;
-          if (typeof entryItemContent === "string") {
-            const contentBytes = Buffer.from(entryItemContent, "utf8");
+          if (typeof entryItemContent === "string" || Buffer.isBuffer(entryItemContent)) {
+            const contentBytes = getBuffer(entryItemContent);
             const tarHeaderBytes = createTarHeader(
               "file",
               entryItem.path,
@@ -107,23 +298,8 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
               entryItem.date
             );
             yield tarHeaderBytes;
-            const paddedContentBytes = getPaddedBytes(contentBytes);
-            yield paddedContentBytes;
-          } else if (Buffer.isBuffer(entryItemContent)) {
-            const tarHeaderBytes = createTarHeader(
-              "file",
-              entryItem.path,
-              entryItemContent.length,
-              entryItem.mode,
-              entryItem.uname,
-              entryItem.gname,
-              entryItem.uid,
-              entryItem.gid,
-              entryItem.date
-            );
-            yield tarHeaderBytes;
-            const paddedContentBytes = getPaddedBytes(entryItemContent);
-            yield paddedContentBytes;
+            const totalPaddedContentBytes = getPaddedBytes(contentBytes);
+            yield totalPaddedContentBytes;
           } else {
             const tarHeaderBytes = createTarHeader(
               "file",
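The two removed branches collapse into a single `getPaddedBytes(contentBytes)` call. The helper itself is unchanged by this diff; its definition, reproduced from the packer source embedded in the updated map, shows what happens to in-memory content:

```typescript
// Pad an in-memory entry body with zero bytes up to the next 512-byte
// tar block boundary; bodies that are already aligned pass through as-is.
const getPaddedBytes = (buffer: Buffer) => {
  const extra = buffer.length % 512;
  if (extra === 0) {
    return buffer;
  } else {
    return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);
  }
};
```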
@@ -137,42 +313,32 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
               entryItem.date
             );
             yield tarHeaderBytes;
+            let position = 0;
             switch (entryItemContent.kind) {
               // Content is a generator
               case "generator": {
-                let position = 0;
-                for await (const contentFragmentBytes of entryItemContent.generator) {
+                for await (const contentBytes of entryItemContent.generator) {
                   signal?.throwIfAborted();
-                  yield contentFragmentBytes;
-                  position += contentFragmentBytes.length;
-                }
-                if (position % 512 !== 0) {
-                  signal?.throwIfAborted();
-                  yield Buffer.alloc(512 - position % 512, 0);
+                  yield contentBytes;
+                  position += contentBytes.length;
                 }
                 break;
               }
               // Content is a readable stream
               case "readable": {
-                let position = 0;
                 for await (const content of entryItemContent.readable) {
                   signal?.throwIfAborted();
-                  if (typeof content === "string") {
-                    const stringBytes = Buffer.from(content, "utf8");
-                    yield stringBytes;
-                    position += stringBytes.length;
-                  } else if (Buffer.isBuffer(content)) {
-                    yield content;
-                    position += content.length;
-                  }
-                }
-                if (position % 512 !== 0) {
-                  signal?.throwIfAborted();
-                  yield Buffer.alloc(512 - position % 512, 0);
+                  const contentBytes = getBuffer(content);
+                  yield contentBytes;
+                  position += contentBytes.length;
                 }
                 break;
               }
             }
+            if (position % 512 !== 0) {
+              signal?.throwIfAborted();
+              yield Buffer.alloc(512 - position % 512, 0);
+            }
           }
           break;
         }
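The refactor keeps the per-chunk `signal?.throwIfAborted()` checks while moving the 512-byte padding after the content switch, so long packs remain cancellable. A hedged sketch; the import specifier, entry name and abort timing are assumptions for illustration:

```typescript
import { createFileItem, createTarPacker } from "tar-vern";

const controller = new AbortController();
const items = async function* () {
  yield await createFileItem("big.txt", "x".repeat(1024 * 1024));
};

const tarStream = createTarPacker(items(), "none", controller.signal);
controller.abort(); // e.g. from a timeout or a user action

try {
  for await (const chunk of tarStream) {
    void chunk; // a real consumer would write the chunk somewhere
  }
} catch (err) {
  // The throwIfAborted() check rejects the iteration once the signal fires.
  console.error("packing cancelled:", err);
}
```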
@@ -211,114 +377,12 @@ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
     }
   }
 };
-const getUName = (candidateName, candidateId, reflectStat) => {
-  return candidateName ?? (reflectStat === "all" ? candidateId.toString() : "root");
-};
-const createDirectoryItem = async (path, reflectStat, options) => {
-  const rs = reflectStat ?? "none";
-  if (rs !== "none" && options?.directoryPath) {
-    const stats = await promises.stat(options.directoryPath);
-    const mode = options?.mode ?? stats.mode;
-    const uid = options?.uid ?? stats.uid;
-    const gid = options?.gid ?? stats.gid;
-    const date = options?.date ?? stats.mtime;
-    const uname = getUName(options?.uname, stats.uid, rs);
-    const gname = getUName(options?.gname, stats.gid, rs);
-    return {
-      kind: "directory",
-      path,
-      mode,
-      uname,
-      gname,
-      uid,
-      gid,
-      date
-    };
-  } else {
-    const mode = options?.mode ?? 493;
-    const uid = options?.uid ?? 0;
-    const gid = options?.gid ?? 0;
-    const date = options?.date ?? /* @__PURE__ */ new Date();
-    const uname = getUName(options?.uname, void 0, rs);
-    const gname = getUName(options?.gname, void 0, rs);
-    return {
-      kind: "directory",
-      path,
-      mode,
-      uname,
-      gname,
-      uid,
-      gid,
-      date
-    };
-  }
-};
-const createReadableItem = async (path, reader, options) => {
-  let readable = reader;
-  let length = options?.length;
-  if (!length) {
-    const chunks = [];
-    length = 0;
-    for await (const chunk of reader) {
-      const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, "utf8");
-      chunks.push(buffer);
-      length += buffer.length;
-    }
-    readable = stream.Readable.from(chunks);
-  }
-  const mode = options?.mode ?? 420;
-  const uid = options?.uid ?? 0;
-  const gid = options?.gid ?? 0;
-  const date = options?.date ?? /* @__PURE__ */ new Date();
-  const uname = options?.uname ?? "root";
-  const gname = options?.gname ?? "root";
-  return {
-    kind: "file",
-    path,
-    mode,
-    uname,
-    gname,
-    uid,
-    gid,
-    date,
-    content: {
-      kind: "readable",
-      length,
-      readable
-    }
-  };
-};
-const createReadFileItem = async (path, filePath, reflectStat, options) => {
-  const rs = reflectStat ?? "exceptName";
-  const stats = await promises.stat(filePath);
-  const reader = fs.createReadStream(filePath);
-  const mode = options?.mode ?? (rs !== "none" ? stats.mode : void 0);
-  const uid = options?.uid ?? (rs !== "none" ? stats.uid : void 0);
-  const gid = options?.gid ?? (rs !== "none" ? stats.gid : void 0);
-  const date = options?.date ?? (rs !== "none" ? stats.mtime : void 0);
-  const uname = getUName(options?.uname, stats.uid, rs);
-  const gname = getUName(options?.gname, stats.gid, rs);
-  return await createReadableItem(path, reader, {
-    length: stats.size,
-    mode,
-    uname,
-    gname,
-    uid,
-    gid,
-    date
-  });
-};
-const storeReaderToFile = (reader, path) => {
-  const writer = fs.createWriteStream(path);
-  reader.pipe(writer);
-  return new Promise((res, rej) => {
-    writer.on("finish", res);
-    writer.on("error", rej);
-  });
-};
 exports.createDirectoryItem = createDirectoryItem;
+exports.createFileItem = createFileItem;
+exports.createGeneratorFileItem = createGeneratorFileItem;
 exports.createReadFileItem = createReadFileItem;
-exports.createReadableItem = createReadableItem;
+exports.createReadableFileItem = createReadableFileItem;
 exports.createTarPacker = createTarPacker;
+exports.getBuffer = getBuffer;
 exports.storeReaderToFile = storeReaderToFile;
 //# sourceMappingURL=index.cjs.map
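Net effect on the export surface: `createReadableItem` is replaced by `createReadableFileItem`, and `createFileItem`, `createGeneratorFileItem` and `getBuffer` are newly exported. A minimal migration sketch, assuming the `tar-vern` import specifier; the entry name is illustrative:

```typescript
import { Readable } from "stream";
// 0.1.0: import { createReadableItem } from "tar-vern";
import { createReadableFileItem, getBuffer } from "tar-vern";

// Same call shape as before; only the export name changed. When no length
// option is given, the stream is buffered once to compute the entry size.
const item = await createReadableFileItem(
  "notes.txt",
  Readable.from([getBuffer("plain text")])
);
```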
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
The single-line source map is regenerated for 0.3.0: "sources" is reordered from ["../src/packer.ts", "../src/utils.ts"] to ["../src/utils.ts", "../src/packer.ts"], and the embedded sourcesContent carries the updated TypeScript sources. In utils.ts this adds getBuffer, createFileItem, createGeneratorFileItem and createReadableFileItem (replacing createReadableItem) and registers an error handler on the reader in storeReaderToFile; in packer.ts it imports getBuffer from "./utils", handles string and Buffer content through a single branch, and applies the 512-byte padding once after either content loop. The minified mappings payload is not reproduced here.