tar-vern 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README_pack.md +12 -0
- package/dist/extractor.d.ts +2 -2
- package/dist/index.cjs +33 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +34 -4
- package/dist/index.js.map +1 -1
- package/dist/packer.d.ts +2 -2
- package/dist/types.d.ts +2 -2
- package/dist/utils.d.ts +4 -4
- package/dist/utils.d.ts.map +1 -1
- package/package.json +6 -6
package/README_pack.md
CHANGED
@@ -80,6 +80,18 @@ for await (const extractedItem of createTarExtractor(readableStream, 'gzip')) {
 
 ----
 
+## Features
+
+- Bidirectional streaming: Both creation and extraction of tar archives
+- Memory-efficient: Streaming API for processing large files without content buffering
+- Multiple content sources: String, Buffer, ReadableStream, file paths and async generators
+- Metadata preservation: File permissions, ownership, timestamps
+- Built-in compression/decompression: GZip compression support (`tar.gz` format)
+- Flexible content access: Extract files as string, Buffer, or Readable stream on demand
+- Error handling: Comprehensive validation and error reporting
+- Abort signal support: Cancellable operations
+- No external dependencies: Pure TypeScript implementation
+
 For more information, [see repository documents](http://github.com/kekyo/tar-vern/).
 
 ----
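The added Features section reads as a compact API overview. For orientation, here is a minimal, hedged usage sketch based on the function signatures visible in the bundled sources elsewhere in this diff (createFileItem, createTarPacker, createTarExtractor, storeReaderToFile). It assumes these are re-exported from the package entry point, as the bundled dist/index.cjs suggests; file names are illustrative.

```typescript
import { createReadStream } from "fs";
import {
  createFileItem, createTarPacker, createTarExtractor, storeReaderToFile
} from "tar-vern";

// Pack: yield entry items from an async generator, compress with gzip,
// and store the resulting tar stream as a file.
const entries = async function* () {
  yield await createFileItem("hello.txt", "Hello, tar-vern!");
};
await storeReaderToFile(createTarPacker(entries(), "gzip"), "archive.tar.gz");

// Extract: iterate the archive and read each file's content on demand
// as a string, Buffer, or Readable stream.
for await (const item of createTarExtractor(createReadStream("archive.tar.gz"), "gzip")) {
  if (item.kind === "file") {
    console.log(item.path, await item.getContent("string"));
  }
}
```

Both directions stream: the packer consumes entry items lazily, and the extractor defers reading a file's body until getContent is called.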
package/dist/extractor.d.ts
CHANGED
@@ -1,11 +1,11 @@
 /*!
  * name: tar-vern
- * version: 1.0.0
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
- * git.commit.hash:
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */
 
 import { Readable } from 'stream';
package/dist/index.cjs
CHANGED
@@ -1,11 +1,11 @@
 /*!
  * name: tar-vern
- * version: 1.0.0
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
- * git.commit.hash:
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
@@ -208,9 +208,39 @@ const storeReaderToFile = async (reader, path2, signal) => {
   const writer = fs.createWriteStream(path2, { signal });
   await promises$1.pipeline(reader, writer, { signal });
 };
+const getAllFilesInDirectory = async (baseDir, signal) => {
+  const collectFiles = async (currentDir, relativePath) => {
+    signal?.throwIfAborted();
+    try {
+      const entries = await promises.readdir(currentDir, { withFileTypes: true });
+      const result = [];
+      const tasks = entries.map(async (entry) => {
+        signal?.throwIfAborted();
+        const entryRelativePath = path.join(relativePath, entry.name);
+        if (entry.isDirectory()) {
+          const entryFullPath = path.join(currentDir, entry.name);
+          const directoryContents = await collectFiles(entryFullPath, entryRelativePath);
+          return [entryRelativePath, ...directoryContents];
+        } else {
+          return [entryRelativePath];
+        }
+      });
+      const allResults = await Promise.all(tasks);
+      for (const entryResults of allResults) {
+        result.push(...entryResults);
+      }
+      return result;
+    } catch (error) {
+      console.warn(`Warning: Could not read directory ${currentDir}:`, error);
+      return [];
+    }
+  };
+  return await collectFiles(baseDir, "");
+};
 const createEntryItemGenerator = async function* (baseDir, relativePaths, reflectStat, signal) {
   const rs = reflectStat ?? "exceptName";
-  for (const relativePath of relativePaths) {
+  const pathsToProcess = relativePaths ?? await getAllFilesInDirectory(baseDir, signal);
+  for (const relativePath of pathsToProcess) {
     signal?.throwIfAborted();
     const fsPath = path.join(baseDir, relativePath);
     try {
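This hunk carries the functional change behind the 1.0.0 → 1.1.0 bump: relativePaths is now optional, and when it is omitted the generator walks baseDir recursively through the new getAllFilesInDirectory helper (unreadable directories are skipped with a console warning). A hedged sketch of both call styles, with illustrative paths and assuming the same package-root exports as above:

```typescript
import { createEntryItemGenerator, createTarPacker, storeReaderToFile } from "tar-vern";

// 1.1.0: omit relativePaths to archive everything under baseDir;
// the generator collects the relative paths itself.
const allEntries = createEntryItemGenerator("./some-dir");
await storeReaderToFile(createTarPacker(allEntries, "gzip"), "some-dir.tar.gz");

// 1.0.0-style call still works: pass an explicit list of relative paths.
const selected = createEntryItemGenerator("./some-dir", ["a.txt", "sub/b.txt"]);
await storeReaderToFile(createTarPacker(selected), "subset.tar");
```

Passing an explicit path list keeps the 1.0.0 behaviour, so existing callers are unaffected by the new parameter default.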
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.cjs","sources":["../src/utils.ts","../src/packer.ts","../src/extractor.ts"], ...}  (single-line source map with the 1.0.0 utils.ts, packer.ts and extractor.ts sources embedded in sourcesContent)
+{"version":3,"file":"index.cjs","sources":["../src/utils.ts","../src/packer.ts","../src/extractor.ts"], ...}  (regenerated for 1.1.0: the embedded utils.ts source now imports readdir from "fs/promises", adds the recursive getAllFilesInDirectory helper, and makes the relativePaths parameter of createEntryItemGenerator optional)
await getAllFilesInDirectory(baseDir, signal);\n \n for (const relativePath of pathsToProcess) {\n signal?.throwIfAborted();\n \n const fsPath = join(baseDir, relativePath);\n \n try {\n signal?.throwIfAborted();\n const stats = await stat(fsPath);\n \n if (stats.isDirectory()) {\n // Create directory entry\n yield await createDirectoryItem(relativePath, rs, {\n directoryPath: fsPath\n }, signal);\n } else if (stats.isFile()) {\n // Create file entry\n yield await createReadFileItem(relativePath, fsPath, rs, undefined, signal);\n }\n } catch (error) {\n // Skip files that can't be accessed (permissions, etc.)\n console.warn(`Warning: Could not access ${fsPath}:`, error);\n continue;\n }\n }\n};\n\n/**\n * Extract entries from a tar extractor to a directory on the filesystem\n * @param iterator - Async generator of extracted entry items\n * @param basePath - Base directory path where entries will be extracted\n * @param signal - Optional abort signal to cancel the operation\n * @returns Promise that resolves when extraction is complete\n */\nexport const extractTo = async (\n iterator: AsyncGenerator<ExtractedEntryItem, void, unknown>,\n basePath: string,\n signal?: AbortSignal\n): Promise<void> => {\n for await (const entry of iterator) {\n signal?.throwIfAborted();\n \n const targetPath = join(basePath, entry.path);\n \n if (entry.kind === 'directory') {\n // Create directory\n try {\n signal?.throwIfAborted();\n await mkdir(targetPath, { recursive: true, mode: entry.mode });\n } catch (error) {\n // Directory might already exist, which is fine\n if ((error as any).code !== 'EEXIST') {\n throw error;\n }\n }\n } else if (entry.kind === 'file') {\n // Create parent directories if they don't exist\n const parentDir = dirname(targetPath);\n signal?.throwIfAborted();\n await mkdir(parentDir, { recursive: true });\n \n // Extract file content and write to filesystem\n const fileEntry = entry as ExtractedFileItem;\n const content = await fileEntry.getContent('buffer');\n await writeFile(targetPath, content, { mode: entry.mode, signal });\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGzip } from \"zlib\";\nimport { getBuffer, MAX_NAME, MAX_PREFIX } from \"./utils\";\nimport { CompressionTypes, EntryItem, EntryItemContent } from \"./types\";\n\n/**\n * Get the byte length of a string in UTF-8\n * @param str - The string to get the byte length of\n * @returns The byte length of the string\n */\nconst utf8ByteLength = (str: string) => {\n return Buffer.byteLength(str, \"utf8\");\n}\n\n/**\n * Truncate a string to a maximum byte length in UTF-8\n * @param str - The string to truncate\n * @param maxBytes - The maximum byte length\n * @returns The truncated string\n */\nconst truncateUtf8Safe = (str: string, maxBytes: number) => {\n let total = 0;\n let i = 0;\n while (i < str.length) {\n const codePoint = str.codePointAt(i)!;\n const char = String.fromCodePoint(codePoint);\n const charBytes = Buffer.byteLength(char, \"utf8\");\n if (total + charBytes > maxBytes) break;\n total += charBytes;\n i += char.length;\n }\n return str.slice(0, i);\n}\n\n/**\n * Split a path into a name and a prefix\n * @param path - The path to split\n * @returns The name and prefix\n */\nconst splitPath = (path: string) => {\n if (utf8ByteLength(path) <= MAX_NAME) {\n return { prefix: \"\", name: path };\n }\n\n // Split by '/' and find the 
part that fits in name from the end\n const parts = path.split(\"/\");\n let name = parts.pop() ?? \"\";\n let prefix = parts.join(\"/\");\n\n // Truncate if name exceeds 100 bytes\n if (utf8ByteLength(name) > MAX_NAME) {\n name = truncateUtf8Safe(name, MAX_NAME);\n }\n\n // Truncate if prefix exceeds 155 bytes\n while (utf8ByteLength(prefix) > MAX_PREFIX) {\n prefix = truncateUtf8Safe(prefix, MAX_PREFIX);\n }\n\n return { prefix, name };\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Get octal bytes from a number\n * @param value - The number to get octal bytes from\n * @param length - The length of the octal bytes\n * @returns The octal bytes\n */\nconst getOctalBytes = (value: number, length: number) => {\n const str = value.toString(8).padStart(length - 1, \"0\") + \"\\0\";\n return Buffer.from(str, \"ascii\");\n};\n\n/**\n * Get padded bytes from a buffer\n * @param buffer - The buffer to get padded bytes from\n * @returns The padded bytes\n */\nconst getPaddedBytes = (buffer: Buffer) => {\n const extra = buffer.length % 512;\n if (extra === 0) {\n return buffer;\n } else {\n return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);\n }\n}\n\n/**\n * The terminator bytes\n */\nconst terminatorBytes = Buffer.alloc(1024, 0);\n\n/**\n * Create a tar header\n * @param type - The type of the entry\n * @param path - The path of the entry\n * @param size - The size of the entry\n * @param mode - The mode of the entry\n * @param uname - The user name of the entry\n * @param gname - The group name of the entry\n */\nconst createTarHeader = (\n type: 'file' | 'directory',\n path: string,\n size: number,\n mode: number,\n uname: string,\n gname: string,\n uid: number,\n gid: number,\n date: Date\n) => {\n // Allocate header bytes\n const buffer = Buffer.alloc(512, 0);\n\n // Split path into name and prefix\n const { name, prefix } = splitPath(path);\n\n // Write name, mode, uid, gid, size, mtime, typeflag, prefix, checksum\n buffer.write(name, 0, 100, \"utf8\");\n getOctalBytes(mode & 0o7777, 8).copy(buffer, 100);\n getOctalBytes(uid, 8).copy(buffer, 108);\n getOctalBytes(gid, 8).copy(buffer, 116);\n getOctalBytes(size, 12).copy(buffer, 124);\n getOctalBytes(Math.floor(date.getTime() / 1000), 12).copy(buffer, 136);\n\n // Check sum space\n Buffer.from(\" \", \"ascii\").copy(buffer, 148);\n\n if (type === 'file') {\n buffer.write(\"0\", 156, 1, \"ascii\"); // typeflag (file)\n } else {\n buffer.write(\"5\", 156, 1, \"ascii\"); // typeflag (directory)\n }\n buffer.write(\"ustar\\0\", 257, 6, \"ascii\");\n buffer.write(\"00\", 263, 2, \"ascii\"); // version\n buffer.write(uname, 265, 32, \"utf8\");\n buffer.write(gname, 297, 32, \"utf8\");\n buffer.write(prefix, 345, 155, \"utf8\"); // Path prefix\n\n // Calculate check sum\n let sum = 0;\n for (let i = 0; i < 512; i++) {\n sum += buffer[i];\n }\n getOctalBytes(sum, 8).copy(buffer, 148);\n\n return buffer;\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar packer\n * @param entryItemGenerator - The async generator of the entry items\n * @param compressionType - The compression type to use (Default: 'none')\n * @param signal - The abort signal to cancel the tar packer\n * @returns Readable stream of the tar packer\n */\nexport const createTarPacker = (\n entryItemGenerator: AsyncGenerator<EntryItem, void, unknown>,\n compressionType?: CompressionTypes,\n signal?: AbortSignal) => {\n\n // Create async generator function from entry 
item iterator\n const entryItemIterator = async function*() {\n // Iterate over the entry items\n for await (const entryItem of entryItemGenerator) {\n signal?.throwIfAborted();\n\n switch (entryItem.kind) {\n // Entry is a file\n case 'file': {\n const entryItemContent = entryItem.content;\n // Content is a string or buffer\n if (typeof entryItemContent === 'string' || Buffer.isBuffer(entryItemContent)) {\n // Get content bytes from string or buffer\n const contentBytes = getBuffer(entryItemContent);\n\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n contentBytes.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n // Content bytes to adjust padding space and produce\n const totalPaddedContentBytes = getPaddedBytes(contentBytes);\n yield totalPaddedContentBytes;\n } else {\n // Assert that this is EntryItemContent, not FileItemReader (packer doesn't handle FileItemReader)\n const content = entryItemContent as EntryItemContent;\n \n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n content.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n let position = 0;\n switch (content.kind) {\n // Content is a generator\n case 'generator': {\n for await (const contentBytes of content.generator) {\n signal?.throwIfAborted();\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n // Content is a readable stream\n case 'readable': {\n for await (const chunk of content.readable) {\n signal?.throwIfAborted();\n const contentBytes = getBuffer(chunk);\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n }\n\n // Padding space\n if (position % 512 !== 0) {\n signal?.throwIfAborted();\n yield Buffer.alloc(512 - (position % 512), 0);\n }\n }\n break;\n }\n // Entry is a directory\n case 'directory': {\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'directory',\n entryItem.path,\n 0,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date\n );\n yield tarHeaderBytes;\n break;\n }\n }\n }\n\n // Terminates for tar stream\n yield terminatorBytes;\n };\n\n const ct = compressionType ?? 
'none';\n\n switch (ct) {\n // No compression\n case 'none': {\n // Create readable stream from entry item iterator\n return Readable.from(entryItemIterator(), { signal });\n }\n // Gzip compression\n case 'gzip': {\n // Create gzip stream\n const gzipStream = createGzip({ level: 9 });\n // Create readable stream from entry item iterator\n const entryItemStream = Readable.from(entryItemIterator(), { signal });\n // Pipe the entry item stream to the gzip stream\n entryItemStream.pipe(gzipStream);\n // Return the gzip stream\n return gzipStream;\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGunzip } from \"zlib\";\nimport { CompressionTypes, ExtractedDirectoryItem, ExtractedEntryItem, ExtractedFileItem } from \"./types\";\nimport { getBuffer } from \"./utils\";\n\n/**\n * Parse octal bytes to number\n * @param buffer - The buffer containing octal bytes\n * @param offset - The offset in the buffer\n * @param length - The length of the octal bytes\n * @returns The parsed number\n */\nconst parseOctalBytes = (buffer: Buffer, offset: number, length: number): number => {\n const str = buffer.subarray(offset, offset + length).toString('ascii').replace(/\\0/g, '').trim();\n return str ? parseInt(str, 8) : 0;\n};\n\n/**\n * Parse string from buffer\n * @param buffer - The buffer containing the string\n * @param offset - The offset in the buffer\n * @param length - The length of the string\n * @returns The parsed string\n */\nconst parseString = (buffer: Buffer, offset: number, length: number): string => {\n return buffer.subarray(offset, offset + length).toString('utf8').replace(/\\0/g, '').trim();\n};\n\n/**\n * Read exact number of bytes from stream\n * @param iterator - The async iterator\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns The buffer containing the read bytes\n */\nconst readExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<Buffer | undefined> => {\n\n const chunks: Buffer[] = [];\n let totalRead = 0;\n\n while (totalRead < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n if (totalRead === 0) {\n return undefined; // No data at all\n } else {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);\n }\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalRead;\n \n if (chunk.length <= needed) {\n chunks.push(chunk);\n totalRead += chunk.length;\n } else {\n // We read more than needed, split the chunk\n chunks.push(chunk.subarray(0, needed));\n // Put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalRead = size;\n }\n }\n\n return Buffer.concat(chunks, size);\n};\n\n/**\n * Skip exact number of bytes from stream without buffering\n * @param iterator - The async iterator\n * @param size - The number of bytes to skip\n * @param signal - The abort signal\n */\nconst skipExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<void> => {\n\n let totalSkipped = 0;\n\n while (totalSkipped < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped 
${totalSkipped} bytes`);\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalSkipped;\n \n if (chunk.length <= needed) {\n totalSkipped += chunk.length;\n } else {\n // We read more than needed, put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalSkipped = size;\n }\n }\n};\n\n/**\n * Iterator will be skip padding bytes.\n * @param iterator - Async iterator\n * @param contentSize - Total content size to calculate boundary position\n * @param signal - Abort signal\n */\nconst skipPaddingBytesTo512Boundary = async (\n iterator: AsyncIterator<string | Buffer>,\n contentSize: number,\n signal: AbortSignal | undefined) => {\n // Skip padding bytes to next 512-byte boundary\n const padding = (512 - (contentSize % 512)) % 512;\n if (padding > 0) {\n await skipExactBytes(iterator, padding, signal);\n }\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Tar file/directory entry item.\n */\ninterface EntryItemInfo {\n readonly kind: 'file' | 'directory';\n readonly path: string;\n readonly size: number;\n readonly mode: number;\n readonly uid: number;\n readonly gid: number;\n readonly mtime: Date;\n readonly uname: string;\n readonly gname: string;\n readonly checksum: number;\n /**\n * This entry (file) item is consumed.\n */\n consumed: boolean;\n}\n\n/**\n * Parse tar header from buffer\n * @param buffer - The buffer containing the tar header\n * @returns The parsed entry information or null if end of archive\n */\nconst parseTarHeader = (buffer: Buffer): EntryItemInfo | undefined => {\n // Check if this is the end of archive (all zeros)\n if (buffer.every(b => b === 0)) {\n return undefined;\n }\n\n // Parse header fields\n const name = parseString(buffer, 0, 100);\n const mode = parseOctalBytes(buffer, 100, 8);\n const uid = parseOctalBytes(buffer, 108, 8);\n const gid = parseOctalBytes(buffer, 116, 8);\n const size = parseOctalBytes(buffer, 124, 12);\n const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1000);\n const checksum = parseOctalBytes(buffer, 148, 8);\n const typeflag = parseString(buffer, 156, 1);\n const magic = parseString(buffer, 257, 6);\n const uname = parseString(buffer, 265, 32);\n const gname = parseString(buffer, 297, 32);\n const prefix = parseString(buffer, 345, 155);\n\n // Verify magic (should be \"ustar\" for POSIX tar)\n if (magic !== 'ustar') {\n throw new Error(`Invalid tar format: magic=\"${magic}\"`);\n }\n\n // Calculate checksum\n let calculatedSum = 0;\n for (let i = 0; i < 512; i++) {\n if (i >= 148 && i < 156) {\n calculatedSum += 32; // Space character\n } else {\n calculatedSum += buffer[i];\n }\n }\n\n if (calculatedSum !== checksum) {\n throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);\n }\n\n // Construct full path and remove trailing slash for directories\n let path = prefix ? `${prefix}/${name}` : name;\n if (path.endsWith('/')) {\n path = path.slice(0, -1);\n }\n\n // Determine type\n const kind = typeflag === '5' ? 
'directory' : 'file';\n\n return {\n kind,\n path,\n size,\n mode,\n uid,\n gid,\n mtime,\n uname: uname || uid.toString(),\n gname: gname || gid.toString(),\n checksum,\n consumed: false\n };\n};\n\n/**\n * Create a buffered async iterator that allows returning data\n */\nconst createBufferedAsyncIterator = (\n iterable: AsyncIterable<string | Buffer>,\n signal: AbortSignal | undefined\n): AsyncIterator<string | Buffer> => {\n const buffer: (string | Buffer)[] = [];\n const iterator = iterable[Symbol.asyncIterator]();\n return {\n next: async () => {\n signal?.throwIfAborted();\n if (buffer.length > 0) {\n return { value: buffer.shift()!, done: false };\n }\n return iterator.next();\n },\n return: async (value?: string | Buffer) => {\n if (value !== undefined) {\n buffer.unshift(value);\n }\n return { value: undefined, done: false };\n }\n };\n};\n\n/**\n * Create a readable stream from an async iterator with size limit\n * @param iterator - The async iterator to read from\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns Readable stream\n */\nconst createReadableFromIterator = (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined,\n consumedRef: { consumed: boolean }\n): Readable => {\n const generator = async function*() {\n let remainingBytes = size;\n \n while (remainingBytes > 0) {\n signal?.throwIfAborted();\n\n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);\n }\n\n const chunk = getBuffer(value);\n if (chunk.length <= remainingBytes) {\n remainingBytes -= chunk.length;\n yield chunk;\n } else {\n // We read more than needed\n const needed = chunk.subarray(0, remainingBytes);\n const excess = chunk.subarray(remainingBytes);\n remainingBytes = 0;\n \n // Return excess data to the iterator\n await iterator.return?.(excess);\n yield needed;\n break;\n }\n }\n\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, size, signal);\n\n // Finished to consume\n consumedRef.consumed = true;\n };\n\n return Readable.from(generator(), { signal });\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar extractor\n * @param readable - The readable stream containing tar data\n * @param compressionType - The compression type (default: 'none')\n * @param signal - The abort signal\n * @returns Async generator of entry items\n */\nexport const createTarExtractor = async function* (\n readable: Readable,\n compressionType?: CompressionTypes,\n signal?: AbortSignal): AsyncGenerator<ExtractedEntryItem, void, unknown> {\n\n const ct = compressionType ?? 
'none';\n\n // Apply decompression if needed\n let inputStream: Readable;\n switch (ct) {\n case 'gzip':\n const gunzip = createGunzip();\n readable.pipe(gunzip);\n inputStream = gunzip;\n break;\n case 'none':\n default:\n inputStream = readable;\n break;\n }\n\n // Get async iterator from the stream\n const iterator = createBufferedAsyncIterator(inputStream, signal);\n\n // Last entry item\n let header: EntryItemInfo | undefined;\n\n // For each tar items\n while (true) {\n signal?.throwIfAborted();\n\n // Did not consume last file item yielding?\n if (header?.kind === 'file' && !header.consumed) {\n // Have to skip the file contents and boundary\n\n // Skip entire contents without buffering\n await skipExactBytes(iterator, header.size, signal);\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, header.size, signal);\n\n // Mark consumed\n header.consumed = true;\n }\n\n // Read header (512 bytes)\n let headerBuffer: Buffer | undefined;\n try {\n headerBuffer = await readExactBytes(iterator, 512, signal);\n } catch (error) {\n if (error instanceof Error && error.message.includes('Unexpected end of stream')) {\n throw new Error('Invalid tar format: incomplete header');\n }\n throw error;\n }\n \n if (headerBuffer === undefined) {\n break; // End of stream\n }\n\n // Parse header\n header = parseTarHeader(headerBuffer);\n if (!header) {\n // Check for second terminator block\n const secondBlock = await readExactBytes(iterator, 512, signal);\n if (secondBlock === undefined || secondBlock.every(b => b === 0)) {\n break; // Proper end of archive\n }\n throw new Error('Invalid tar format: expected terminator block');\n }\n\n if (header.kind === 'directory') {\n // Yield directory entry\n yield {\n kind: 'directory',\n path: header.path,\n mode: header.mode,\n uid: header.uid,\n gid: header.gid,\n uname: header.uname,\n gname: header.gname,\n date: header.mtime\n } as ExtractedDirectoryItem;\n } else {\n // Capture current header to avoid closure issues\n const currentHeader = header;\n \n // Yield file entry with lazy getContent\n yield {\n kind: 'file',\n path: currentHeader.path,\n mode: currentHeader.mode,\n uid: currentHeader.uid,\n gid: currentHeader.gid,\n uname: currentHeader.uname,\n gname: currentHeader.gname,\n date: currentHeader.mtime,\n getContent: async (type: any) => {\n // Is multiple called\n if (currentHeader.consumed) {\n throw new Error('Content has already been consumed. 
Multiple calls to getContent are not supported.');\n }\n\n switch (type) {\n // For string\n case 'string': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer.toString('utf8');\n }\n // For buffer\n case 'buffer': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer;\n }\n // For Readble stream\n case 'readable': {\n // Get Readble object (to delegate)\n const readable = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);\n return readable;\n }\n default:\n throw new Error(`Unsupported content type: ${type}`);\n }\n }\n } as ExtractedFileItem;\n }\n }\n};\n"],"names":["path","stat","Readable","createReadStream","createWriteStream","pipeline","readdir","join","mkdir","dirname","writeFile","createGzip","createGunzip","readable"],"mappings":";;;;;;;;AAaO,MAAM,WAAW;AACjB,MAAM,aAAa;AAS1B,MAAM,WAAW,CAAC,eAAmC,aAAqB,gBAA0C;AAClH,SAAO,kBAAkB,gBAAgB,QAAQ,YAAY,aAAa;AAC5E;AAOO,MAAM,YAAY,CAAC,SAA0B;AAClD,SAAO,OAAO,SAAS,IAAI,IAAI,OAAO,OAAO,KAAK,MAAM,MAAM;AAChE;AAaO,MAAM,sBAAsB,OACjCA,OACA,aACA,SACA,WAC2B;AAC3B,QAAM,KAAK,eAAe;AAE1B,MAAI,OAAO,UAAU,SAAS,eAAe;AAC3C,YAAQ,eAAA;AACR,UAAM,QAAQ,MAAMC,cAAK,QAAQ,aAAa;AAC9C,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAD;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC,OAAO;AACL,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAClC,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAA;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC;AACF;AAUO,MAAM,iBAAiB,OAC5BA,OACA,SACA,SACA,WACsB;AACtB,UAAQ,eAAA;AAER,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAAA;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,IACpC;AAAA,EAAA;AAEJ;AAUO,MAAM,yBAAyB,OACpCA,OACA,UACA,SACA,WACsB;AACtB,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,UAAU;AAClC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAA;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAUE,OAAAA,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAF;AAAA
,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAUO,MAAM,0BAA0B,OACrCA,OACA,WACA,SACA,WACsB;AACtB,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,WAAW;AACnC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAA;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAUE,OAAAA,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAAF;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAWO,MAAM,qBAAqB,OAChCA,OACA,UACA,aACA,SACA,WACsB;AACtB,QAAM,KAAK,eAAe;AAG1B,UAAQ,eAAA;AACR,QAAM,QAAQ,MAAMC,SAAAA,KAAK,QAAQ;AAEjC,QAAM,SAASE,GAAAA,iBAAiB,UAAU,EAAE,QAAQ;AAEpD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,OAAO;AAC5D,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,QAAQ;AAE7D,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AAGpD,SAAO,MAAM,uBAAuBH,OAAM,QAAQ;AAAA,IAChD,QAAQ,MAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,EAAA,GACjD,MAAM;AACX;AAWO,MAAM,oBAAoB,OAAO,QAAkBA,OAAc,WAAyB;AAC/F,QAAM,SAASI,GAAAA,kBAAkBJ,OAAM,EAAE,QAAQ;AACjD,QAAMK,WAAAA,SAAS,QAAQ,QAAQ,EAAE,QAAQ;AAC3C;AAUA,MAAM,yBAAyB,OAC7B,SAAiB,WAAuD;AAExE,QAAM,eAAe,OAAO,YAAoB,iBAA4C;AAC1F,YAAQ,eAAA;AAER,QAAI;AACF,YAAM,UAAU,MAAMC,SAAAA,QAAQ,YAAY,EAAE,eAAe,MAAM;AACjE,YAAM,SAAmB,CAAA;AAGzB,YAAM,QAAQ,QAAQ,IAAI,OAAO,UAAU;AACzC,gBAAQ,eAAA;AAER,cAAM,oBAAoBC,KAAAA,KAAK,cAAc,MAAM,IAAI;AAEvD,YAAI,MAAM,eAAe;AACvB,gBAAM,gBAAgBA,KAAAA,KAAK,YAAY,MAAM,IAAI;AAEjD,gBAAM,oBAAoB,MAAM,aAAa,eAAe,iBAAiB;AAC7E,iBAAO,CAAC,mBAAmB,GAAG,iBAAiB;AAAA,QACjD,OAAO;AAEL,iBAAO,CAAC,iBAAiB;AAAA,QAC3B;AAAA,MACF,CAAC;AAED,YAAM,aAAa,MAAM,QAAQ,IAAI,KAAK;AAG1C,iBAAW,gBAAgB,YAAY;AACrC,eAAO,KAAK,GAAG,YAAY;AAAA,MAC7B;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AACd,cAAQ,KAAK,qCAAqC,UAAU,KAAK,KAAK;AACtE,aAAO,CAAA;AAAA,IACT;AAAA,EACF;AAEA,SAAO,MAAM,aAAa,SAAS,EAAE;AACvC;AAUO,MAAM,2BAA2B,iBACtC,SACA,eACA,aACA,QAC0C;AAC1C,QAAM,KAAK,eAAe;AAG1B,QAAM,iBAAiB,iBAAiB,MAAM,uBAAuB,SAAS,MAAM;AAEpF,aAAW,gBAAgB,gBAAgB;AACzC,YAAQ,eAAA;AAER,UAAM,SAASA,KAAAA,KAAK,SAAS,YAAY;AAEzC,QAAI;AACF,cAAQ,eAAA;AACR,YAAM,QAAQ,MAAMN,SAAAA,KAAK,MAAM;AAE/B,UAAI,MAAM,eAAe;AAEvB,cAAM,MAAM,oBAAoB,cAAc,IAAI;AAAA,UAChD,eAAe;AAAA,QAAA,GACd,MAAM;AAAA,MACX,WAAW,MAAM,UAAU;AAEzB,cAAM,MAAM,mBAAmB,cAAc,QAAQ,IAAI,QAAW,MAAM;AAAA,MAC5E;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,KAAK,6BAA6B,MAAM,KAAK,KAAK;AAC1D;AAAA,IACF;AAAA,EACF;AACF;AASO,MAAM,YAAY,OACvB,UACA,UACA,WACkB;AAClB,mBAAiB,SAAS,UAAU;AAClC,YAAQ,eAAA;AAER,UAAM,aAAaM,KAAAA,KAAK,UAAU,MAAM,IAAI;AAE5C,QAAI,MAAM,SAAS,aAAa;AAE9B,UAAI;AACF,gBAAQ,eAAA;AACR,cAAMC,SAAAA,MAAM,YAAY,EAAE,WAAW,MAAM,MAAM,MAAM,MAAM;AAAA,MAC/D,SAAS,OAAO;AAEd,YAAK,MAAc,SAAS,UAAU;AACpC,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF,WAAW,MAAM,SAAS,QAAQ;AAEhC,YAAM,YAAYC,KAAAA,QAAQ,UAAU;AACpC,cAAQ,eAAA;AACR,YAAMD,SAAAA,MAAM,WAAW,EAAE,WAAW,MAAM;AAG1C,YAAM,YAAY;AAClB,YAAM,UAAU,MAAM,UAAU,WAAW,QAAQ;AACnD,YAAME,SAAAA,UAAU,YAAY,SAAS,EAAE,MAAM,MAAM,MAAM,QAAQ;AAAA,IACnE;AAAA,EACF;AACF;AC1ZA,MAAM,iBAAiB,CAAC,QAAgB;AACtC,SAAO,OAAO,WAAW,KAAK,MAAM;AACtC;AAQA,MAAM,mBAAmB,CAAC,KAAa,
aAAqB;AAC1D,MAAI,QAAQ;AACZ,MAAI,IAAI;AACR,SAAO,IAAI,IAAI,QAAQ;AACrB,UAAM,YAAY,IAAI,YAAY,CAAC;AACnC,UAAM,OAAO,OAAO,cAAc,SAAS;AAC3C,UAAM,YAAY,OAAO,WAAW,MAAM,MAAM;AAChD,QAAI,QAAQ,YAAY,SAAU;AAClC,aAAS;AACT,SAAK,KAAK;AAAA,EACZ;AACA,SAAO,IAAI,MAAM,GAAG,CAAC;AACvB;AAOA,MAAM,YAAY,CAACV,UAAiB;AAClC,MAAI,eAAeA,KAAI,KAAK,UAAU;AACpC,WAAO,EAAE,QAAQ,IAAI,MAAMA,MAAA;AAAA,EAC7B;AAGA,QAAM,QAAQA,MAAK,MAAM,GAAG;AAC5B,MAAI,OAAO,MAAM,IAAA,KAAS;AAC1B,MAAI,SAAS,MAAM,KAAK,GAAG;AAG3B,MAAI,eAAe,IAAI,IAAI,UAAU;AACnC,WAAO,iBAAiB,MAAM,QAAQ;AAAA,EACxC;AAGA,SAAO,eAAe,MAAM,IAAI,YAAY;AAC1C,aAAS,iBAAiB,QAAQ,UAAU;AAAA,EAC9C;AAEA,SAAO,EAAE,QAAQ,KAAA;AACnB;AAUA,MAAM,gBAAgB,CAAC,OAAe,WAAmB;AACvD,QAAM,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,SAAS,GAAG,GAAG,IAAI;AAC1D,SAAO,OAAO,KAAK,KAAK,OAAO;AACjC;AAOA,MAAM,iBAAiB,CAAC,WAAmB;AACzC,QAAM,QAAQ,OAAO,SAAS;AAC9B,MAAI,UAAU,GAAG;AACf,WAAO;AAAA,EACT,OAAO;AACL,WAAO,OAAO,OAAO,CAAC,QAAQ,OAAO,MAAM,MAAM,OAAO,CAAC,CAAC,CAAC;AAAA,EAC7D;AACF;AAKA,MAAM,kBAAkB,OAAO,MAAM,MAAM,CAAC;AAW5C,MAAM,kBAAkB,CACtB,MACAA,OACA,MACA,MACA,OACA,OACA,KACA,KACA,SACG;AAEH,QAAM,SAAS,OAAO,MAAM,KAAK,CAAC;AAGlC,QAAM,EAAE,MAAM,WAAW,UAAUA,KAAI;AAGvC,SAAO,MAAM,MAAM,GAAG,KAAK,MAAM;AACjC,gBAAc,OAAO,MAAQ,CAAC,EAAE,KAAK,QAAQ,GAAG;AAChD,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,MAAM,EAAE,EAAE,KAAK,QAAQ,GAAG;AACxC,gBAAc,KAAK,MAAM,KAAK,QAAA,IAAY,GAAI,GAAG,EAAE,EAAE,KAAK,QAAQ,GAAG;AAGrE,SAAO,KAAK,YAAY,OAAO,EAAE,KAAK,QAAQ,GAAG;AAEjD,MAAI,SAAS,QAAQ;AACnB,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC,OAAO;AACL,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC;AACA,SAAO,MAAM,WAAW,KAAK,GAAG,OAAO;AACvC,SAAO,MAAM,MAAM,KAAK,GAAG,OAAO;AAClC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,QAAQ,KAAK,KAAK,MAAM;AAGrC,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,WAAO,OAAO,CAAC;AAAA,EACjB;AACA,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AAEtC,SAAO;AACT;AAWO,MAAM,kBAAkB,CAC7B,oBACA,iBACA,WAAyB;AAGzB,QAAM,oBAAoB,mBAAkB;AAE1C,qBAAiB,aAAa,oBAAoB;AAChD,cAAQ,eAAA;AAER,cAAQ,UAAU,MAAA;AAAA;AAAA,QAEhB,KAAK,QAAQ;AACX,gBAAM,mBAAmB,UAAU;AAEnC,cAAI,OAAO,qBAAqB,YAAY,OAAO,SAAS,gBAAgB,GAAG;AAE7E,kBAAM,eAAe,UAAU,gBAAgB;AAG/C,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,aAAa;AAAA,cACb,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAGN,kBAAM,0BAA0B,eAAe,YAAY;AAC3D,kBAAM;AAAA,UACR,OAAO;AAEL,kBAAM,UAAU;AAGhB,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,QAAQ;AAAA,cACR,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAEN,gBAAI,WAAW;AACf,oBAAQ,QAAQ,MAAA;AAAA;AAAA,cAEd,KAAK,aAAa;AAChB,iCAAiB,gBAAgB,QAAQ,WAAW;AAClD,0BAAQ,eAAA;AACR,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA;AAAA,cAEA,KAAK,YAAY;AACf,iCAAiB,SAAS,QAAQ,UAAU;AAC1C,0BAAQ,eAAA;AACR,wBAAM,eAAe,UAAU,KAAK;AACpC,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA,YAAA;AAIF,gBAAI,WAAW,QAAQ,GAAG;AACxB,sBAAQ,eAAA;AACR,oBAAM,OAAO,MAAM,MAAO,WAAW,KAAM,CAAC;AAAA,YAC9C;AAAA,UACF;AACA;AAAA,QACF;AAAA;AAAA,QAEA,KAAK,aAAa;AAEhB,gBAAM,iBAAiB;AAAA,YACrB;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,UAAA;AAEZ,gBAAM;AACN;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM;AAAA,EACR;AAEA,QAAM,KAAK,mBAAmB;AAE9B,UAAQ,IAAA;AAAA;AAAA,IAEN,KAAK,QAAQ;AAEX,aAAOE,OAAAA,SAAS,KAAK,kBAAA,GAAqB,EAAE,QAAQ;AAAA,IACtD;AAAA;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,aAAaS,KAAAA,WAAW,EAAE,OAAO,GAAG;AAE1C,YAAM,kBAAkBT,OAAAA,SAAS,KAAK,qBAAqB,EAAE,QAAQ;AAErE,sBAAgB,KAAK,UAAU;AAE/B,aAAO;AAAA,IACT;AAAA,EAAA;AAEJ;ACrRA,MAAM,kB
AAkB,CAAC,QAAgB,QAAgB,WAA2B;AAClF,QAAM,MAAM,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,OAAO,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AAC1F,SAAO,MAAM,SAAS,KAAK,CAAC,IAAI;AAClC;AASA,MAAM,cAAc,CAAC,QAAgB,QAAgB,WAA2B;AAC9E,SAAO,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AACtF;AASA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAiE;AAEjE,QAAM,SAAmB,CAAA;AACzB,MAAI,YAAY;AAEhB,SAAO,YAAY,MAAM;AACvB,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,UAAI,cAAc,GAAG;AACnB,eAAO;AAAA,MACT,OAAO;AACL,cAAM,IAAI,MAAM,sCAAsC,IAAI,eAAe,SAAS,QAAQ;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,KAAK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,aAAO,KAAK,KAAK;AACjB,mBAAa,MAAM;AAAA,IACrB,OAAO;AAEL,aAAO,KAAK,MAAM,SAAS,GAAG,MAAM,CAAC;AAErC,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,QAAQ,IAAI;AACnC;AAQA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAmD;AAEnD,MAAI,eAAe;AAEnB,SAAO,eAAe,MAAM;AAC1B,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,YAAM,IAAI,MAAM,8CAA8C,IAAI,mBAAmB,YAAY,QAAQ;AAAA,IAC3G;AAEA,UAAM,QAAQ,UAAU,KAAK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,sBAAgB,MAAM;AAAA,IACxB,OAAO;AAEL,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,qBAAe;AAAA,IACjB;AAAA,EACF;AACF;AAQA,MAAM,gCAAgC,OACpC,UACA,aACA,WAAoC;AAEpC,QAAM,WAAW,MAAO,cAAc,OAAQ;AAC9C,MAAI,UAAU,GAAG;AACf,UAAM,eAAe,UAAU,SAAS,MAAM;AAAA,EAChD;AACF;AA6BA,MAAM,iBAAiB,CAAC,WAA8C;AAEpE,MAAI,OAAO,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,QAAM,OAAO,YAAY,QAAQ,GAAG,GAAG;AACvC,QAAM,OAAO,gBAAgB,QAAQ,KAAK,CAAC;AAC3C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,OAAO,gBAAgB,QAAQ,KAAK,EAAE;AAC5C,QAAM,QAAQ,IAAI,KAAK,gBAAgB,QAAQ,KAAK,EAAE,IAAI,GAAI;AAC9D,QAAM,WAAW,gBAAgB,QAAQ,KAAK,CAAC;AAC/C,QAAM,WAAW,YAAY,QAAQ,KAAK,CAAC;AAC3C,QAAM,QAAQ,YAAY,QAAQ,KAAK,CAAC;AACxC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,SAAS,YAAY,QAAQ,KAAK,GAAG;AAG3C,MAAI,UAAU,SAAS;AACrB,UAAM,IAAI,MAAM,8BAA8B,KAAK,GAAG;AAAA,EACxD;AAGA,MAAI,gBAAgB;AACpB,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,QAAI,KAAK,OAAO,IAAI,KAAK;AACvB,uBAAiB;AAAA,IACnB,OAAO;AACL,uBAAiB,OAAO,CAAC;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,kBAAkB,UAAU;AAC9B,UAAM,IAAI,MAAM,8BAA8B,QAAQ,SAAS,aAAa,EAAE;AAAA,EAChF;AAGA,MAAIF,QAAO,SAAS,GAAG,MAAM,IAAI,IAAI,KAAK;AAC1C,MAAIA,MAAK,SAAS,GAAG,GAAG;AACtB,IAAAA,QAAOA,MAAK,MAAM,GAAG,EAAE;AAAA,EACzB;AAGA,QAAM,OAAO,aAAa,MAAM,cAAc;AAE9C,SAAO;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB;AAAA,IACA,UAAU;AAAA,EAAA;AAEd;AAKA,MAAM,8BAA8B,CAClC,UACA,WACmC;AACnC,QAAM,SAA8B,CAAA;AACpC,QAAM,WAAW,SAAS,OAAO,aAAa,EAAA;AAC9C,SAAO;AAAA,IACL,MAAM,YAAY;AAChB,cAAQ,eAAA;AACR,UAAI,OAAO,SAAS,GAAG;AACrB,eAAO,EAAE,OAAO,OAAO,MAAA,GAAU,MAAM,MAAA;AAAA,MACzC;AACA,aAAO,SAAS,KAAA;AAAA,IAClB;AAAA,IACA,QAAQ,OAAO,UAA4B;AACzC,UAAI,UAAU,QAAW;AACvB,eAAO,QAAQ,KAAK;AAAA,MACtB;AACA,aAAO,EAAE,OAAO,QAAW,MAAM,MAAA;AAAA,IACnC;AAAA,EAAA;AAEJ;AASA,MAAM,6BAA6B,CACjC,UACA,MACA,QACA,gBACa;AACb,QAAM,YAAY,mBAAkB;AAClC,QAAI,iBAAiB;AAErB,WAAO,iBAAiB,GAAG;AACzB,cAAQ,eAAA;AAER,YAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,UAAI,MAAM;AACR,cAAM,IAAI,MAAM,sCAAsC,IAAI,qBAAqB,cAAc,QAAQ;AAAA,MACvG;AAEA,YAAM,QAAQ,UAAU,KAAK;AAC7B,UAAI,MAAM,UAAU,gBAAgB;AAClC,0BAAkB,MAAM;AACxB,cAAM;AAAA,MACR,OAAO;AAEL,cAAM,SAAS,MAAM,SAAS,GAAG,cAAc;AAC/C,cAAM,SAAS,MAAM,SAAS,cAAc;AAC5C,yBAAiB;AAGjB,cAAM,SAAS,SAAS,MAAM;AAC9B,cAAM;AACN;AAAA,MACF;AAAA,IACF;AAGA,UAAM,8BAA8B,UAAU,MAAM,MAAM;AAG1D,gBAAY,WAAW;AAAA,EACzB;AAEA,SAAOE,OAAAA,SAAS,KAAK,UAA
A,GAAa,EAAE,QAAQ;AAC9C;AAWO,MAAM,qBAAqB,iBAChC,UACA,iBACA,QAAyE;AAEzE,QAAM,KAAK,mBAAmB;AAG9B,MAAI;AACJ,UAAQ,IAAA;AAAA,IACN,KAAK;AACH,YAAM,SAASU,KAAAA,aAAA;AACf,eAAS,KAAK,MAAM;AACpB,oBAAc;AACd;AAAA,IACF,KAAK;AAAA,IACL;AACE,oBAAc;AACd;AAAA,EAAA;AAIJ,QAAM,WAAW,4BAA4B,aAAa,MAAM;AAGhE,MAAI;AAGJ,SAAO,MAAM;AACX,YAAQ,eAAA;AAGR,QAAI,QAAQ,SAAS,UAAU,CAAC,OAAO,UAAU;AAI/C,YAAM,eAAe,UAAU,OAAO,MAAM,MAAM;AAElD,YAAM,8BAA8B,UAAU,OAAO,MAAM,MAAM;AAGjE,aAAO,WAAW;AAAA,IACpB;AAGA,QAAI;AACJ,QAAI;AACF,qBAAe,MAAM,eAAe,UAAU,KAAK,MAAM;AAAA,IAC3D,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,0BAA0B,GAAG;AAChF,cAAM,IAAI,MAAM,uCAAuC;AAAA,MACzD;AACA,YAAM;AAAA,IACR;AAEA,QAAI,iBAAiB,QAAW;AAC9B;AAAA,IACF;AAGA,aAAS,eAAe,YAAY;AACpC,QAAI,CAAC,QAAQ;AAEX,YAAM,cAAc,MAAM,eAAe,UAAU,KAAK,MAAM;AAC9D,UAAI,gBAAgB,UAAa,YAAY,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAChE;AAAA,MACF;AACA,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAEA,QAAI,OAAO,SAAS,aAAa;AAE/B,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,OAAO;AAAA,QACb,MAAM,OAAO;AAAA,QACb,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,OAAO,OAAO;AAAA,QACd,OAAO,OAAO;AAAA,QACd,MAAM,OAAO;AAAA,MAAA;AAAA,IAEjB,OAAO;AAEL,YAAM,gBAAgB;AAGtB,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,cAAc;AAAA,QACpB,MAAM,cAAc;AAAA,QACpB,KAAK,cAAc;AAAA,QACnB,KAAK,cAAc;AAAA,QACnB,OAAO,cAAc;AAAA,QACrB,OAAO,cAAc;AAAA,QACrB,MAAM,cAAc;AAAA,QACpB,YAAY,OAAO,SAAc;AAE/B,cAAI,cAAc,UAAU;AAC1B,kBAAM,IAAI,MAAM,oFAAoF;AAAA,UACtG;AAEA,kBAAQ,MAAA;AAAA;AAAA,YAEN,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO,WAAW,SAAS,MAAM;AAAA,YACnC;AAAA;AAAA,YAEA,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO;AAAA,YACT;AAAA;AAAA,YAEA,KAAK,YAAY;AAEf,oBAAMC,YAAW,2BAA2B,UAAU,cAAc,MAAM,QAAQ,aAAa;AAC/F,qBAAOA;AAAAA,YACT;AAAA,YACA;AACE,oBAAM,IAAI,MAAM,6BAA6B,IAAI,EAAE;AAAA,UAAA;AAAA,QAEzD;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AACF;;;;;;;;;;;"}
|
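The sources embedded in the map above (utils.ts, packer.ts, extractor.ts) show the 1.1.0 surface exercised by this release: `createEntryItemGenerator` (whose `relativePaths` argument is now optional), `createTarPacker`, `storeReaderToFile`, `createTarExtractor` and `extractTo`. A minimal round-trip sketch follows, assuming these functions are re-exported from the package root (the README usage shown earlier in this diff suggests they are); the file and directory paths are placeholders.

```typescript
import { createReadStream } from "fs";
import {
  createEntryItemGenerator,
  createTarPacker,
  storeReaderToFile,
  createTarExtractor,
  extractTo
} from "tar-vern";

const main = async () => {
  // Pack: in 1.1.0, omitting relativePaths makes the generator walk
  // "./project" recursively (see getAllFilesInDirectory in the map above).
  const entries = createEntryItemGenerator("./project");
  const tarball = createTarPacker(entries, "gzip");      // gzip-compressed tar stream
  await storeReaderToFile(tarball, "./project.tar.gz");

  // Extract: decompress and restore the tree under "./restored".
  const reader = createReadStream("./project.tar.gz");
  await extractTo(createTarExtractor(reader, "gzip"), "./restored");
};

main().catch(console.error);
```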
package/dist/index.d.ts
CHANGED
@@ -1,11 +1,11 @@
|
|
1
1
|
/*!
|
2
2
|
* name: tar-vern
|
3
|
-
* version: 1.0.0
|
3
|
+
* version: 1.1.0
|
4
4
|
* description: Tape archiver library for Typescript
|
5
5
|
* author: Kouji Matsui (@kekyo@mi.kekyo.net)
|
6
6
|
* license: MIT
|
7
7
|
* repository.url: https://github.com/kekyo/tar-vern.git
|
8
|
-
* git.commit.hash:
|
8
|
+
* git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
|
9
9
|
*/
|
10
10
|
|
11
11
|
export type * from './types';
|
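Only the banner changes in index.d.ts; the type surface (`export type * from './types'`) is untouched. For reference, a hedged sketch of importing those types, with names taken from the sources embedded in the map above:

```typescript
// Type-only imports; these names appear in ./types per the embedded sources.
import type { EntryItem, ExtractedEntryItem, CompressionTypes } from "tar-vern";
```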
package/dist/index.js
CHANGED
@@ -1,16 +1,16 @@
|
|
1
1
|
/*!
|
2
2
|
* name: tar-vern
|
3
|
-
* version: 1.
|
3
|
+
* version: 1.1.0
|
4
4
|
* description: Tape archiver library for Typescript
|
5
5
|
* author: Kouji Matsui (@kekyo@mi.kekyo.net)
|
6
6
|
* license: MIT
|
7
7
|
* repository.url: https://github.com/kekyo/tar-vern.git
|
8
|
-
* git.commit.hash:
|
8
|
+
* git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
|
9
9
|
*/
|
10
10
|
import { Readable } from "stream";
|
11
11
|
import { createGzip, createGunzip } from "zlib";
|
12
12
|
import { createReadStream, createWriteStream } from "fs";
|
13
|
-
import { stat, mkdir, writeFile } from "fs/promises";
|
13
|
+
import { stat, mkdir, writeFile, readdir } from "fs/promises";
|
14
14
|
import { pipeline } from "stream/promises";
|
15
15
|
import { join, dirname } from "path";
|
16
16
|
const MAX_NAME = 100;
|
@@ -206,9 +206,39 @@ const storeReaderToFile = async (reader, path, signal) => {
|
|
206
206
|
const writer = createWriteStream(path, { signal });
|
207
207
|
await pipeline(reader, writer, { signal });
|
208
208
|
};
|
209
|
+
const getAllFilesInDirectory = async (baseDir, signal) => {
|
210
|
+
const collectFiles = async (currentDir, relativePath) => {
|
211
|
+
signal?.throwIfAborted();
|
212
|
+
try {
|
213
|
+
const entries = await readdir(currentDir, { withFileTypes: true });
|
214
|
+
const result = [];
|
215
|
+
const tasks = entries.map(async (entry) => {
|
216
|
+
signal?.throwIfAborted();
|
217
|
+
const entryRelativePath = join(relativePath, entry.name);
|
218
|
+
if (entry.isDirectory()) {
|
219
|
+
const entryFullPath = join(currentDir, entry.name);
|
220
|
+
const directoryContents = await collectFiles(entryFullPath, entryRelativePath);
|
221
|
+
return [entryRelativePath, ...directoryContents];
|
222
|
+
} else {
|
223
|
+
return [entryRelativePath];
|
224
|
+
}
|
225
|
+
});
|
226
|
+
const allResults = await Promise.all(tasks);
|
227
|
+
for (const entryResults of allResults) {
|
228
|
+
result.push(...entryResults);
|
229
|
+
}
|
230
|
+
return result;
|
231
|
+
} catch (error) {
|
232
|
+
console.warn(`Warning: Could not read directory ${currentDir}:`, error);
|
233
|
+
return [];
|
234
|
+
}
|
235
|
+
};
|
236
|
+
return await collectFiles(baseDir, "");
|
237
|
+
};
|
209
238
|
const createEntryItemGenerator = async function* (baseDir, relativePaths, reflectStat, signal) {
|
210
239
|
const rs = reflectStat ?? "exceptName";
|
211
|
-
|
240
|
+
const pathsToProcess = relativePaths ?? await getAllFilesInDirectory(baseDir, signal);
|
241
|
+
for (const relativePath of pathsToProcess) {
|
212
242
|
signal?.throwIfAborted();
|
213
243
|
const fsPath = join(baseDir, relativePath);
|
214
244
|
try {
|
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
|
|
1
|
-
{"version":3,"file":"index.js","sources":["../src/utils.ts","../src/packer.ts","../src/extractor.ts"],"sourcesContent":["// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { createReadStream, createWriteStream } from \"fs\";\nimport { stat, mkdir, writeFile } from \"fs/promises\";\nimport { Readable } from \"stream\";\nimport { pipeline } from \"stream/promises\";\nimport { dirname, join } from \"path\";\nimport { CreateItemOptions, CreateReadableFileItemOptions, FileItem, DirectoryItem, ReflectStats, CreateDirectoryItemOptions, EntryItem, ExtractedEntryItem, ExtractedFileItem } from \"./types\";\n\n// Tar specification: name max 100 bytes, prefix max 155 bytes\nexport const MAX_NAME = 100;\nexport const MAX_PREFIX = 155;\n\n/**\n * Get the user/group name from the candidate name or ID\n * @param candidateName - The candidate user/group name\n * @param candidateId - The candidate user/group ID\n * @param reflectStat - Whether to reflect the stat (all, exceptName, none)\n * @returns The user/group name\n */\nconst getUName = (candidateName: string | undefined, candidateId: number, reflectStat: ReflectStats | undefined) => {\n return candidateName ?? (reflectStat === 'all' ? candidateId.toString() : 'root');\n}\n\n/**\n * Get a buffer from the string or Buffer\n * @param data - The data to get a buffer from\n * @returns A buffer\n */\nexport const getBuffer = (data: Buffer | string) => {\n return Buffer.isBuffer(data) ? data : Buffer.from(data, 'utf8');\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a DirectoryItem\n * @param path - The path to the directory in the tar archive\n * @param reflectStat - Whether to reflect optional stat of the file (mode, uid, gid, mtime. Default: 'none')\n * @param options - Metadata for the directory including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A DirectoryItem\n * @remarks When reflectStat is 'all' or 'exceptName', `options.directoryPath` must be provided.\n */\nexport const createDirectoryItem = async (\n path: string,\n reflectStat?: ReflectStats,\n options?: CreateDirectoryItemOptions,\n signal?: AbortSignal\n): Promise<DirectoryItem> => {\n const rs = reflectStat ?? 'none';\n\n if (rs !== 'none' && options?.directoryPath) {\n signal?.throwIfAborted();\n const stats = await stat(options.directoryPath);\n const mode = options?.mode ?? stats.mode;\n const uid = options?.uid ?? stats.uid;\n const gid = options?.gid ?? stats.gid;\n const date = options?.date ?? stats.mtime;\n const uname = getUName(options?.uname, stats.uid, rs);\n const gname = getUName(options?.gname, stats.gid, rs);\n return {\n kind: 'directory',\n path, mode, uname, gname, uid, gid, date,\n };\n } else {\n const mode = options?.mode ?? 0o755;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? 
new Date();\n const uname = getUName(options?.uname, undefined, rs);\n const gname = getUName(options?.gname, undefined, rs);\n return {\n kind: 'directory',\n path, mode, uname, gname, uid, gid, date,\n };\n }\n};\n\n/**\n * Create a FileItem from content data directly\n * @param path - The path to the file in the tar archive\n * @param content - Content data\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createFileItem = async (\n path: string,\n content: string | Buffer,\n options?: CreateItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n signal?.throwIfAborted();\n \n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 'root';\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content\n };\n};\n\n/**\n * Create a FileItem from a Readable stream\n * @param path - The path to the file in the tar archive\n * @param readable - The readable stream\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createReadableFileItem = async (\n path: string,\n readable: Readable,\n options?: CreateReadableFileItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 'root';\n\n // When length is not provided, calculate the total size by reading all chunks\n let length = options?.length;\n if (!length) {\n // Calculate the total size by reading all chunks\n const chunks: Buffer[] = [];\n length = 0;\n\n // Collect all chunks to calculate size\n for await (const chunk of readable) {\n signal?.throwIfAborted();\n const buffer = getBuffer(chunk);\n chunks.push(buffer);\n length += buffer.length;\n }\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable: Readable.from(chunks, { signal })\n }\n };\n } else {\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable\n }\n };\n }\n};\n\n/**\n * Create a FileItem from a generator\n * @param path - The path to the file in the tar archive\n * @param generator - The generator to read the file from\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createGeneratorFileItem = async (\n path: string,\n generator: AsyncGenerator<Buffer, void, unknown>,\n options?: CreateReadableFileItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 
'root';\n\n // When length is not provided, calculate the total size by reading all chunks\n let length = options?.length;\n if (!length) {\n // Calculate the total size by reading all chunks\n const chunks: Buffer[] = [];\n length = 0;\n\n // Collect all chunks to calculate size\n for await (const chunk of generator) {\n signal?.throwIfAborted();\n const buffer = getBuffer(chunk);\n chunks.push(buffer);\n length += buffer.length;\n }\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable: Readable.from(chunks, { signal })\n }\n };\n } else {\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'generator',\n length,\n generator\n }\n };\n }\n};\n\n/**\n * Create a FileItem from a local file path\n * @param path - The path to the file in the tar archive\n * @param filePath - The path to the file to read from real filesystem\n * @param reflectStat - Whether to reflect optional stat of the file (mode, uid, gid, mtime. Default: 'exceptName')\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createReadFileItem = async (\n path: string,\n filePath: string,\n reflectStat?: ReflectStats,\n options?: CreateItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const rs = reflectStat ?? 'exceptName';\n\n // Get file stats to extract metadata\n signal?.throwIfAborted();\n const stats = await stat(filePath);\n // Create readable stream from file\n const reader = createReadStream(filePath, { signal });\n\n const mode = options?.mode ?? (rs !== 'none' ? stats.mode : undefined);\n const uid = options?.uid ?? (rs !== 'none' ? stats.uid : undefined);\n const gid = options?.gid ?? (rs !== 'none' ? stats.gid : undefined);\n const date = options?.date ?? (rs !== 'none' ? 
stats.mtime : undefined);\n\n const uname = getUName(options?.uname, stats.uid, rs);\n const gname = getUName(options?.gname, stats.gid, rs);\n\n // Create a FileItem\n return await createReadableFileItem(path, reader, {\n length: stats.size, mode, uname, gname, uid, gid, date,\n }, signal);\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Store a readable stream to a file\n * @param reader - The readable stream\n * @param path - The path to the file to store the readable stream to\n * @param signal - Optional abort signal to cancel the operation\n * @returns A promise that resolves when the stream is finished\n */\nexport const storeReaderToFile = async (reader: Readable, path: string, signal?: AbortSignal) => {\n const writer = createWriteStream(path, { signal });\n await pipeline(reader, writer, { signal });\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create an async generator that yields EntryItem objects from filesystem paths\n * @param baseDir - Base directory path for resolving relative paths\n * @param relativePaths - Array of relative paths to include in the tar archive\n * @param reflectStat - Whether to reflect file stats (Default: 'exceptName')\n * @param signal - Optional abort signal to cancel the operation\n * @returns Async generator that yields EntryItem objects\n */\nexport const createEntryItemGenerator = async function* (\n baseDir: string,\n relativePaths: string[],\n reflectStat?: ReflectStats,\n signal?: AbortSignal\n): AsyncGenerator<EntryItem, void, unknown> {\n const rs = reflectStat ?? 'exceptName';\n \n for (const relativePath of relativePaths) {\n signal?.throwIfAborted();\n \n const fsPath = join(baseDir, relativePath);\n \n try {\n signal?.throwIfAborted();\n const stats = await stat(fsPath);\n \n if (stats.isDirectory()) {\n // Create directory entry\n yield await createDirectoryItem(relativePath, rs, {\n directoryPath: fsPath\n }, signal);\n } else if (stats.isFile()) {\n // Create file entry\n yield await createReadFileItem(relativePath, fsPath, rs, undefined, signal);\n }\n } catch (error) {\n // Skip files that can't be accessed (permissions, etc.)\n console.warn(`Warning: Could not access ${fsPath}:`, error);\n continue;\n }\n }\n};\n\n/**\n * Extract entries from a tar extractor to a directory on the filesystem\n * @param iterator - Async generator of extracted entry items\n * @param basePath - Base directory path where entries will be extracted\n * @param signal - Optional abort signal to cancel the operation\n * @returns Promise that resolves when extraction is complete\n */\nexport const extractTo = async (\n iterator: AsyncGenerator<ExtractedEntryItem, void, unknown>,\n basePath: string,\n signal?: AbortSignal\n): Promise<void> => {\n for await (const entry of iterator) {\n signal?.throwIfAborted();\n \n const targetPath = join(basePath, entry.path);\n \n if (entry.kind === 'directory') {\n // Create directory\n try {\n signal?.throwIfAborted();\n await mkdir(targetPath, { recursive: true, mode: entry.mode });\n } catch (error) {\n // Directory might already exist, which is fine\n if ((error as any).code !== 'EEXIST') {\n throw error;\n }\n }\n } else if (entry.kind === 'file') {\n // Create parent directories if they don't exist\n const parentDir = dirname(targetPath);\n signal?.throwIfAborted();\n await mkdir(parentDir, { recursive: true });\n \n // Extract file content and write to filesystem\n const fileEntry = entry as 
ExtractedFileItem;\n const content = await fileEntry.getContent('buffer');\n await writeFile(targetPath, content, { mode: entry.mode, signal });\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGzip } from \"zlib\";\nimport { getBuffer, MAX_NAME, MAX_PREFIX } from \"./utils\";\nimport { CompressionTypes, EntryItem, EntryItemContent } from \"./types\";\n\n/**\n * Get the byte length of a string in UTF-8\n * @param str - The string to get the byte length of\n * @returns The byte length of the string\n */\nconst utf8ByteLength = (str: string) => {\n return Buffer.byteLength(str, \"utf8\");\n}\n\n/**\n * Truncate a string to a maximum byte length in UTF-8\n * @param str - The string to truncate\n * @param maxBytes - The maximum byte length\n * @returns The truncated string\n */\nconst truncateUtf8Safe = (str: string, maxBytes: number) => {\n let total = 0;\n let i = 0;\n while (i < str.length) {\n const codePoint = str.codePointAt(i)!;\n const char = String.fromCodePoint(codePoint);\n const charBytes = Buffer.byteLength(char, \"utf8\");\n if (total + charBytes > maxBytes) break;\n total += charBytes;\n i += char.length;\n }\n return str.slice(0, i);\n}\n\n/**\n * Split a path into a name and a prefix\n * @param path - The path to split\n * @returns The name and prefix\n */\nconst splitPath = (path: string) => {\n if (utf8ByteLength(path) <= MAX_NAME) {\n return { prefix: \"\", name: path };\n }\n\n // Split by '/' and find the part that fits in name from the end\n const parts = path.split(\"/\");\n let name = parts.pop() ?? \"\";\n let prefix = parts.join(\"/\");\n\n // Truncate if name exceeds 100 bytes\n if (utf8ByteLength(name) > MAX_NAME) {\n name = truncateUtf8Safe(name, MAX_NAME);\n }\n\n // Truncate if prefix exceeds 155 bytes\n while (utf8ByteLength(prefix) > MAX_PREFIX) {\n prefix = truncateUtf8Safe(prefix, MAX_PREFIX);\n }\n\n return { prefix, name };\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Get octal bytes from a number\n * @param value - The number to get octal bytes from\n * @param length - The length of the octal bytes\n * @returns The octal bytes\n */\nconst getOctalBytes = (value: number, length: number) => {\n const str = value.toString(8).padStart(length - 1, \"0\") + \"\\0\";\n return Buffer.from(str, \"ascii\");\n};\n\n/**\n * Get padded bytes from a buffer\n * @param buffer - The buffer to get padded bytes from\n * @returns The padded bytes\n */\nconst getPaddedBytes = (buffer: Buffer) => {\n const extra = buffer.length % 512;\n if (extra === 0) {\n return buffer;\n } else {\n return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);\n }\n}\n\n/**\n * The terminator bytes\n */\nconst terminatorBytes = Buffer.alloc(1024, 0);\n\n/**\n * Create a tar header\n * @param type - The type of the entry\n * @param path - The path of the entry\n * @param size - The size of the entry\n * @param mode - The mode of the entry\n * @param uname - The user name of the entry\n * @param gname - The group name of the entry\n */\nconst createTarHeader = (\n type: 'file' | 'directory',\n path: string,\n size: number,\n mode: number,\n uname: string,\n gname: string,\n uid: number,\n gid: number,\n date: Date\n) => {\n // Allocate header bytes\n const buffer = Buffer.alloc(512, 0);\n\n // Split path into name and prefix\n const { name, prefix } = 
splitPath(path);\n\n // Write name, mode, uid, gid, size, mtime, typeflag, prefix, checksum\n buffer.write(name, 0, 100, \"utf8\");\n getOctalBytes(mode & 0o7777, 8).copy(buffer, 100);\n getOctalBytes(uid, 8).copy(buffer, 108);\n getOctalBytes(gid, 8).copy(buffer, 116);\n getOctalBytes(size, 12).copy(buffer, 124);\n getOctalBytes(Math.floor(date.getTime() / 1000), 12).copy(buffer, 136);\n\n // Check sum space\n Buffer.from(\" \", \"ascii\").copy(buffer, 148);\n\n if (type === 'file') {\n buffer.write(\"0\", 156, 1, \"ascii\"); // typeflag (file)\n } else {\n buffer.write(\"5\", 156, 1, \"ascii\"); // typeflag (directory)\n }\n buffer.write(\"ustar\\0\", 257, 6, \"ascii\");\n buffer.write(\"00\", 263, 2, \"ascii\"); // version\n buffer.write(uname, 265, 32, \"utf8\");\n buffer.write(gname, 297, 32, \"utf8\");\n buffer.write(prefix, 345, 155, \"utf8\"); // Path prefix\n\n // Calculate check sum\n let sum = 0;\n for (let i = 0; i < 512; i++) {\n sum += buffer[i];\n }\n getOctalBytes(sum, 8).copy(buffer, 148);\n\n return buffer;\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar packer\n * @param entryItemGenerator - The async generator of the entry items\n * @param compressionType - The compression type to use (Default: 'none')\n * @param signal - The abort signal to cancel the tar packer\n * @returns Readable stream of the tar packer\n */\nexport const createTarPacker = (\n entryItemGenerator: AsyncGenerator<EntryItem, void, unknown>,\n compressionType?: CompressionTypes,\n signal?: AbortSignal) => {\n\n // Create async generator function from entry item iterator\n const entryItemIterator = async function*() {\n // Iterate over the entry items\n for await (const entryItem of entryItemGenerator) {\n signal?.throwIfAborted();\n\n switch (entryItem.kind) {\n // Entry is a file\n case 'file': {\n const entryItemContent = entryItem.content;\n // Content is a string or buffer\n if (typeof entryItemContent === 'string' || Buffer.isBuffer(entryItemContent)) {\n // Get content bytes from string or buffer\n const contentBytes = getBuffer(entryItemContent);\n\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n contentBytes.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n // Content bytes to adjust padding space and produce\n const totalPaddedContentBytes = getPaddedBytes(contentBytes);\n yield totalPaddedContentBytes;\n } else {\n // Assert that this is EntryItemContent, not FileItemReader (packer doesn't handle FileItemReader)\n const content = entryItemContent as EntryItemContent;\n \n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n content.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n let position = 0;\n switch (content.kind) {\n // Content is a generator\n case 'generator': {\n for await (const contentBytes of content.generator) {\n signal?.throwIfAborted();\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n // Content is a readable stream\n case 'readable': {\n for await (const chunk of content.readable) {\n signal?.throwIfAborted();\n const contentBytes = getBuffer(chunk);\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n }\n\n // Padding space\n if (position % 512 !== 0) 
{\n signal?.throwIfAborted();\n yield Buffer.alloc(512 - (position % 512), 0);\n }\n }\n break;\n }\n // Entry is a directory\n case 'directory': {\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'directory',\n entryItem.path,\n 0,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date\n );\n yield tarHeaderBytes;\n break;\n }\n }\n }\n\n // Terminates for tar stream\n yield terminatorBytes;\n };\n\n const ct = compressionType ?? 'none';\n\n switch (ct) {\n // No compression\n case 'none': {\n // Create readable stream from entry item iterator\n return Readable.from(entryItemIterator(), { signal });\n }\n // Gzip compression\n case 'gzip': {\n // Create gzip stream\n const gzipStream = createGzip({ level: 9 });\n // Create readable stream from entry item iterator\n const entryItemStream = Readable.from(entryItemIterator(), { signal });\n // Pipe the entry item stream to the gzip stream\n entryItemStream.pipe(gzipStream);\n // Return the gzip stream\n return gzipStream;\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGunzip } from \"zlib\";\nimport { CompressionTypes, ExtractedDirectoryItem, ExtractedEntryItem, ExtractedFileItem } from \"./types\";\nimport { getBuffer } from \"./utils\";\n\n/**\n * Parse octal bytes to number\n * @param buffer - The buffer containing octal bytes\n * @param offset - The offset in the buffer\n * @param length - The length of the octal bytes\n * @returns The parsed number\n */\nconst parseOctalBytes = (buffer: Buffer, offset: number, length: number): number => {\n const str = buffer.subarray(offset, offset + length).toString('ascii').replace(/\\0/g, '').trim();\n return str ? 
parseInt(str, 8) : 0;\n};\n\n/**\n * Parse string from buffer\n * @param buffer - The buffer containing the string\n * @param offset - The offset in the buffer\n * @param length - The length of the string\n * @returns The parsed string\n */\nconst parseString = (buffer: Buffer, offset: number, length: number): string => {\n return buffer.subarray(offset, offset + length).toString('utf8').replace(/\\0/g, '').trim();\n};\n\n/**\n * Read exact number of bytes from stream\n * @param iterator - The async iterator\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns The buffer containing the read bytes\n */\nconst readExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<Buffer | undefined> => {\n\n const chunks: Buffer[] = [];\n let totalRead = 0;\n\n while (totalRead < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n if (totalRead === 0) {\n return undefined; // No data at all\n } else {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);\n }\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalRead;\n \n if (chunk.length <= needed) {\n chunks.push(chunk);\n totalRead += chunk.length;\n } else {\n // We read more than needed, split the chunk\n chunks.push(chunk.subarray(0, needed));\n // Put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalRead = size;\n }\n }\n\n return Buffer.concat(chunks, size);\n};\n\n/**\n * Skip exact number of bytes from stream without buffering\n * @param iterator - The async iterator\n * @param size - The number of bytes to skip\n * @param signal - The abort signal\n */\nconst skipExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<void> => {\n\n let totalSkipped = 0;\n\n while (totalSkipped < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalSkipped;\n \n if (chunk.length <= needed) {\n totalSkipped += chunk.length;\n } else {\n // We read more than needed, put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalSkipped = size;\n }\n }\n};\n\n/**\n * Iterator will be skip padding bytes.\n * @param iterator - Async iterator\n * @param contentSize - Total content size to calculate boundary position\n * @param signal - Abort signal\n */\nconst skipPaddingBytesTo512Boundary = async (\n iterator: AsyncIterator<string | Buffer>,\n contentSize: number,\n signal: AbortSignal | undefined) => {\n // Skip padding bytes to next 512-byte boundary\n const padding = (512 - (contentSize % 512)) % 512;\n if (padding > 0) {\n await skipExactBytes(iterator, padding, signal);\n }\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Tar file/directory entry item.\n */\ninterface EntryItemInfo {\n readonly kind: 'file' | 'directory';\n readonly path: string;\n readonly size: number;\n readonly mode: number;\n readonly uid: number;\n readonly gid: number;\n readonly mtime: Date;\n readonly uname: string;\n readonly gname: string;\n readonly checksum: number;\n /**\n * This entry (file) item is consumed.\n */\n consumed: boolean;\n}\n\n/**\n * Parse tar 
header from buffer\n * @param buffer - The buffer containing the tar header\n * @returns The parsed entry information or null if end of archive\n */\nconst parseTarHeader = (buffer: Buffer): EntryItemInfo | undefined => {\n // Check if this is the end of archive (all zeros)\n if (buffer.every(b => b === 0)) {\n return undefined;\n }\n\n // Parse header fields\n const name = parseString(buffer, 0, 100);\n const mode = parseOctalBytes(buffer, 100, 8);\n const uid = parseOctalBytes(buffer, 108, 8);\n const gid = parseOctalBytes(buffer, 116, 8);\n const size = parseOctalBytes(buffer, 124, 12);\n const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1000);\n const checksum = parseOctalBytes(buffer, 148, 8);\n const typeflag = parseString(buffer, 156, 1);\n const magic = parseString(buffer, 257, 6);\n const uname = parseString(buffer, 265, 32);\n const gname = parseString(buffer, 297, 32);\n const prefix = parseString(buffer, 345, 155);\n\n // Verify magic (should be \"ustar\" for POSIX tar)\n if (magic !== 'ustar') {\n throw new Error(`Invalid tar format: magic=\"${magic}\"`);\n }\n\n // Calculate checksum\n let calculatedSum = 0;\n for (let i = 0; i < 512; i++) {\n if (i >= 148 && i < 156) {\n calculatedSum += 32; // Space character\n } else {\n calculatedSum += buffer[i];\n }\n }\n\n if (calculatedSum !== checksum) {\n throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);\n }\n\n // Construct full path and remove trailing slash for directories\n let path = prefix ? `${prefix}/${name}` : name;\n if (path.endsWith('/')) {\n path = path.slice(0, -1);\n }\n\n // Determine type\n const kind = typeflag === '5' ? 'directory' : 'file';\n\n return {\n kind,\n path,\n size,\n mode,\n uid,\n gid,\n mtime,\n uname: uname || uid.toString(),\n gname: gname || gid.toString(),\n checksum,\n consumed: false\n };\n};\n\n/**\n * Create a buffered async iterator that allows returning data\n */\nconst createBufferedAsyncIterator = (\n iterable: AsyncIterable<string | Buffer>,\n signal: AbortSignal | undefined\n): AsyncIterator<string | Buffer> => {\n const buffer: (string | Buffer)[] = [];\n const iterator = iterable[Symbol.asyncIterator]();\n return {\n next: async () => {\n signal?.throwIfAborted();\n if (buffer.length > 0) {\n return { value: buffer.shift()!, done: false };\n }\n return iterator.next();\n },\n return: async (value?: string | Buffer) => {\n if (value !== undefined) {\n buffer.unshift(value);\n }\n return { value: undefined, done: false };\n }\n };\n};\n\n/**\n * Create a readable stream from an async iterator with size limit\n * @param iterator - The async iterator to read from\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns Readable stream\n */\nconst createReadableFromIterator = (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined,\n consumedRef: { consumed: boolean }\n): Readable => {\n const generator = async function*() {\n let remainingBytes = size;\n \n while (remainingBytes > 0) {\n signal?.throwIfAborted();\n\n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);\n }\n\n const chunk = getBuffer(value);\n if (chunk.length <= remainingBytes) {\n remainingBytes -= chunk.length;\n yield chunk;\n } else {\n // We read more than needed\n const needed = chunk.subarray(0, remainingBytes);\n const excess = chunk.subarray(remainingBytes);\n remainingBytes = 0;\n 
\n // Return excess data to the iterator\n await iterator.return?.(excess);\n yield needed;\n break;\n }\n }\n\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, size, signal);\n\n // Finished to consume\n consumedRef.consumed = true;\n };\n\n return Readable.from(generator(), { signal });\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar extractor\n * @param readable - The readable stream containing tar data\n * @param compressionType - The compression type (default: 'none')\n * @param signal - The abort signal\n * @returns Async generator of entry items\n */\nexport const createTarExtractor = async function* (\n readable: Readable,\n compressionType?: CompressionTypes,\n signal?: AbortSignal): AsyncGenerator<ExtractedEntryItem, void, unknown> {\n\n const ct = compressionType ?? 'none';\n\n // Apply decompression if needed\n let inputStream: Readable;\n switch (ct) {\n case 'gzip':\n const gunzip = createGunzip();\n readable.pipe(gunzip);\n inputStream = gunzip;\n break;\n case 'none':\n default:\n inputStream = readable;\n break;\n }\n\n // Get async iterator from the stream\n const iterator = createBufferedAsyncIterator(inputStream, signal);\n\n // Last entry item\n let header: EntryItemInfo | undefined;\n\n // For each tar items\n while (true) {\n signal?.throwIfAborted();\n\n // Did not consume last file item yielding?\n if (header?.kind === 'file' && !header.consumed) {\n // Have to skip the file contents and boundary\n\n // Skip entire contents without buffering\n await skipExactBytes(iterator, header.size, signal);\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, header.size, signal);\n\n // Mark consumed\n header.consumed = true;\n }\n\n // Read header (512 bytes)\n let headerBuffer: Buffer | undefined;\n try {\n headerBuffer = await readExactBytes(iterator, 512, signal);\n } catch (error) {\n if (error instanceof Error && error.message.includes('Unexpected end of stream')) {\n throw new Error('Invalid tar format: incomplete header');\n }\n throw error;\n }\n \n if (headerBuffer === undefined) {\n break; // End of stream\n }\n\n // Parse header\n header = parseTarHeader(headerBuffer);\n if (!header) {\n // Check for second terminator block\n const secondBlock = await readExactBytes(iterator, 512, signal);\n if (secondBlock === undefined || secondBlock.every(b => b === 0)) {\n break; // Proper end of archive\n }\n throw new Error('Invalid tar format: expected terminator block');\n }\n\n if (header.kind === 'directory') {\n // Yield directory entry\n yield {\n kind: 'directory',\n path: header.path,\n mode: header.mode,\n uid: header.uid,\n gid: header.gid,\n uname: header.uname,\n gname: header.gname,\n date: header.mtime\n } as ExtractedDirectoryItem;\n } else {\n // Capture current header to avoid closure issues\n const currentHeader = header;\n \n // Yield file entry with lazy getContent\n yield {\n kind: 'file',\n path: currentHeader.path,\n mode: currentHeader.mode,\n uid: currentHeader.uid,\n gid: currentHeader.gid,\n uname: currentHeader.uname,\n gname: currentHeader.gname,\n date: currentHeader.mtime,\n getContent: async (type: any) => {\n // Is multiple called\n if (currentHeader.consumed) {\n throw new Error('Content has already been consumed. 
Multiple calls to getContent are not supported.');\n }\n\n switch (type) {\n // For string\n case 'string': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer.toString('utf8');\n }\n // For buffer\n case 'buffer': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer;\n }\n // For Readble stream\n case 'readable': {\n // Get Readble object (to delegate)\n const readable = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);\n return readable;\n }\n default:\n throw new Error(`Unsupported content type: ${type}`);\n }\n }\n } as ExtractedFileItem;\n }\n }\n};\n"],"names":["readable"],"mappings":";;;;;;AAaO,MAAM,WAAW;AACjB,MAAM,aAAa;AAS1B,MAAM,WAAW,CAAC,eAAmC,aAAqB,gBAA0C;AAClH,SAAO,kBAAkB,gBAAgB,QAAQ,YAAY,aAAa;AAC5E;AAOO,MAAM,YAAY,CAAC,SAA0B;AAClD,SAAO,OAAO,SAAS,IAAI,IAAI,OAAO,OAAO,KAAK,MAAM,MAAM;AAChE;AAaO,MAAM,sBAAsB,OACjC,MACA,aACA,SACA,WAC2B;AAC3B,QAAM,KAAK,eAAe;AAE1B,MAAI,OAAO,UAAU,SAAS,eAAe;AAC3C,YAAQ,eAAA;AACR,UAAM,QAAQ,MAAM,KAAK,QAAQ,aAAa;AAC9C,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC,OAAO;AACL,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAClC,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC;AACF;AAUO,MAAM,iBAAiB,OAC5B,MACA,SACA,SACA,WACsB;AACtB,UAAQ,eAAA;AAER,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,IACpC;AAAA,EAAA;AAEJ;AAUO,MAAM,yBAAyB,OACpC,MACA,UACA,SACA,WACsB;AACtB,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,UAAU;AAClC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAU,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAUO,MAAM,0BAA0B,OACrC,MACA,WACA,SACA,WACsB;AACtB,Q
AAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,WAAW;AACnC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAU,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAWO,MAAM,qBAAqB,OAChC,MACA,UACA,aACA,SACA,WACsB;AACtB,QAAM,KAAK,eAAe;AAG1B,UAAQ,eAAA;AACR,QAAM,QAAQ,MAAM,KAAK,QAAQ;AAEjC,QAAM,SAAS,iBAAiB,UAAU,EAAE,QAAQ;AAEpD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,OAAO;AAC5D,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,QAAQ;AAE7D,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AAGpD,SAAO,MAAM,uBAAuB,MAAM,QAAQ;AAAA,IAChD,QAAQ,MAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,EAAA,GACjD,MAAM;AACX;AAWO,MAAM,oBAAoB,OAAO,QAAkB,MAAc,WAAyB;AAC/F,QAAM,SAAS,kBAAkB,MAAM,EAAE,QAAQ;AACjD,QAAM,SAAS,QAAQ,QAAQ,EAAE,QAAQ;AAC3C;AAYO,MAAM,2BAA2B,iBACtC,SACA,eACA,aACA,QAC0C;AAC1C,QAAM,KAAK,eAAe;AAE1B,aAAW,gBAAgB,eAAe;AACxC,YAAQ,eAAA;AAER,UAAM,SAAS,KAAK,SAAS,YAAY;AAEzC,QAAI;AACF,cAAQ,eAAA;AACR,YAAM,QAAQ,MAAM,KAAK,MAAM;AAE/B,UAAI,MAAM,eAAe;AAEvB,cAAM,MAAM,oBAAoB,cAAc,IAAI;AAAA,UAChD,eAAe;AAAA,QAAA,GACd,MAAM;AAAA,MACX,WAAW,MAAM,UAAU;AAEzB,cAAM,MAAM,mBAAmB,cAAc,QAAQ,IAAI,QAAW,MAAM;AAAA,MAC5E;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,KAAK,6BAA6B,MAAM,KAAK,KAAK;AAC1D;AAAA,IACF;AAAA,EACF;AACF;AASO,MAAM,YAAY,OACvB,UACA,UACA,WACkB;AAClB,mBAAiB,SAAS,UAAU;AAClC,YAAQ,eAAA;AAER,UAAM,aAAa,KAAK,UAAU,MAAM,IAAI;AAE5C,QAAI,MAAM,SAAS,aAAa;AAE9B,UAAI;AACF,gBAAQ,eAAA;AACR,cAAM,MAAM,YAAY,EAAE,WAAW,MAAM,MAAM,MAAM,MAAM;AAAA,MAC/D,SAAS,OAAO;AAEd,YAAK,MAAc,SAAS,UAAU;AACpC,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF,WAAW,MAAM,SAAS,QAAQ;AAEhC,YAAM,YAAY,QAAQ,UAAU;AACpC,cAAQ,eAAA;AACR,YAAM,MAAM,WAAW,EAAE,WAAW,MAAM;AAG1C,YAAM,YAAY;AAClB,YAAM,UAAU,MAAM,UAAU,WAAW,QAAQ;AACnD,YAAM,UAAU,YAAY,SAAS,EAAE,MAAM,MAAM,MAAM,QAAQ;AAAA,IACnE;AAAA,EACF;AACF;ACrWA,MAAM,iBAAiB,CAAC,QAAgB;AACtC,SAAO,OAAO,WAAW,KAAK,MAAM;AACtC;AAQA,MAAM,mBAAmB,CAAC,KAAa,aAAqB;AAC1D,MAAI,QAAQ;AACZ,MAAI,IAAI;AACR,SAAO,IAAI,IAAI,QAAQ;AACrB,UAAM,YAAY,IAAI,YAAY,CAAC;AACnC,UAAM,OAAO,OAAO,cAAc,SAAS;AAC3C,UAAM,YAAY,OAAO,WAAW,MAAM,MAAM;AAChD,QAAI,QAAQ,YAAY,SAAU;AAClC,aAAS;AACT,SAAK,KAAK;AAAA,EACZ;AACA,SAAO,IAAI,MAAM,GAAG,CAAC;AACvB;AAOA,MAAM,YAAY,CAAC,SAAiB;AAClC,MAAI,eAAe,IAAI,KAAK,UAAU;AACpC,WAAO,EAAE,QAAQ,IAAI,MAAM,KAAA;AAAA,EAC7B;AAGA,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,OAAO,MAAM,IAAA,KAAS;AAC1B,MAAI,SAAS,MAAM,KAAK,GAAG;AAG3B,MAAI,eAAe,IAAI,IAAI,UAAU;AACnC,WAAO,iBAAiB,MAAM,QAAQ;AAAA,EACxC;AAGA,SAAO,eAAe,MAAM,IAAI,YAAY;AAC1C,aAAS,iBAAiB,QAAQ,UAAU;AAAA,EAC9C;AAEA,SAAO,EAAE,QAAQ,KAAA;AACnB;AAUA,MAAM,gBAAgB,CAAC,OAAe,WAAmB;AACvD,QAAM,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,SAAS,GAAG,GAAG,IAAI;AAC1D,SAAO,OAAO,KAAK,KAAK,OAAO;AACjC;AAOA,MAAM,iBAAiB,CAAC,WAAmB;AACzC,QAAM,QAAQ,OAAO,SAAS;AAC9B,MAAI,UAAU,GAAG;AACf,WAAO;AAAA,EACT,OAAO;AACL,WAAO,OAAO,OAAO,CAAC,QAAQ,OAAO,MAAM,MAAM,OAAO,CAAC,CAAC,CAAC;AAAA,EAC7D;AACF;AAKA,MAAM,kBAAkB,OAAO,MAAM,MAAM,CAAC;AAW5C,MAAM,kBAAkB,CACtB,MACA,MACA,MACA,MACA,OACA,OACA,KACA,KACA,SACG;AAEH,QAAM,S
AAS,OAAO,MAAM,KAAK,CAAC;AAGlC,QAAM,EAAE,MAAM,WAAW,UAAU,IAAI;AAGvC,SAAO,MAAM,MAAM,GAAG,KAAK,MAAM;AACjC,gBAAc,OAAO,MAAQ,CAAC,EAAE,KAAK,QAAQ,GAAG;AAChD,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,MAAM,EAAE,EAAE,KAAK,QAAQ,GAAG;AACxC,gBAAc,KAAK,MAAM,KAAK,QAAA,IAAY,GAAI,GAAG,EAAE,EAAE,KAAK,QAAQ,GAAG;AAGrE,SAAO,KAAK,YAAY,OAAO,EAAE,KAAK,QAAQ,GAAG;AAEjD,MAAI,SAAS,QAAQ;AACnB,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC,OAAO;AACL,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC;AACA,SAAO,MAAM,WAAW,KAAK,GAAG,OAAO;AACvC,SAAO,MAAM,MAAM,KAAK,GAAG,OAAO;AAClC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,QAAQ,KAAK,KAAK,MAAM;AAGrC,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,WAAO,OAAO,CAAC;AAAA,EACjB;AACA,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AAEtC,SAAO;AACT;AAWO,MAAM,kBAAkB,CAC7B,oBACA,iBACA,WAAyB;AAGzB,QAAM,oBAAoB,mBAAkB;AAE1C,qBAAiB,aAAa,oBAAoB;AAChD,cAAQ,eAAA;AAER,cAAQ,UAAU,MAAA;AAAA;AAAA,QAEhB,KAAK,QAAQ;AACX,gBAAM,mBAAmB,UAAU;AAEnC,cAAI,OAAO,qBAAqB,YAAY,OAAO,SAAS,gBAAgB,GAAG;AAE7E,kBAAM,eAAe,UAAU,gBAAgB;AAG/C,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,aAAa;AAAA,cACb,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAGN,kBAAM,0BAA0B,eAAe,YAAY;AAC3D,kBAAM;AAAA,UACR,OAAO;AAEL,kBAAM,UAAU;AAGhB,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,QAAQ;AAAA,cACR,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAEN,gBAAI,WAAW;AACf,oBAAQ,QAAQ,MAAA;AAAA;AAAA,cAEd,KAAK,aAAa;AAChB,iCAAiB,gBAAgB,QAAQ,WAAW;AAClD,0BAAQ,eAAA;AACR,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA;AAAA,cAEA,KAAK,YAAY;AACf,iCAAiB,SAAS,QAAQ,UAAU;AAC1C,0BAAQ,eAAA;AACR,wBAAM,eAAe,UAAU,KAAK;AACpC,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA,YAAA;AAIF,gBAAI,WAAW,QAAQ,GAAG;AACxB,sBAAQ,eAAA;AACR,oBAAM,OAAO,MAAM,MAAO,WAAW,KAAM,CAAC;AAAA,YAC9C;AAAA,UACF;AACA;AAAA,QACF;AAAA;AAAA,QAEA,KAAK,aAAa;AAEhB,gBAAM,iBAAiB;AAAA,YACrB;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,UAAA;AAEZ,gBAAM;AACN;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM;AAAA,EACR;AAEA,QAAM,KAAK,mBAAmB;AAE9B,UAAQ,IAAA;AAAA;AAAA,IAEN,KAAK,QAAQ;AAEX,aAAO,SAAS,KAAK,kBAAA,GAAqB,EAAE,QAAQ;AAAA,IACtD;AAAA;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,aAAa,WAAW,EAAE,OAAO,GAAG;AAE1C,YAAM,kBAAkB,SAAS,KAAK,qBAAqB,EAAE,QAAQ;AAErE,sBAAgB,KAAK,UAAU;AAE/B,aAAO;AAAA,IACT;AAAA,EAAA;AAEJ;ACrRA,MAAM,kBAAkB,CAAC,QAAgB,QAAgB,WAA2B;AAClF,QAAM,MAAM,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,OAAO,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AAC1F,SAAO,MAAM,SAAS,KAAK,CAAC,IAAI;AAClC;AASA,MAAM,cAAc,CAAC,QAAgB,QAAgB,WAA2B;AAC9E,SAAO,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AACtF;AASA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAiE;AAEjE,QAAM,SAAmB,CAAA;AACzB,MAAI,YAAY;AAEhB,SAAO,YAAY,MAAM;AACvB,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,UAAI,cAAc,GAAG;AACnB,eAAO;AAAA,MACT,OAAO;AACL,cAAM,IAAI,MAAM,sCAAsC,IAAI,eAAe,SAAS,QAAQ;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,KAAK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,aAAO,KAAK,KAAK;AACjB,mBAAa,MAAM;AAAA,IACrB,OAAO;AAEL,aAAO,KAAK,MAAM,SAAS,GAAG,MAAM,CAAC;AAErC,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,QAAQ,IAAI;AACnC;AAQA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAmD;AAEnD,MAAI,eAAe;AAEnB,SAAO,eAAe,MAAM;AAC1B,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,YAAM,IAAI,MAAM,8CAA8C,IAAI,mBAAmB,YAAY,QAAQ;AAAA,IAC3G;AAEA,UAAM,QAAQ,UAAU,KA
AK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,sBAAgB,MAAM;AAAA,IACxB,OAAO;AAEL,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,qBAAe;AAAA,IACjB;AAAA,EACF;AACF;AAQA,MAAM,gCAAgC,OACpC,UACA,aACA,WAAoC;AAEpC,QAAM,WAAW,MAAO,cAAc,OAAQ;AAC9C,MAAI,UAAU,GAAG;AACf,UAAM,eAAe,UAAU,SAAS,MAAM;AAAA,EAChD;AACF;AA6BA,MAAM,iBAAiB,CAAC,WAA8C;AAEpE,MAAI,OAAO,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,QAAM,OAAO,YAAY,QAAQ,GAAG,GAAG;AACvC,QAAM,OAAO,gBAAgB,QAAQ,KAAK,CAAC;AAC3C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,OAAO,gBAAgB,QAAQ,KAAK,EAAE;AAC5C,QAAM,QAAQ,IAAI,KAAK,gBAAgB,QAAQ,KAAK,EAAE,IAAI,GAAI;AAC9D,QAAM,WAAW,gBAAgB,QAAQ,KAAK,CAAC;AAC/C,QAAM,WAAW,YAAY,QAAQ,KAAK,CAAC;AAC3C,QAAM,QAAQ,YAAY,QAAQ,KAAK,CAAC;AACxC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,SAAS,YAAY,QAAQ,KAAK,GAAG;AAG3C,MAAI,UAAU,SAAS;AACrB,UAAM,IAAI,MAAM,8BAA8B,KAAK,GAAG;AAAA,EACxD;AAGA,MAAI,gBAAgB;AACpB,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,QAAI,KAAK,OAAO,IAAI,KAAK;AACvB,uBAAiB;AAAA,IACnB,OAAO;AACL,uBAAiB,OAAO,CAAC;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,kBAAkB,UAAU;AAC9B,UAAM,IAAI,MAAM,8BAA8B,QAAQ,SAAS,aAAa,EAAE;AAAA,EAChF;AAGA,MAAI,OAAO,SAAS,GAAG,MAAM,IAAI,IAAI,KAAK;AAC1C,MAAI,KAAK,SAAS,GAAG,GAAG;AACtB,WAAO,KAAK,MAAM,GAAG,EAAE;AAAA,EACzB;AAGA,QAAM,OAAO,aAAa,MAAM,cAAc;AAE9C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB;AAAA,IACA,UAAU;AAAA,EAAA;AAEd;AAKA,MAAM,8BAA8B,CAClC,UACA,WACmC;AACnC,QAAM,SAA8B,CAAA;AACpC,QAAM,WAAW,SAAS,OAAO,aAAa,EAAA;AAC9C,SAAO;AAAA,IACL,MAAM,YAAY;AAChB,cAAQ,eAAA;AACR,UAAI,OAAO,SAAS,GAAG;AACrB,eAAO,EAAE,OAAO,OAAO,MAAA,GAAU,MAAM,MAAA;AAAA,MACzC;AACA,aAAO,SAAS,KAAA;AAAA,IAClB;AAAA,IACA,QAAQ,OAAO,UAA4B;AACzC,UAAI,UAAU,QAAW;AACvB,eAAO,QAAQ,KAAK;AAAA,MACtB;AACA,aAAO,EAAE,OAAO,QAAW,MAAM,MAAA;AAAA,IACnC;AAAA,EAAA;AAEJ;AASA,MAAM,6BAA6B,CACjC,UACA,MACA,QACA,gBACa;AACb,QAAM,YAAY,mBAAkB;AAClC,QAAI,iBAAiB;AAErB,WAAO,iBAAiB,GAAG;AACzB,cAAQ,eAAA;AAER,YAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,UAAI,MAAM;AACR,cAAM,IAAI,MAAM,sCAAsC,IAAI,qBAAqB,cAAc,QAAQ;AAAA,MACvG;AAEA,YAAM,QAAQ,UAAU,KAAK;AAC7B,UAAI,MAAM,UAAU,gBAAgB;AAClC,0BAAkB,MAAM;AACxB,cAAM;AAAA,MACR,OAAO;AAEL,cAAM,SAAS,MAAM,SAAS,GAAG,cAAc;AAC/C,cAAM,SAAS,MAAM,SAAS,cAAc;AAC5C,yBAAiB;AAGjB,cAAM,SAAS,SAAS,MAAM;AAC9B,cAAM;AACN;AAAA,MACF;AAAA,IACF;AAGA,UAAM,8BAA8B,UAAU,MAAM,MAAM;AAG1D,gBAAY,WAAW;AAAA,EACzB;AAEA,SAAO,SAAS,KAAK,UAAA,GAAa,EAAE,QAAQ;AAC9C;AAWO,MAAM,qBAAqB,iBAChC,UACA,iBACA,QAAyE;AAEzE,QAAM,KAAK,mBAAmB;AAG9B,MAAI;AACJ,UAAQ,IAAA;AAAA,IACN,KAAK;AACH,YAAM,SAAS,aAAA;AACf,eAAS,KAAK,MAAM;AACpB,oBAAc;AACd;AAAA,IACF,KAAK;AAAA,IACL;AACE,oBAAc;AACd;AAAA,EAAA;AAIJ,QAAM,WAAW,4BAA4B,aAAa,MAAM;AAGhE,MAAI;AAGJ,SAAO,MAAM;AACX,YAAQ,eAAA;AAGR,QAAI,QAAQ,SAAS,UAAU,CAAC,OAAO,UAAU;AAI/C,YAAM,eAAe,UAAU,OAAO,MAAM,MAAM;AAElD,YAAM,8BAA8B,UAAU,OAAO,MAAM,MAAM;AAGjE,aAAO,WAAW;AAAA,IACpB;AAGA,QAAI;AACJ,QAAI;AACF,qBAAe,MAAM,eAAe,UAAU,KAAK,MAAM;AAAA,IAC3D,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,0BAA0B,GAAG;AAChF,cAAM,IAAI,MAAM,uCAAuC;AAAA,MACzD;AACA,YAAM;AAAA,IACR;AAEA,QAAI,iBAAiB,QAAW;AAC9B;AAAA,IACF;AAGA,aAAS,eAAe,YAAY;AACpC,QAAI,CAAC,QAAQ;AAEX,YAAM,cAAc,MAAM,eAAe,UAAU,KAAK,MAAM;AAC9D,UAAI,gBAAgB,UAAa,YAAY,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAChE;AAAA,MACF;AACA,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAEA,QAAI,OAAO,SAAS,aAAa;AAE/B,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,OAAO;AAAA,QACb,MAAM,OAAO;AAAA,QACb,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,OAAO,OAAO;AAAA,QACd,OAAO,OAAO;AAAA,QACd,MAAM,OAAO;AAAA,MAAA;AAAA,IAEjB,OAAO;AAEL,YAAM,gBAAgB;AA
GtB,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,cAAc;AAAA,QACpB,MAAM,cAAc;AAAA,QACpB,KAAK,cAAc;AAAA,QACnB,KAAK,cAAc;AAAA,QACnB,OAAO,cAAc;AAAA,QACrB,OAAO,cAAc;AAAA,QACrB,MAAM,cAAc;AAAA,QACpB,YAAY,OAAO,SAAc;AAE/B,cAAI,cAAc,UAAU;AAC1B,kBAAM,IAAI,MAAM,oFAAoF;AAAA,UACtG;AAEA,kBAAQ,MAAA;AAAA;AAAA,YAEN,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO,WAAW,SAAS,MAAM;AAAA,YACnC;AAAA;AAAA,YAEA,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO;AAAA,YACT;AAAA;AAAA,YAEA,KAAK,YAAY;AAEf,oBAAMA,YAAW,2BAA2B,UAAU,cAAc,MAAM,QAAQ,aAAa;AAC/F,qBAAOA;AAAAA,YACT;AAAA,YACA;AACE,oBAAM,IAAI,MAAM,6BAA6B,IAAI,EAAE;AAAA,UAAA;AAAA,QAEzD;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AACF;"}
|
1
|
+
{"version":3,"file":"index.js","sources":["../src/utils.ts","../src/packer.ts","../src/extractor.ts"],"sourcesContent":["// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { createReadStream, createWriteStream } from \"fs\";\nimport { stat, mkdir, writeFile, readdir } from \"fs/promises\";\nimport { Readable } from \"stream\";\nimport { pipeline } from \"stream/promises\";\nimport { dirname, join } from \"path\";\nimport { CreateItemOptions, CreateReadableFileItemOptions, FileItem, DirectoryItem, ReflectStats, CreateDirectoryItemOptions, EntryItem, ExtractedEntryItem, ExtractedFileItem } from \"./types\";\n\n// Tar specification: name max 100 bytes, prefix max 155 bytes\nexport const MAX_NAME = 100;\nexport const MAX_PREFIX = 155;\n\n/**\n * Get the user/group name from the candidate name or ID\n * @param candidateName - The candidate user/group name\n * @param candidateId - The candidate user/group ID\n * @param reflectStat - Whether to reflect the stat (all, exceptName, none)\n * @returns The user/group name\n */\nconst getUName = (candidateName: string | undefined, candidateId: number, reflectStat: ReflectStats | undefined) => {\n return candidateName ?? (reflectStat === 'all' ? candidateId.toString() : 'root');\n}\n\n/**\n * Get a buffer from the string or Buffer\n * @param data - The data to get a buffer from\n * @returns A buffer\n */\nexport const getBuffer = (data: Buffer | string) => {\n return Buffer.isBuffer(data) ? data : Buffer.from(data, 'utf8');\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a DirectoryItem\n * @param path - The path to the directory in the tar archive\n * @param reflectStat - Whether to reflect optional stat of the file (mode, uid, gid, mtime. Default: 'none')\n * @param options - Metadata for the directory including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A DirectoryItem\n * @remarks When reflectStat is 'all' or 'exceptName', `options.directoryPath` must be provided.\n */\nexport const createDirectoryItem = async (\n path: string,\n reflectStat?: ReflectStats,\n options?: CreateDirectoryItemOptions,\n signal?: AbortSignal\n): Promise<DirectoryItem> => {\n const rs = reflectStat ?? 'none';\n\n if (rs !== 'none' && options?.directoryPath) {\n signal?.throwIfAborted();\n const stats = await stat(options.directoryPath);\n const mode = options?.mode ?? stats.mode;\n const uid = options?.uid ?? stats.uid;\n const gid = options?.gid ?? stats.gid;\n const date = options?.date ?? stats.mtime;\n const uname = getUName(options?.uname, stats.uid, rs);\n const gname = getUName(options?.gname, stats.gid, rs);\n return {\n kind: 'directory',\n path, mode, uname, gname, uid, gid, date,\n };\n } else {\n const mode = options?.mode ?? 0o755;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? 
new Date();\n const uname = getUName(options?.uname, undefined, rs);\n const gname = getUName(options?.gname, undefined, rs);\n return {\n kind: 'directory',\n path, mode, uname, gname, uid, gid, date,\n };\n }\n};\n\n/**\n * Create a FileItem from content data directly\n * @param path - The path to the file in the tar archive\n * @param content - Content data\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createFileItem = async (\n path: string,\n content: string | Buffer,\n options?: CreateItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n signal?.throwIfAborted();\n \n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 'root';\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content\n };\n};\n\n/**\n * Create a FileItem from a Readable stream\n * @param path - The path to the file in the tar archive\n * @param readable - The readable stream\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createReadableFileItem = async (\n path: string,\n readable: Readable,\n options?: CreateReadableFileItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 'root';\n\n // When length is not provided, calculate the total size by reading all chunks\n let length = options?.length;\n if (!length) {\n // Calculate the total size by reading all chunks\n const chunks: Buffer[] = [];\n length = 0;\n\n // Collect all chunks to calculate size\n for await (const chunk of readable) {\n signal?.throwIfAborted();\n const buffer = getBuffer(chunk);\n chunks.push(buffer);\n length += buffer.length;\n }\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable: Readable.from(chunks, { signal })\n }\n };\n } else {\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable\n }\n };\n }\n};\n\n/**\n * Create a FileItem from a generator\n * @param path - The path to the file in the tar archive\n * @param generator - The generator to read the file from\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createGeneratorFileItem = async (\n path: string,\n generator: AsyncGenerator<Buffer, void, unknown>,\n options?: CreateReadableFileItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const mode = options?.mode ?? 0o644;\n const uid = options?.uid ?? 0;\n const gid = options?.gid ?? 0;\n const date = options?.date ?? new Date();\n\n const uname = options?.uname ?? 'root';\n const gname = options?.gname ?? 
'root';\n\n // When length is not provided, calculate the total size by reading all chunks\n let length = options?.length;\n if (!length) {\n // Calculate the total size by reading all chunks\n const chunks: Buffer[] = [];\n length = 0;\n\n // Collect all chunks to calculate size\n for await (const chunk of generator) {\n signal?.throwIfAborted();\n const buffer = getBuffer(chunk);\n chunks.push(buffer);\n length += buffer.length;\n }\n\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'readable',\n length,\n readable: Readable.from(chunks, { signal })\n }\n };\n } else {\n // Create a FileItem\n return {\n kind: 'file',\n path, mode, uname, gname, uid, gid, date,\n content: {\n kind: 'generator',\n length,\n generator\n }\n };\n }\n};\n\n/**\n * Create a FileItem from a local file path\n * @param path - The path to the file in the tar archive\n * @param filePath - The path to the file to read from real filesystem\n * @param reflectStat - Whether to reflect optional stat of the file (mode, uid, gid, mtime. Default: 'exceptName')\n * @param options - Metadata for the file including path in tar archive\n * @param signal - Optional abort signal to cancel the operation\n * @returns A FileItem\n */\nexport const createReadFileItem = async (\n path: string,\n filePath: string,\n reflectStat?: ReflectStats,\n options?: CreateItemOptions,\n signal?: AbortSignal\n): Promise<FileItem> => {\n const rs = reflectStat ?? 'exceptName';\n\n // Get file stats to extract metadata\n signal?.throwIfAborted();\n const stats = await stat(filePath);\n // Create readable stream from file\n const reader = createReadStream(filePath, { signal });\n\n const mode = options?.mode ?? (rs !== 'none' ? stats.mode : undefined);\n const uid = options?.uid ?? (rs !== 'none' ? stats.uid : undefined);\n const gid = options?.gid ?? (rs !== 'none' ? stats.gid : undefined);\n const date = options?.date ?? (rs !== 'none' ? 
stats.mtime : undefined);\n\n const uname = getUName(options?.uname, stats.uid, rs);\n const gname = getUName(options?.gname, stats.gid, rs);\n\n // Create a FileItem\n return await createReadableFileItem(path, reader, {\n length: stats.size, mode, uname, gname, uid, gid, date,\n }, signal);\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Store a readable stream to a file\n * @param reader - The readable stream\n * @param path - The path to the file to store the readable stream to\n * @param signal - Optional abort signal to cancel the operation\n * @returns A promise that resolves when the stream is finished\n */\nexport const storeReaderToFile = async (reader: Readable, path: string, signal?: AbortSignal) => {\n const writer = createWriteStream(path, { signal });\n await pipeline(reader, writer, { signal });\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Recursively collect all files and directories in a directory\n * @param baseDir - The base directory to collect files from\n * @param signal - Optional abort signal to cancel the operation\n * @returns Array of relative paths\n */\nconst getAllFilesInDirectory = async (\n baseDir: string, signal: AbortSignal | undefined): Promise<string[]> => {\n \n const collectFiles = async (currentDir: string, relativePath: string): Promise<string[]> => {\n signal?.throwIfAborted();\n \n try {\n const entries = await readdir(currentDir, { withFileTypes: true });\n const result: string[] = [];\n \n // Process all entries in parallel and collect their results\n const tasks = entries.map(async (entry) => {\n signal?.throwIfAborted();\n \n const entryRelativePath = join(relativePath, entry.name);\n \n if (entry.isDirectory()) {\n const entryFullPath = join(currentDir, entry.name);\n // First add the directory itself, then its contents\n const directoryContents = await collectFiles(entryFullPath, entryRelativePath);\n return [entryRelativePath, ...directoryContents];\n } else {\n // For files, just return the file path\n return [entryRelativePath];\n }\n });\n \n const allResults = await Promise.all(tasks);\n \n // Flatten and combine all results while maintaining order\n for (const entryResults of allResults) {\n result.push(...entryResults);\n }\n \n return result;\n } catch (error) {\n console.warn(`Warning: Could not read directory ${currentDir}:`, error);\n return [];\n }\n };\n \n return await collectFiles(baseDir, '');\n};\n\n/**\n * Create an async generator that yields EntryItem objects from filesystem paths\n * @param baseDir - Base directory path for resolving relative paths\n * @param relativePaths - Array of relative paths to include in the tar archive (optional)\n * @param reflectStat - Whether to reflect file stats (Default: 'exceptName')\n * @param signal - Optional abort signal to cancel the operation\n * @returns Async generator that yields EntryItem objects\n */\nexport const createEntryItemGenerator = async function* (\n baseDir: string,\n relativePaths?: string[],\n reflectStat?: ReflectStats,\n signal?: AbortSignal\n): AsyncGenerator<EntryItem, void, unknown> {\n const rs = reflectStat ?? 'exceptName';\n \n // If relativePaths is not provided, collect all files in baseDir\n const pathsToProcess = relativePaths ?? 
await getAllFilesInDirectory(baseDir, signal);\n \n for (const relativePath of pathsToProcess) {\n signal?.throwIfAborted();\n \n const fsPath = join(baseDir, relativePath);\n \n try {\n signal?.throwIfAborted();\n const stats = await stat(fsPath);\n \n if (stats.isDirectory()) {\n // Create directory entry\n yield await createDirectoryItem(relativePath, rs, {\n directoryPath: fsPath\n }, signal);\n } else if (stats.isFile()) {\n // Create file entry\n yield await createReadFileItem(relativePath, fsPath, rs, undefined, signal);\n }\n } catch (error) {\n // Skip files that can't be accessed (permissions, etc.)\n console.warn(`Warning: Could not access ${fsPath}:`, error);\n continue;\n }\n }\n};\n\n/**\n * Extract entries from a tar extractor to a directory on the filesystem\n * @param iterator - Async generator of extracted entry items\n * @param basePath - Base directory path where entries will be extracted\n * @param signal - Optional abort signal to cancel the operation\n * @returns Promise that resolves when extraction is complete\n */\nexport const extractTo = async (\n iterator: AsyncGenerator<ExtractedEntryItem, void, unknown>,\n basePath: string,\n signal?: AbortSignal\n): Promise<void> => {\n for await (const entry of iterator) {\n signal?.throwIfAborted();\n \n const targetPath = join(basePath, entry.path);\n \n if (entry.kind === 'directory') {\n // Create directory\n try {\n signal?.throwIfAborted();\n await mkdir(targetPath, { recursive: true, mode: entry.mode });\n } catch (error) {\n // Directory might already exist, which is fine\n if ((error as any).code !== 'EEXIST') {\n throw error;\n }\n }\n } else if (entry.kind === 'file') {\n // Create parent directories if they don't exist\n const parentDir = dirname(targetPath);\n signal?.throwIfAborted();\n await mkdir(parentDir, { recursive: true });\n \n // Extract file content and write to filesystem\n const fileEntry = entry as ExtractedFileItem;\n const content = await fileEntry.getContent('buffer');\n await writeFile(targetPath, content, { mode: entry.mode, signal });\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGzip } from \"zlib\";\nimport { getBuffer, MAX_NAME, MAX_PREFIX } from \"./utils\";\nimport { CompressionTypes, EntryItem, EntryItemContent } from \"./types\";\n\n/**\n * Get the byte length of a string in UTF-8\n * @param str - The string to get the byte length of\n * @returns The byte length of the string\n */\nconst utf8ByteLength = (str: string) => {\n return Buffer.byteLength(str, \"utf8\");\n}\n\n/**\n * Truncate a string to a maximum byte length in UTF-8\n * @param str - The string to truncate\n * @param maxBytes - The maximum byte length\n * @returns The truncated string\n */\nconst truncateUtf8Safe = (str: string, maxBytes: number) => {\n let total = 0;\n let i = 0;\n while (i < str.length) {\n const codePoint = str.codePointAt(i)!;\n const char = String.fromCodePoint(codePoint);\n const charBytes = Buffer.byteLength(char, \"utf8\");\n if (total + charBytes > maxBytes) break;\n total += charBytes;\n i += char.length;\n }\n return str.slice(0, i);\n}\n\n/**\n * Split a path into a name and a prefix\n * @param path - The path to split\n * @returns The name and prefix\n */\nconst splitPath = (path: string) => {\n if (utf8ByteLength(path) <= MAX_NAME) {\n return { prefix: \"\", name: path };\n }\n\n // Split by '/' and find the 
part that fits in name from the end\n const parts = path.split(\"/\");\n let name = parts.pop() ?? \"\";\n let prefix = parts.join(\"/\");\n\n // Truncate if name exceeds 100 bytes\n if (utf8ByteLength(name) > MAX_NAME) {\n name = truncateUtf8Safe(name, MAX_NAME);\n }\n\n // Truncate if prefix exceeds 155 bytes\n while (utf8ByteLength(prefix) > MAX_PREFIX) {\n prefix = truncateUtf8Safe(prefix, MAX_PREFIX);\n }\n\n return { prefix, name };\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Get octal bytes from a number\n * @param value - The number to get octal bytes from\n * @param length - The length of the octal bytes\n * @returns The octal bytes\n */\nconst getOctalBytes = (value: number, length: number) => {\n const str = value.toString(8).padStart(length - 1, \"0\") + \"\\0\";\n return Buffer.from(str, \"ascii\");\n};\n\n/**\n * Get padded bytes from a buffer\n * @param buffer - The buffer to get padded bytes from\n * @returns The padded bytes\n */\nconst getPaddedBytes = (buffer: Buffer) => {\n const extra = buffer.length % 512;\n if (extra === 0) {\n return buffer;\n } else {\n return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);\n }\n}\n\n/**\n * The terminator bytes\n */\nconst terminatorBytes = Buffer.alloc(1024, 0);\n\n/**\n * Create a tar header\n * @param type - The type of the entry\n * @param path - The path of the entry\n * @param size - The size of the entry\n * @param mode - The mode of the entry\n * @param uname - The user name of the entry\n * @param gname - The group name of the entry\n */\nconst createTarHeader = (\n type: 'file' | 'directory',\n path: string,\n size: number,\n mode: number,\n uname: string,\n gname: string,\n uid: number,\n gid: number,\n date: Date\n) => {\n // Allocate header bytes\n const buffer = Buffer.alloc(512, 0);\n\n // Split path into name and prefix\n const { name, prefix } = splitPath(path);\n\n // Write name, mode, uid, gid, size, mtime, typeflag, prefix, checksum\n buffer.write(name, 0, 100, \"utf8\");\n getOctalBytes(mode & 0o7777, 8).copy(buffer, 100);\n getOctalBytes(uid, 8).copy(buffer, 108);\n getOctalBytes(gid, 8).copy(buffer, 116);\n getOctalBytes(size, 12).copy(buffer, 124);\n getOctalBytes(Math.floor(date.getTime() / 1000), 12).copy(buffer, 136);\n\n // Check sum space\n Buffer.from(\" \", \"ascii\").copy(buffer, 148);\n\n if (type === 'file') {\n buffer.write(\"0\", 156, 1, \"ascii\"); // typeflag (file)\n } else {\n buffer.write(\"5\", 156, 1, \"ascii\"); // typeflag (directory)\n }\n buffer.write(\"ustar\\0\", 257, 6, \"ascii\");\n buffer.write(\"00\", 263, 2, \"ascii\"); // version\n buffer.write(uname, 265, 32, \"utf8\");\n buffer.write(gname, 297, 32, \"utf8\");\n buffer.write(prefix, 345, 155, \"utf8\"); // Path prefix\n\n // Calculate check sum\n let sum = 0;\n for (let i = 0; i < 512; i++) {\n sum += buffer[i];\n }\n getOctalBytes(sum, 8).copy(buffer, 148);\n\n return buffer;\n}\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar packer\n * @param entryItemGenerator - The async generator of the entry items\n * @param compressionType - The compression type to use (Default: 'none')\n * @param signal - The abort signal to cancel the tar packer\n * @returns Readable stream of the tar packer\n */\nexport const createTarPacker = (\n entryItemGenerator: AsyncGenerator<EntryItem, void, unknown>,\n compressionType?: CompressionTypes,\n signal?: AbortSignal) => {\n\n // Create async generator function from entry 
item iterator\n const entryItemIterator = async function*() {\n // Iterate over the entry items\n for await (const entryItem of entryItemGenerator) {\n signal?.throwIfAborted();\n\n switch (entryItem.kind) {\n // Entry is a file\n case 'file': {\n const entryItemContent = entryItem.content;\n // Content is a string or buffer\n if (typeof entryItemContent === 'string' || Buffer.isBuffer(entryItemContent)) {\n // Get content bytes from string or buffer\n const contentBytes = getBuffer(entryItemContent);\n\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n contentBytes.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n // Content bytes to adjust padding space and produce\n const totalPaddedContentBytes = getPaddedBytes(contentBytes);\n yield totalPaddedContentBytes;\n } else {\n // Assert that this is EntryItemContent, not FileItemReader (packer doesn't handle FileItemReader)\n const content = entryItemContent as EntryItemContent;\n \n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'file',\n entryItem.path,\n content.length,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date);\n yield tarHeaderBytes;\n\n let position = 0;\n switch (content.kind) {\n // Content is a generator\n case 'generator': {\n for await (const contentBytes of content.generator) {\n signal?.throwIfAborted();\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n // Content is a readable stream\n case 'readable': {\n for await (const chunk of content.readable) {\n signal?.throwIfAborted();\n const contentBytes = getBuffer(chunk);\n yield contentBytes;\n position += contentBytes.length;\n }\n break;\n }\n }\n\n // Padding space\n if (position % 512 !== 0) {\n signal?.throwIfAborted();\n yield Buffer.alloc(512 - (position % 512), 0);\n }\n }\n break;\n }\n // Entry is a directory\n case 'directory': {\n // Create and produce tar header bytes\n const tarHeaderBytes = createTarHeader(\n 'directory',\n entryItem.path,\n 0,\n entryItem.mode,\n entryItem.uname,\n entryItem.gname,\n entryItem.uid,\n entryItem.gid,\n entryItem.date\n );\n yield tarHeaderBytes;\n break;\n }\n }\n }\n\n // Terminates for tar stream\n yield terminatorBytes;\n };\n\n const ct = compressionType ?? 
'none';\n\n switch (ct) {\n // No compression\n case 'none': {\n // Create readable stream from entry item iterator\n return Readable.from(entryItemIterator(), { signal });\n }\n // Gzip compression\n case 'gzip': {\n // Create gzip stream\n const gzipStream = createGzip({ level: 9 });\n // Create readable stream from entry item iterator\n const entryItemStream = Readable.from(entryItemIterator(), { signal });\n // Pipe the entry item stream to the gzip stream\n entryItemStream.pipe(gzipStream);\n // Return the gzip stream\n return gzipStream;\n }\n }\n};\n","// tar-vern - Tape archiver library for Typescript\n// Copyright (c) Kouji Matsui (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/tar-vern/\n\nimport { Readable } from \"stream\";\nimport { createGunzip } from \"zlib\";\nimport { CompressionTypes, ExtractedDirectoryItem, ExtractedEntryItem, ExtractedFileItem } from \"./types\";\nimport { getBuffer } from \"./utils\";\n\n/**\n * Parse octal bytes to number\n * @param buffer - The buffer containing octal bytes\n * @param offset - The offset in the buffer\n * @param length - The length of the octal bytes\n * @returns The parsed number\n */\nconst parseOctalBytes = (buffer: Buffer, offset: number, length: number): number => {\n const str = buffer.subarray(offset, offset + length).toString('ascii').replace(/\\0/g, '').trim();\n return str ? parseInt(str, 8) : 0;\n};\n\n/**\n * Parse string from buffer\n * @param buffer - The buffer containing the string\n * @param offset - The offset in the buffer\n * @param length - The length of the string\n * @returns The parsed string\n */\nconst parseString = (buffer: Buffer, offset: number, length: number): string => {\n return buffer.subarray(offset, offset + length).toString('utf8').replace(/\\0/g, '').trim();\n};\n\n/**\n * Read exact number of bytes from stream\n * @param iterator - The async iterator\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns The buffer containing the read bytes\n */\nconst readExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<Buffer | undefined> => {\n\n const chunks: Buffer[] = [];\n let totalRead = 0;\n\n while (totalRead < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n if (totalRead === 0) {\n return undefined; // No data at all\n } else {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);\n }\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalRead;\n \n if (chunk.length <= needed) {\n chunks.push(chunk);\n totalRead += chunk.length;\n } else {\n // We read more than needed, split the chunk\n chunks.push(chunk.subarray(0, needed));\n // Put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalRead = size;\n }\n }\n\n return Buffer.concat(chunks, size);\n};\n\n/**\n * Skip exact number of bytes from stream without buffering\n * @param iterator - The async iterator\n * @param size - The number of bytes to skip\n * @param signal - The abort signal\n */\nconst skipExactBytes = async (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined): Promise<void> => {\n\n let totalSkipped = 0;\n\n while (totalSkipped < size) {\n signal?.throwIfAborted();\n \n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped 
${totalSkipped} bytes`);\n }\n\n const chunk = getBuffer(value);\n const needed = size - totalSkipped;\n \n if (chunk.length <= needed) {\n totalSkipped += chunk.length;\n } else {\n // We read more than needed, put back the remaining data\n await iterator.return?.(chunk.subarray(needed));\n totalSkipped = size;\n }\n }\n};\n\n/**\n * Iterator will be skip padding bytes.\n * @param iterator - Async iterator\n * @param contentSize - Total content size to calculate boundary position\n * @param signal - Abort signal\n */\nconst skipPaddingBytesTo512Boundary = async (\n iterator: AsyncIterator<string | Buffer>,\n contentSize: number,\n signal: AbortSignal | undefined) => {\n // Skip padding bytes to next 512-byte boundary\n const padding = (512 - (contentSize % 512)) % 512;\n if (padding > 0) {\n await skipExactBytes(iterator, padding, signal);\n }\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Tar file/directory entry item.\n */\ninterface EntryItemInfo {\n readonly kind: 'file' | 'directory';\n readonly path: string;\n readonly size: number;\n readonly mode: number;\n readonly uid: number;\n readonly gid: number;\n readonly mtime: Date;\n readonly uname: string;\n readonly gname: string;\n readonly checksum: number;\n /**\n * This entry (file) item is consumed.\n */\n consumed: boolean;\n}\n\n/**\n * Parse tar header from buffer\n * @param buffer - The buffer containing the tar header\n * @returns The parsed entry information or null if end of archive\n */\nconst parseTarHeader = (buffer: Buffer): EntryItemInfo | undefined => {\n // Check if this is the end of archive (all zeros)\n if (buffer.every(b => b === 0)) {\n return undefined;\n }\n\n // Parse header fields\n const name = parseString(buffer, 0, 100);\n const mode = parseOctalBytes(buffer, 100, 8);\n const uid = parseOctalBytes(buffer, 108, 8);\n const gid = parseOctalBytes(buffer, 116, 8);\n const size = parseOctalBytes(buffer, 124, 12);\n const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1000);\n const checksum = parseOctalBytes(buffer, 148, 8);\n const typeflag = parseString(buffer, 156, 1);\n const magic = parseString(buffer, 257, 6);\n const uname = parseString(buffer, 265, 32);\n const gname = parseString(buffer, 297, 32);\n const prefix = parseString(buffer, 345, 155);\n\n // Verify magic (should be \"ustar\" for POSIX tar)\n if (magic !== 'ustar') {\n throw new Error(`Invalid tar format: magic=\"${magic}\"`);\n }\n\n // Calculate checksum\n let calculatedSum = 0;\n for (let i = 0; i < 512; i++) {\n if (i >= 148 && i < 156) {\n calculatedSum += 32; // Space character\n } else {\n calculatedSum += buffer[i];\n }\n }\n\n if (calculatedSum !== checksum) {\n throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);\n }\n\n // Construct full path and remove trailing slash for directories\n let path = prefix ? `${prefix}/${name}` : name;\n if (path.endsWith('/')) {\n path = path.slice(0, -1);\n }\n\n // Determine type\n const kind = typeflag === '5' ? 
'directory' : 'file';\n\n return {\n kind,\n path,\n size,\n mode,\n uid,\n gid,\n mtime,\n uname: uname || uid.toString(),\n gname: gname || gid.toString(),\n checksum,\n consumed: false\n };\n};\n\n/**\n * Create a buffered async iterator that allows returning data\n */\nconst createBufferedAsyncIterator = (\n iterable: AsyncIterable<string | Buffer>,\n signal: AbortSignal | undefined\n): AsyncIterator<string | Buffer> => {\n const buffer: (string | Buffer)[] = [];\n const iterator = iterable[Symbol.asyncIterator]();\n return {\n next: async () => {\n signal?.throwIfAborted();\n if (buffer.length > 0) {\n return { value: buffer.shift()!, done: false };\n }\n return iterator.next();\n },\n return: async (value?: string | Buffer) => {\n if (value !== undefined) {\n buffer.unshift(value);\n }\n return { value: undefined, done: false };\n }\n };\n};\n\n/**\n * Create a readable stream from an async iterator with size limit\n * @param iterator - The async iterator to read from\n * @param size - The number of bytes to read\n * @param signal - The abort signal\n * @returns Readable stream\n */\nconst createReadableFromIterator = (\n iterator: AsyncIterator<string | Buffer>,\n size: number,\n signal: AbortSignal | undefined,\n consumedRef: { consumed: boolean }\n): Readable => {\n const generator = async function*() {\n let remainingBytes = size;\n \n while (remainingBytes > 0) {\n signal?.throwIfAborted();\n\n const { value, done } = await iterator.next();\n if (done) {\n throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);\n }\n\n const chunk = getBuffer(value);\n if (chunk.length <= remainingBytes) {\n remainingBytes -= chunk.length;\n yield chunk;\n } else {\n // We read more than needed\n const needed = chunk.subarray(0, remainingBytes);\n const excess = chunk.subarray(remainingBytes);\n remainingBytes = 0;\n \n // Return excess data to the iterator\n await iterator.return?.(excess);\n yield needed;\n break;\n }\n }\n\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, size, signal);\n\n // Finished to consume\n consumedRef.consumed = true;\n };\n\n return Readable.from(generator(), { signal });\n};\n\n///////////////////////////////////////////////////////////////////////////////////\n\n/**\n * Create a tar extractor\n * @param readable - The readable stream containing tar data\n * @param compressionType - The compression type (default: 'none')\n * @param signal - The abort signal\n * @returns Async generator of entry items\n */\nexport const createTarExtractor = async function* (\n readable: Readable,\n compressionType?: CompressionTypes,\n signal?: AbortSignal): AsyncGenerator<ExtractedEntryItem, void, unknown> {\n\n const ct = compressionType ?? 
'none';\n\n // Apply decompression if needed\n let inputStream: Readable;\n switch (ct) {\n case 'gzip':\n const gunzip = createGunzip();\n readable.pipe(gunzip);\n inputStream = gunzip;\n break;\n case 'none':\n default:\n inputStream = readable;\n break;\n }\n\n // Get async iterator from the stream\n const iterator = createBufferedAsyncIterator(inputStream, signal);\n\n // Last entry item\n let header: EntryItemInfo | undefined;\n\n // For each tar items\n while (true) {\n signal?.throwIfAborted();\n\n // Did not consume last file item yielding?\n if (header?.kind === 'file' && !header.consumed) {\n // Have to skip the file contents and boundary\n\n // Skip entire contents without buffering\n await skipExactBytes(iterator, header.size, signal);\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, header.size, signal);\n\n // Mark consumed\n header.consumed = true;\n }\n\n // Read header (512 bytes)\n let headerBuffer: Buffer | undefined;\n try {\n headerBuffer = await readExactBytes(iterator, 512, signal);\n } catch (error) {\n if (error instanceof Error && error.message.includes('Unexpected end of stream')) {\n throw new Error('Invalid tar format: incomplete header');\n }\n throw error;\n }\n \n if (headerBuffer === undefined) {\n break; // End of stream\n }\n\n // Parse header\n header = parseTarHeader(headerBuffer);\n if (!header) {\n // Check for second terminator block\n const secondBlock = await readExactBytes(iterator, 512, signal);\n if (secondBlock === undefined || secondBlock.every(b => b === 0)) {\n break; // Proper end of archive\n }\n throw new Error('Invalid tar format: expected terminator block');\n }\n\n if (header.kind === 'directory') {\n // Yield directory entry\n yield {\n kind: 'directory',\n path: header.path,\n mode: header.mode,\n uid: header.uid,\n gid: header.gid,\n uname: header.uname,\n gname: header.gname,\n date: header.mtime\n } as ExtractedDirectoryItem;\n } else {\n // Capture current header to avoid closure issues\n const currentHeader = header;\n \n // Yield file entry with lazy getContent\n yield {\n kind: 'file',\n path: currentHeader.path,\n mode: currentHeader.mode,\n uid: currentHeader.uid,\n gid: currentHeader.gid,\n uname: currentHeader.uname,\n gname: currentHeader.gname,\n date: currentHeader.mtime,\n getContent: async (type: any) => {\n // Is multiple called\n if (currentHeader.consumed) {\n throw new Error('Content has already been consumed. 
Multiple calls to getContent are not supported.');\n }\n\n switch (type) {\n // For string\n case 'string': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer.toString('utf8');\n }\n // For buffer\n case 'buffer': {\n // Read entire contents just now\n const dataBuffer = await readExactBytes(iterator, currentHeader.size, signal);\n if (dataBuffer === undefined) {\n throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);\n }\n // Finalize to skip boundary\n await skipPaddingBytesTo512Boundary(iterator, currentHeader.size, signal);\n currentHeader.consumed = true;\n return dataBuffer;\n }\n // For Readble stream\n case 'readable': {\n // Get Readble object (to delegate)\n const readable = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);\n return readable;\n }\n default:\n throw new Error(`Unsupported content type: ${type}`);\n }\n }\n } as ExtractedFileItem;\n }\n }\n};\n"],"names":["readable"],"mappings":";;;;;;AAaO,MAAM,WAAW;AACjB,MAAM,aAAa;AAS1B,MAAM,WAAW,CAAC,eAAmC,aAAqB,gBAA0C;AAClH,SAAO,kBAAkB,gBAAgB,QAAQ,YAAY,aAAa;AAC5E;AAOO,MAAM,YAAY,CAAC,SAA0B;AAClD,SAAO,OAAO,SAAS,IAAI,IAAI,OAAO,OAAO,KAAK,MAAM,MAAM;AAChE;AAaO,MAAM,sBAAsB,OACjC,MACA,aACA,SACA,WAC2B;AAC3B,QAAM,KAAK,eAAe;AAE1B,MAAI,OAAO,UAAU,SAAS,eAAe;AAC3C,YAAQ,eAAA;AACR,UAAM,QAAQ,MAAM,KAAK,QAAQ,aAAa;AAC9C,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,MAAM,SAAS,OAAO,MAAM;AAClC,UAAM,OAAO,SAAS,QAAQ,MAAM;AACpC,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC,OAAO;AACL,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,MAAM,SAAS,OAAO;AAC5B,UAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAClC,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,UAAM,QAAQ,SAAS,SAAS,OAAO,QAAW,EAAE;AACpD,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,IAAA;AAAA,EAExC;AACF;AAUO,MAAM,iBAAiB,OAC5B,MACA,SACA,SACA,WACsB;AACtB,UAAQ,eAAA;AAER,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,IACpC;AAAA,EAAA;AAEJ;AAUO,MAAM,yBAAyB,OACpC,MACA,UACA,SACA,WACsB;AACtB,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,UAAU;AAClC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAU,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAUO,MAAM,0BAA0B,OACrC,MACA,WACA,SACA,WACsB;AACtB,Q
AAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,MAAM,SAAS,OAAO;AAC5B,QAAM,OAAO,SAAS,QAAQ,oBAAI,KAAA;AAElC,QAAM,QAAQ,SAAS,SAAS;AAChC,QAAM,QAAQ,SAAS,SAAS;AAGhC,MAAI,SAAS,SAAS;AACtB,MAAI,CAAC,QAAQ;AAEX,UAAM,SAAmB,CAAA;AACzB,aAAS;AAGT,qBAAiB,SAAS,WAAW;AACnC,cAAQ,eAAA;AACR,YAAM,SAAS,UAAU,KAAK;AAC9B,aAAO,KAAK,MAAM;AAClB,gBAAU,OAAO;AAAA,IACnB;AAGA,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA,UAAU,SAAS,KAAK,QAAQ,EAAE,QAAQ;AAAA,MAAA;AAAA,IAC5C;AAAA,EAEJ,OAAO;AAEL,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAK;AAAA,MAAK;AAAA,MACpC,SAAS;AAAA,QACP,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AACF;AAWO,MAAM,qBAAqB,OAChC,MACA,UACA,aACA,SACA,WACsB;AACtB,QAAM,KAAK,eAAe;AAG1B,UAAQ,eAAA;AACR,QAAM,QAAQ,MAAM,KAAK,QAAQ;AAEjC,QAAM,SAAS,iBAAiB,UAAU,EAAE,QAAQ;AAEpD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,OAAO;AAC5D,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,MAAM,SAAS,QAAQ,OAAO,SAAS,MAAM,MAAM;AACzD,QAAM,OAAO,SAAS,SAAS,OAAO,SAAS,MAAM,QAAQ;AAE7D,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AACpD,QAAM,QAAQ,SAAS,SAAS,OAAO,MAAM,KAAK,EAAE;AAGpD,SAAO,MAAM,uBAAuB,MAAM,QAAQ;AAAA,IAChD,QAAQ,MAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAK;AAAA,IAAK;AAAA,EAAA,GACjD,MAAM;AACX;AAWO,MAAM,oBAAoB,OAAO,QAAkB,MAAc,WAAyB;AAC/F,QAAM,SAAS,kBAAkB,MAAM,EAAE,QAAQ;AACjD,QAAM,SAAS,QAAQ,QAAQ,EAAE,QAAQ;AAC3C;AAUA,MAAM,yBAAyB,OAC7B,SAAiB,WAAuD;AAExE,QAAM,eAAe,OAAO,YAAoB,iBAA4C;AAC1F,YAAQ,eAAA;AAER,QAAI;AACF,YAAM,UAAU,MAAM,QAAQ,YAAY,EAAE,eAAe,MAAM;AACjE,YAAM,SAAmB,CAAA;AAGzB,YAAM,QAAQ,QAAQ,IAAI,OAAO,UAAU;AACzC,gBAAQ,eAAA;AAER,cAAM,oBAAoB,KAAK,cAAc,MAAM,IAAI;AAEvD,YAAI,MAAM,eAAe;AACvB,gBAAM,gBAAgB,KAAK,YAAY,MAAM,IAAI;AAEjD,gBAAM,oBAAoB,MAAM,aAAa,eAAe,iBAAiB;AAC7E,iBAAO,CAAC,mBAAmB,GAAG,iBAAiB;AAAA,QACjD,OAAO;AAEL,iBAAO,CAAC,iBAAiB;AAAA,QAC3B;AAAA,MACF,CAAC;AAED,YAAM,aAAa,MAAM,QAAQ,IAAI,KAAK;AAG1C,iBAAW,gBAAgB,YAAY;AACrC,eAAO,KAAK,GAAG,YAAY;AAAA,MAC7B;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AACd,cAAQ,KAAK,qCAAqC,UAAU,KAAK,KAAK;AACtE,aAAO,CAAA;AAAA,IACT;AAAA,EACF;AAEA,SAAO,MAAM,aAAa,SAAS,EAAE;AACvC;AAUO,MAAM,2BAA2B,iBACtC,SACA,eACA,aACA,QAC0C;AAC1C,QAAM,KAAK,eAAe;AAG1B,QAAM,iBAAiB,iBAAiB,MAAM,uBAAuB,SAAS,MAAM;AAEpF,aAAW,gBAAgB,gBAAgB;AACzC,YAAQ,eAAA;AAER,UAAM,SAAS,KAAK,SAAS,YAAY;AAEzC,QAAI;AACF,cAAQ,eAAA;AACR,YAAM,QAAQ,MAAM,KAAK,MAAM;AAE/B,UAAI,MAAM,eAAe;AAEvB,cAAM,MAAM,oBAAoB,cAAc,IAAI;AAAA,UAChD,eAAe;AAAA,QAAA,GACd,MAAM;AAAA,MACX,WAAW,MAAM,UAAU;AAEzB,cAAM,MAAM,mBAAmB,cAAc,QAAQ,IAAI,QAAW,MAAM;AAAA,MAC5E;AAAA,IACF,SAAS,OAAO;AAEd,cAAQ,KAAK,6BAA6B,MAAM,KAAK,KAAK;AAC1D;AAAA,IACF;AAAA,EACF;AACF;AASO,MAAM,YAAY,OACvB,UACA,UACA,WACkB;AAClB,mBAAiB,SAAS,UAAU;AAClC,YAAQ,eAAA;AAER,UAAM,aAAa,KAAK,UAAU,MAAM,IAAI;AAE5C,QAAI,MAAM,SAAS,aAAa;AAE9B,UAAI;AACF,gBAAQ,eAAA;AACR,cAAM,MAAM,YAAY,EAAE,WAAW,MAAM,MAAM,MAAM,MAAM;AAAA,MAC/D,SAAS,OAAO;AAEd,YAAK,MAAc,SAAS,UAAU;AACpC,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF,WAAW,MAAM,SAAS,QAAQ;AAEhC,YAAM,YAAY,QAAQ,UAAU;AACpC,cAAQ,eAAA;AACR,YAAM,MAAM,WAAW,EAAE,WAAW,MAAM;AAG1C,YAAM,YAAY;AAClB,YAAM,UAAU,MAAM,UAAU,WAAW,QAAQ;AACnD,YAAM,UAAU,YAAY,SAAS,EAAE,MAAM,MAAM,MAAM,QAAQ;AAAA,IACnE;AAAA,EACF;AACF;AC1ZA,MAAM,iBAAiB,CAAC,QAAgB;AACtC,SAAO,OAAO,WAAW,KAAK,MAAM;AACtC;AAQA,MAAM,mBAAmB,CAAC,KAAa,aAAqB;AAC1D,MAAI,QAAQ;AACZ,MAAI,IAAI;AACR,SAAO,IAAI,IAAI,QAAQ;AACrB,UAAM,YAAY,IAAI,YAAY,CAAC;AACnC,UAAM,OAAO,OAAO,cAAc,SAAS;AAC3C,UAAM,YAAY,OAAO,WAAW,MAAM,MAAM;AAChD,QAAI,QAAQ,YAAY,SAAU;AAClC,aAAS;AACT,SAAK,KAAK;AAAA,EACZ;AACA,SAAO,IAAI,MAAM,GAAG,CAAC;AACvB;AAOA,MAAM,YAAY,CAAC,SAAiB;AAClC,MAAI,eAAe,IAAI,KAAK,UAAU;
AACpC,WAAO,EAAE,QAAQ,IAAI,MAAM,KAAA;AAAA,EAC7B;AAGA,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,OAAO,MAAM,IAAA,KAAS;AAC1B,MAAI,SAAS,MAAM,KAAK,GAAG;AAG3B,MAAI,eAAe,IAAI,IAAI,UAAU;AACnC,WAAO,iBAAiB,MAAM,QAAQ;AAAA,EACxC;AAGA,SAAO,eAAe,MAAM,IAAI,YAAY;AAC1C,aAAS,iBAAiB,QAAQ,UAAU;AAAA,EAC9C;AAEA,SAAO,EAAE,QAAQ,KAAA;AACnB;AAUA,MAAM,gBAAgB,CAAC,OAAe,WAAmB;AACvD,QAAM,MAAM,MAAM,SAAS,CAAC,EAAE,SAAS,SAAS,GAAG,GAAG,IAAI;AAC1D,SAAO,OAAO,KAAK,KAAK,OAAO;AACjC;AAOA,MAAM,iBAAiB,CAAC,WAAmB;AACzC,QAAM,QAAQ,OAAO,SAAS;AAC9B,MAAI,UAAU,GAAG;AACf,WAAO;AAAA,EACT,OAAO;AACL,WAAO,OAAO,OAAO,CAAC,QAAQ,OAAO,MAAM,MAAM,OAAO,CAAC,CAAC,CAAC;AAAA,EAC7D;AACF;AAKA,MAAM,kBAAkB,OAAO,MAAM,MAAM,CAAC;AAW5C,MAAM,kBAAkB,CACtB,MACA,MACA,MACA,MACA,OACA,OACA,KACA,KACA,SACG;AAEH,QAAM,SAAS,OAAO,MAAM,KAAK,CAAC;AAGlC,QAAM,EAAE,MAAM,WAAW,UAAU,IAAI;AAGvC,SAAO,MAAM,MAAM,GAAG,KAAK,MAAM;AACjC,gBAAc,OAAO,MAAQ,CAAC,EAAE,KAAK,QAAQ,GAAG;AAChD,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AACtC,gBAAc,MAAM,EAAE,EAAE,KAAK,QAAQ,GAAG;AACxC,gBAAc,KAAK,MAAM,KAAK,QAAA,IAAY,GAAI,GAAG,EAAE,EAAE,KAAK,QAAQ,GAAG;AAGrE,SAAO,KAAK,YAAY,OAAO,EAAE,KAAK,QAAQ,GAAG;AAEjD,MAAI,SAAS,QAAQ;AACnB,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC,OAAO;AACL,WAAO,MAAM,KAAK,KAAK,GAAG,OAAO;AAAA,EACnC;AACA,SAAO,MAAM,WAAW,KAAK,GAAG,OAAO;AACvC,SAAO,MAAM,MAAM,KAAK,GAAG,OAAO;AAClC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,OAAO,KAAK,IAAI,MAAM;AACnC,SAAO,MAAM,QAAQ,KAAK,KAAK,MAAM;AAGrC,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,WAAO,OAAO,CAAC;AAAA,EACjB;AACA,gBAAc,KAAK,CAAC,EAAE,KAAK,QAAQ,GAAG;AAEtC,SAAO;AACT;AAWO,MAAM,kBAAkB,CAC7B,oBACA,iBACA,WAAyB;AAGzB,QAAM,oBAAoB,mBAAkB;AAE1C,qBAAiB,aAAa,oBAAoB;AAChD,cAAQ,eAAA;AAER,cAAQ,UAAU,MAAA;AAAA;AAAA,QAEhB,KAAK,QAAQ;AACX,gBAAM,mBAAmB,UAAU;AAEnC,cAAI,OAAO,qBAAqB,YAAY,OAAO,SAAS,gBAAgB,GAAG;AAE7E,kBAAM,eAAe,UAAU,gBAAgB;AAG/C,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,aAAa;AAAA,cACb,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAGN,kBAAM,0BAA0B,eAAe,YAAY;AAC3D,kBAAM;AAAA,UACR,OAAO;AAEL,kBAAM,UAAU;AAGhB,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA,UAAU;AAAA,cACV,QAAQ;AAAA,cACR,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,cACV,UAAU;AAAA,YAAA;AACZ,kBAAM;AAEN,gBAAI,WAAW;AACf,oBAAQ,QAAQ,MAAA;AAAA;AAAA,cAEd,KAAK,aAAa;AAChB,iCAAiB,gBAAgB,QAAQ,WAAW;AAClD,0BAAQ,eAAA;AACR,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA;AAAA,cAEA,KAAK,YAAY;AACf,iCAAiB,SAAS,QAAQ,UAAU;AAC1C,0BAAQ,eAAA;AACR,wBAAM,eAAe,UAAU,KAAK;AACpC,wBAAM;AACN,8BAAY,aAAa;AAAA,gBAC3B;AACA;AAAA,cACF;AAAA,YAAA;AAIF,gBAAI,WAAW,QAAQ,GAAG;AACxB,sBAAQ,eAAA;AACR,oBAAM,OAAO,MAAM,MAAO,WAAW,KAAM,CAAC;AAAA,YAC9C;AAAA,UACF;AACA;AAAA,QACF;AAAA;AAAA,QAEA,KAAK,aAAa;AAEhB,gBAAM,iBAAiB;AAAA,YACrB;AAAA,YACA,UAAU;AAAA,YACV;AAAA,YACA,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,YACV,UAAU;AAAA,UAAA;AAEZ,gBAAM;AACN;AAAA,QACF;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM;AAAA,EACR;AAEA,QAAM,KAAK,mBAAmB;AAE9B,UAAQ,IAAA;AAAA;AAAA,IAEN,KAAK,QAAQ;AAEX,aAAO,SAAS,KAAK,kBAAA,GAAqB,EAAE,QAAQ;AAAA,IACtD;AAAA;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,aAAa,WAAW,EAAE,OAAO,GAAG;AAE1C,YAAM,kBAAkB,SAAS,KAAK,qBAAqB,EAAE,QAAQ;AAErE,sBAAgB,KAAK,UAAU;AAE/B,aAAO;AAAA,IACT;AAAA,EAAA;AAEJ;ACrRA,MAAM,kBAAkB,CAAC,QAAgB,QAAgB,WAA2B;AAClF,QAAM,MAAM,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,OAAO,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AAC1F,SAAO,MAAM,SAAS,KAAK,CAAC,IAAI;AAClC;AASA,MAAM,cAAc,CAAC,QAAgB,QAAgB,WAA2B;AAC9E,SAAO,OAAO,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE,EAAE,KAAA;AACtF;AASA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAiE;AAEjE,QAAM,SAAmB,CAAA;
AACzB,MAAI,YAAY;AAEhB,SAAO,YAAY,MAAM;AACvB,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,UAAI,cAAc,GAAG;AACnB,eAAO;AAAA,MACT,OAAO;AACL,cAAM,IAAI,MAAM,sCAAsC,IAAI,eAAe,SAAS,QAAQ;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,KAAK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,aAAO,KAAK,KAAK;AACjB,mBAAa,MAAM;AAAA,IACrB,OAAO;AAEL,aAAO,KAAK,MAAM,SAAS,GAAG,MAAM,CAAC;AAErC,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,QAAQ,IAAI;AACnC;AAQA,MAAM,iBAAiB,OACrB,UACA,MACA,WAAmD;AAEnD,MAAI,eAAe;AAEnB,SAAO,eAAe,MAAM;AAC1B,YAAQ,eAAA;AAER,UAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,QAAI,MAAM;AACR,YAAM,IAAI,MAAM,8CAA8C,IAAI,mBAAmB,YAAY,QAAQ;AAAA,IAC3G;AAEA,UAAM,QAAQ,UAAU,KAAK;AAC7B,UAAM,SAAS,OAAO;AAEtB,QAAI,MAAM,UAAU,QAAQ;AAC1B,sBAAgB,MAAM;AAAA,IACxB,OAAO;AAEL,YAAM,SAAS,SAAS,MAAM,SAAS,MAAM,CAAC;AAC9C,qBAAe;AAAA,IACjB;AAAA,EACF;AACF;AAQA,MAAM,gCAAgC,OACpC,UACA,aACA,WAAoC;AAEpC,QAAM,WAAW,MAAO,cAAc,OAAQ;AAC9C,MAAI,UAAU,GAAG;AACf,UAAM,eAAe,UAAU,SAAS,MAAM;AAAA,EAChD;AACF;AA6BA,MAAM,iBAAiB,CAAC,WAA8C;AAEpE,MAAI,OAAO,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,QAAM,OAAO,YAAY,QAAQ,GAAG,GAAG;AACvC,QAAM,OAAO,gBAAgB,QAAQ,KAAK,CAAC;AAC3C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,MAAM,gBAAgB,QAAQ,KAAK,CAAC;AAC1C,QAAM,OAAO,gBAAgB,QAAQ,KAAK,EAAE;AAC5C,QAAM,QAAQ,IAAI,KAAK,gBAAgB,QAAQ,KAAK,EAAE,IAAI,GAAI;AAC9D,QAAM,WAAW,gBAAgB,QAAQ,KAAK,CAAC;AAC/C,QAAM,WAAW,YAAY,QAAQ,KAAK,CAAC;AAC3C,QAAM,QAAQ,YAAY,QAAQ,KAAK,CAAC;AACxC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,QAAQ,YAAY,QAAQ,KAAK,EAAE;AACzC,QAAM,SAAS,YAAY,QAAQ,KAAK,GAAG;AAG3C,MAAI,UAAU,SAAS;AACrB,UAAM,IAAI,MAAM,8BAA8B,KAAK,GAAG;AAAA,EACxD;AAGA,MAAI,gBAAgB;AACpB,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,QAAI,KAAK,OAAO,IAAI,KAAK;AACvB,uBAAiB;AAAA,IACnB,OAAO;AACL,uBAAiB,OAAO,CAAC;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,kBAAkB,UAAU;AAC9B,UAAM,IAAI,MAAM,8BAA8B,QAAQ,SAAS,aAAa,EAAE;AAAA,EAChF;AAGA,MAAI,OAAO,SAAS,GAAG,MAAM,IAAI,IAAI,KAAK;AAC1C,MAAI,KAAK,SAAS,GAAG,GAAG;AACtB,WAAO,KAAK,MAAM,GAAG,EAAE;AAAA,EACzB;AAGA,QAAM,OAAO,aAAa,MAAM,cAAc;AAE9C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB,OAAO,SAAS,IAAI,SAAA;AAAA,IACpB;AAAA,IACA,UAAU;AAAA,EAAA;AAEd;AAKA,MAAM,8BAA8B,CAClC,UACA,WACmC;AACnC,QAAM,SAA8B,CAAA;AACpC,QAAM,WAAW,SAAS,OAAO,aAAa,EAAA;AAC9C,SAAO;AAAA,IACL,MAAM,YAAY;AAChB,cAAQ,eAAA;AACR,UAAI,OAAO,SAAS,GAAG;AACrB,eAAO,EAAE,OAAO,OAAO,MAAA,GAAU,MAAM,MAAA;AAAA,MACzC;AACA,aAAO,SAAS,KAAA;AAAA,IAClB;AAAA,IACA,QAAQ,OAAO,UAA4B;AACzC,UAAI,UAAU,QAAW;AACvB,eAAO,QAAQ,KAAK;AAAA,MACtB;AACA,aAAO,EAAE,OAAO,QAAW,MAAM,MAAA;AAAA,IACnC;AAAA,EAAA;AAEJ;AASA,MAAM,6BAA6B,CACjC,UACA,MACA,QACA,gBACa;AACb,QAAM,YAAY,mBAAkB;AAClC,QAAI,iBAAiB;AAErB,WAAO,iBAAiB,GAAG;AACzB,cAAQ,eAAA;AAER,YAAM,EAAE,OAAO,KAAA,IAAS,MAAM,SAAS,KAAA;AACvC,UAAI,MAAM;AACR,cAAM,IAAI,MAAM,sCAAsC,IAAI,qBAAqB,cAAc,QAAQ;AAAA,MACvG;AAEA,YAAM,QAAQ,UAAU,KAAK;AAC7B,UAAI,MAAM,UAAU,gBAAgB;AAClC,0BAAkB,MAAM;AACxB,cAAM;AAAA,MACR,OAAO;AAEL,cAAM,SAAS,MAAM,SAAS,GAAG,cAAc;AAC/C,cAAM,SAAS,MAAM,SAAS,cAAc;AAC5C,yBAAiB;AAGjB,cAAM,SAAS,SAAS,MAAM;AAC9B,cAAM;AACN;AAAA,MACF;AAAA,IACF;AAGA,UAAM,8BAA8B,UAAU,MAAM,MAAM;AAG1D,gBAAY,WAAW;AAAA,EACzB;AAEA,SAAO,SAAS,KAAK,UAAA,GAAa,EAAE,QAAQ;AAC9C;AAWO,MAAM,qBAAqB,iBAChC,UACA,iBACA,QAAyE;AAEzE,QAAM,KAAK,mBAAmB;AAG9B,MAAI;AACJ,UAAQ,IAAA;AAAA,IACN,KAAK;AACH,YAAM,SAAS,aAAA;AACf,eAAS,KAAK,MAAM;AACpB,oBAAc;AACd;AAAA,IACF,KAAK;AAAA,IACL;AACE,oBAAc;AACd;AAAA,EAAA;AAIJ,QAAM,WAAW,4BAA4B,aAAa,MAAM;AAGhE,MAAI;AAGJ,SAAO,MAAM;AACX,YAAQ,eAAA;AAGR,QAAI,QAAQ,SAAS,UAAU,CAAC,OAAO,UAAU;AAI/C,YAAM,eAAe
,UAAU,OAAO,MAAM,MAAM;AAElD,YAAM,8BAA8B,UAAU,OAAO,MAAM,MAAM;AAGjE,aAAO,WAAW;AAAA,IACpB;AAGA,QAAI;AACJ,QAAI;AACF,qBAAe,MAAM,eAAe,UAAU,KAAK,MAAM;AAAA,IAC3D,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,0BAA0B,GAAG;AAChF,cAAM,IAAI,MAAM,uCAAuC;AAAA,MACzD;AACA,YAAM;AAAA,IACR;AAEA,QAAI,iBAAiB,QAAW;AAC9B;AAAA,IACF;AAGA,aAAS,eAAe,YAAY;AACpC,QAAI,CAAC,QAAQ;AAEX,YAAM,cAAc,MAAM,eAAe,UAAU,KAAK,MAAM;AAC9D,UAAI,gBAAgB,UAAa,YAAY,MAAM,CAAA,MAAK,MAAM,CAAC,GAAG;AAChE;AAAA,MACF;AACA,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAEA,QAAI,OAAO,SAAS,aAAa;AAE/B,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,OAAO;AAAA,QACb,MAAM,OAAO;AAAA,QACb,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,OAAO,OAAO;AAAA,QACd,OAAO,OAAO;AAAA,QACd,MAAM,OAAO;AAAA,MAAA;AAAA,IAEjB,OAAO;AAEL,YAAM,gBAAgB;AAGtB,YAAM;AAAA,QACJ,MAAM;AAAA,QACN,MAAM,cAAc;AAAA,QACpB,MAAM,cAAc;AAAA,QACpB,KAAK,cAAc;AAAA,QACnB,KAAK,cAAc;AAAA,QACnB,OAAO,cAAc;AAAA,QACrB,OAAO,cAAc;AAAA,QACrB,MAAM,cAAc;AAAA,QACpB,YAAY,OAAO,SAAc;AAE/B,cAAI,cAAc,UAAU;AAC1B,kBAAM,IAAI,MAAM,oFAAoF;AAAA,UACtG;AAEA,kBAAQ,MAAA;AAAA;AAAA,YAEN,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO,WAAW,SAAS,MAAM;AAAA,YACnC;AAAA;AAAA,YAEA,KAAK,UAAU;AAEb,oBAAM,aAAa,MAAM,eAAe,UAAU,cAAc,MAAM,MAAM;AAC5E,kBAAI,eAAe,QAAW;AAC5B,sBAAM,IAAI,MAAM,wDAAwD,cAAc,IAAI,EAAE;AAAA,cAC9F;AAEA,oBAAM,8BAA8B,UAAU,cAAc,MAAM,MAAM;AACxE,4BAAc,WAAW;AACzB,qBAAO;AAAA,YACT;AAAA;AAAA,YAEA,KAAK,YAAY;AAEf,oBAAMA,YAAW,2BAA2B,UAAU,cAAc,MAAM,QAAQ,aAAa;AAC/F,qBAAOA;AAAAA,YACT;AAAA,YACA;AACE,oBAAM,IAAI,MAAM,6BAA6B,IAAI,EAAE;AAAA,UAAA;AAAA,QAEzD;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AACF;"}
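The embedded `sourcesContent` above carries the new extractor implementation (`createTarExtractor`, lazy `getContent`, one consumption per file entry). As a rough illustration of how that API is driven (the archive file name and the ESM top-level-await context are assumptions, not part of this diff):

```typescript
// Sketch only: read a gzip-compressed tar entry by entry.
// getContent may be called at most once per file entry; entries whose
// content is never requested are skipped by the extractor.
import { createReadStream } from "fs";
import { createTarExtractor } from "tar-vern";

const reader = createReadStream("archive.tar.gz"); // assumed input file
for await (const item of createTarExtractor(reader, "gzip")) {
  if (item.kind === "file") {
    // "string" | "buffer" | "readable" are the supported content types
    const text = await item.getContent("string");
    console.log(item.path, text);
  } else {
    console.log(`${item.path}/`);
  }
}
```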
package/dist/packer.d.ts
CHANGED
@@ -1,11 +1,11 @@
 /*!
  * name: tar-vern
- * version: 1.
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
- * git.commit.hash:
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */

 import { Readable } from 'stream';

package/dist/types.d.ts
CHANGED
@@ -1,11 +1,11 @@
 /*!
  * name: tar-vern
- * version: 1.
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
- * git.commit.hash:
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */

 import { Readable } from 'stream';

package/dist/utils.d.ts
CHANGED
@@ -1,11 +1,11 @@
 /*!
  * name: tar-vern
- * version: 1.
+ * version: 1.1.0
  * description: Tape archiver library for Typescript
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/tar-vern.git
- * git.commit.hash:
+ * git.commit.hash: 6d4ff13b538b16545ccc55b2e74f8e5f73999a34
  */

 import { Readable } from 'stream';
@@ -76,12 +76,12 @@ export declare const storeReaderToFile: (reader: Readable, path: string, signal?
 /**
  * Create an async generator that yields EntryItem objects from filesystem paths
  * @param baseDir - Base directory path for resolving relative paths
- * @param relativePaths - Array of relative paths to include in the tar archive
+ * @param relativePaths - Array of relative paths to include in the tar archive (optional)
  * @param reflectStat - Whether to reflect file stats (Default: 'exceptName')
  * @param signal - Optional abort signal to cancel the operation
  * @returns Async generator that yields EntryItem objects
  */
- export declare const createEntryItemGenerator: (baseDir: string, relativePaths
+ export declare const createEntryItemGenerator: (baseDir: string, relativePaths?: string[], reflectStat?: ReflectStats, signal?: AbortSignal) => AsyncGenerator<EntryItem, void, unknown>;
 /**
  * Extract entries from a tar extractor to a directory on the filesystem
  * @param iterator - Async generator of extracted entry items

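The second hunk above is the substantive change in this file: `relativePaths` and the parameters after it are now optional on `createEntryItemGenerator`. A minimal sketch of calling the widened signature (the import path, the ESM top-level-await context, the illustrative paths, and the assumption that omitting `relativePaths` enumerates everything under `baseDir` are not confirmed by this diff):

```typescript
// Sketch only: enumerate filesystem entries as tar EntryItem objects.
import { createEntryItemGenerator } from "tar-vern";

const abort = new AbortController();

// 1.0.0-style call with an explicit path list still type-checks:
for await (const item of createEntryItemGenerator("./dist", ["index.js", "index.cjs"])) {
  console.log(item.kind, item.path);
}

// New in 1.1.0: relativePaths may be omitted entirely
// ('exceptName' is the documented default for reflectStat):
for await (const item of createEntryItemGenerator("./dist", undefined, "exceptName", abort.signal)) {
  console.log(item.kind, item.path);
}
```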
package/dist/utils.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAGlC,OAAO,EAAE,iBAAiB,EAAE,6BAA6B,EAAE,QAAQ,EAAE,aAAa,EAAE,YAAY,EAAE,0BAA0B,EAAE,SAAS,EAAE,kBAAkB,EAAqB,MAAM,SAAS,CAAC;AAGhM,eAAO,MAAM,QAAQ,MAAM,CAAC;AAC5B,eAAO,MAAM,UAAU,MAAM,CAAC;AAa9B;;;;GAIG;AACH,eAAO,MAAM,SAAS,GAAI,MAAM,MAAM,GAAG,MAAM,4BAE9C,CAAA;AAID;;;;;;;;GAQG;AACH,eAAO,MAAM,mBAAmB,GAC9B,MAAM,MAAM,EACZ,cAAc,YAAY,EAC1B,UAAU,0BAA0B,EACpC,SAAS,WAAW,KACnB,OAAO,CAAC,aAAa,CA4BvB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,cAAc,GACzB,MAAM,MAAM,EACZ,SAAS,MAAM,GAAG,MAAM,EACxB,UAAU,iBAAiB,EAC3B,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CAiBlB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,sBAAsB,GACjC,MAAM,MAAM,EACZ,UAAU,QAAQ,EAClB,UAAU,6BAA6B,EACvC,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CA8ClB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,uBAAuB,GAClC,MAAM,MAAM,EACZ,WAAW,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,EAChD,UAAU,6BAA6B,EACvC,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CA8ClB,CAAC;AAEF;;;;;;;;GAQG;AACH,eAAO,MAAM,kBAAkB,GAC7B,MAAM,MAAM,EACZ,UAAU,MAAM,EAChB,cAAc,YAAY,EAC1B,UAAU,iBAAiB,EAC3B,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CAqBlB,CAAC;AAIF;;;;;;GAMG;AACH,eAAO,MAAM,iBAAiB,GAAU,QAAQ,QAAQ,EAAE,MAAM,MAAM,EAAE,SAAS,WAAW,kBAG3F,CAAC;
+
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAGlC,OAAO,EAAE,iBAAiB,EAAE,6BAA6B,EAAE,QAAQ,EAAE,aAAa,EAAE,YAAY,EAAE,0BAA0B,EAAE,SAAS,EAAE,kBAAkB,EAAqB,MAAM,SAAS,CAAC;AAGhM,eAAO,MAAM,QAAQ,MAAM,CAAC;AAC5B,eAAO,MAAM,UAAU,MAAM,CAAC;AAa9B;;;;GAIG;AACH,eAAO,MAAM,SAAS,GAAI,MAAM,MAAM,GAAG,MAAM,4BAE9C,CAAA;AAID;;;;;;;;GAQG;AACH,eAAO,MAAM,mBAAmB,GAC9B,MAAM,MAAM,EACZ,cAAc,YAAY,EAC1B,UAAU,0BAA0B,EACpC,SAAS,WAAW,KACnB,OAAO,CAAC,aAAa,CA4BvB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,cAAc,GACzB,MAAM,MAAM,EACZ,SAAS,MAAM,GAAG,MAAM,EACxB,UAAU,iBAAiB,EAC3B,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CAiBlB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,sBAAsB,GACjC,MAAM,MAAM,EACZ,UAAU,QAAQ,EAClB,UAAU,6BAA6B,EACvC,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CA8ClB,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,uBAAuB,GAClC,MAAM,MAAM,EACZ,WAAW,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,EAChD,UAAU,6BAA6B,EACvC,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CA8ClB,CAAC;AAEF;;;;;;;;GAQG;AACH,eAAO,MAAM,kBAAkB,GAC7B,MAAM,MAAM,EACZ,UAAU,MAAM,EAChB,cAAc,YAAY,EAC1B,UAAU,iBAAiB,EAC3B,SAAS,WAAW,KACnB,OAAO,CAAC,QAAQ,CAqBlB,CAAC;AAIF;;;;;;GAMG;AACH,eAAO,MAAM,iBAAiB,GAAU,QAAQ,QAAQ,EAAE,MAAM,MAAM,EAAE,SAAS,WAAW,kBAG3F,CAAC;AAsDF;;;;;;;GAOG;AACH,eAAO,MAAM,wBAAwB,GACnC,SAAS,MAAM,EACf,gBAAgB,MAAM,EAAE,EACxB,cAAc,YAAY,EAC1B,SAAS,WAAW,KACnB,cAAc,CAAC,SAAS,EAAE,IAAI,EAAE,OAAO,CA8BzC,CAAC;AAEF;;;;;;GAMG;AACH,eAAO,MAAM,SAAS,GACpB,UAAU,cAAc,CAAC,kBAAkB,EAAE,IAAI,EAAE,OAAO,CAAC,EAC3D,UAAU,MAAM,EAChB,SAAS,WAAW,KACnB,OAAO,CAAC,IAAI,CA6Bd,CAAC"}

package/package.json
CHANGED
@@ -1,20 +1,20 @@
 {
   "git": {
     "tags": [
-      "1.
+      "1.1.0"
     ],
     "branches": [
       "main"
     ],
-    "version": "1.
+    "version": "1.1.0",
     "commit": {
-      "hash": "
-      "shortHash": "
-      "date": "2025-07-
+      "hash": "6d4ff13b538b16545ccc55b2e74f8e5f73999a34",
+      "shortHash": "6d4ff13",
+      "date": "2025-07-29T18:35:55+09:00Z",
       "message": "Merge branch 'develop'"
     }
   },
-  "version": "1.
+  "version": "1.1.0",
   "name": "tar-vern",
   "description": "Tape archiver library for Typescript",
   "keywords": [