bun-types 1.3.6-canary.20260109T140758 → 1.3.6-canary.20260111T140550
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bun.d.ts +290 -0
- package/docs/runtime/archive.mdx +444 -0
- package/docs/runtime/ffi.mdx +2 -0
- package/package.json +1 -1
package/bun.d.ts
CHANGED

@@ -6965,6 +6965,296 @@ declare module "bun" {
     match(str: string): boolean;
   }

+  /**
+   * Input data for creating an archive. Can be:
+   * - An object mapping paths to file contents (string, Blob, TypedArray, or ArrayBuffer)
+   * - A Blob containing existing archive data
+   * - A TypedArray or ArrayBuffer containing existing archive data
+   */
+  type ArchiveInput = Record<string, BlobPart> | Blob | ArrayBufferView | ArrayBufferLike;
+
+  /**
+   * Compression format for archive output.
+   * - `"gzip"` - Compress with gzip
+   * - `true` - Same as `"gzip"`
+   * - `false` - Explicitly disable compression (no compression)
+   * - `undefined` - No compression (default behavior when omitted)
+   *
+   * Both `false` and `undefined` result in no compression; `false` can be used
+   * to explicitly indicate "no compression" in code where the intent should be clear.
+   */
+  type ArchiveCompression = "gzip" | boolean;
+
+  /**
+   * Options for extracting archive contents.
+   */
+  interface ArchiveExtractOptions {
+    /**
+     * Glob pattern(s) to filter which entries are extracted.
+     * Uses the same syntax as {@link Bun.Glob}, including support for wildcards (`*`, `**`),
+     * character classes (`[abc]`), alternation (`{a,b}`), and negation (`!pattern`).
+     *
+     * Patterns are matched against archive entry paths normalized to use forward slashes (`/`),
+     * regardless of the host operating system. Always write patterns using `/` as the separator.
+     *
+     * - Positive patterns: Only entries matching at least one pattern will be extracted.
+     * - Negative patterns (prefixed with `!`): Entries matching these patterns will be excluded.
+     *   Negative patterns are applied after positive patterns.
+     *
+     * If not specified, all entries are extracted.
+     *
+     * @example
+     * ```ts
+     * // Extract only TypeScript files
+     * await archive.extract("./out", { glob: "**" + "/*.ts" });
+     *
+     * // Extract files from multiple directories
+     * await archive.extract("./out", { glob: ["src/**", "lib/**"] });
+     *
+     * // Exclude node_modules using negative pattern
+     * await archive.extract("./out", { glob: ["**", "!node_modules/**"] });
+     *
+     * // Extract source files but exclude tests
+     * await archive.extract("./out", { glob: ["src/**", "!**" + "/*.test.ts"] });
+     * ```
+     */
+    glob?: string | readonly string[];
+  }
+
+  /**
+   * A class for creating and extracting tar archives with optional gzip compression.
+   *
+   * `Bun.Archive` provides a fast, native implementation for working with tar archives.
+   * It supports creating archives from in-memory data or extracting existing archives
+   * to disk or memory.
+   *
+   * @example
+   * **Create an archive from an object:**
+   * ```ts
+   * const archive = Bun.Archive.from({
+   *   "hello.txt": "Hello, World!",
+   *   "data.json": JSON.stringify({ foo: "bar" }),
+   *   "binary.bin": new Uint8Array([1, 2, 3, 4]),
+   * });
+   * ```
+   *
+   * @example
+   * **Extract an archive to disk:**
+   * ```ts
+   * const archive = Bun.Archive.from(tarballBytes);
+   * const entryCount = await archive.extract("./output");
+   * console.log(`Extracted ${entryCount} entries`);
+   * ```
+   *
+   * @example
+   * **Get archive contents as a Map of File objects:**
+   * ```ts
+   * const archive = Bun.Archive.from(tarballBytes);
+   * const entries = await archive.files();
+   * for (const [path, file] of entries) {
+   *   console.log(path, await file.text());
+   * }
+   * ```
+   *
+   * @example
+   * **Write a gzipped archive directly to disk:**
+   * ```ts
+   * await Bun.Archive.write("bundle.tar.gz", {
+   *   "src/index.ts": sourceCode,
+   *   "package.json": packageJson,
+   * }, "gzip");
+   * ```
+   */
+  export class Archive {
+    /**
+     * Create an `Archive` instance from input data.
+     *
+     * @param data - The input data for the archive:
+     * - **Object**: Creates a new tarball with the object's keys as file paths and values as file contents
+     * - **Blob/TypedArray/ArrayBuffer**: Wraps existing archive data (tar or tar.gz)
+     *
+     * @returns A new `Archive` instance
+     *
+     * @example
+     * **From an object (creates new tarball):**
+     * ```ts
+     * const archive = Bun.Archive.from({
+     *   "hello.txt": "Hello, World!",
+     *   "nested/file.txt": "Nested content",
+     * });
+     * ```
+     *
+     * @example
+     * **From existing archive data:**
+     * ```ts
+     * const response = await fetch("https://example.com/package.tar.gz");
+     * const archive = Bun.Archive.from(await response.blob());
+     * ```
+     */
+    static from(data: ArchiveInput): Archive;
+
+    /**
+     * Create and write an archive directly to disk in one operation.
+     *
+     * This is more efficient than creating an archive and then writing it separately,
+     * as it streams the data directly to disk.
+     *
+     * @param path - The file path to write the archive to
+     * @param data - The input data for the archive (same as `Archive.from()`)
+     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     *
+     * @returns A promise that resolves when the write is complete
+     *
+     * @example
+     * **Write uncompressed tarball:**
+     * ```ts
+     * await Bun.Archive.write("output.tar", {
+     *   "file1.txt": "content1",
+     *   "file2.txt": "content2",
+     * });
+     * ```
+     *
+     * @example
+     * **Write gzipped tarball:**
+     * ```ts
+     * await Bun.Archive.write("output.tar.gz", files, "gzip");
+     * ```
+     */
+    static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
+
+    /**
+     * Extract the archive contents to a directory on disk.
+     *
+     * Creates the target directory and any necessary parent directories if they don't exist.
+     * Existing files will be overwritten.
+     *
+     * @param path - The directory path to extract to
+     * @param options - Optional extraction options
+     * @param options.glob - Glob pattern(s) to filter entries (positive patterns include, negative patterns starting with `!` exclude)
+     * @returns A promise that resolves with the number of entries extracted (files, directories, and symlinks)
+     *
+     * @example
+     * **Extract all entries:**
+     * ```ts
+     * const archive = Bun.Archive.from(tarballBytes);
+     * const count = await archive.extract("./extracted");
+     * console.log(`Extracted ${count} entries`);
+     * ```
+     *
+     * @example
+     * **Extract only TypeScript files:**
+     * ```ts
+     * const count = await archive.extract("./src", { glob: "**" + "/*.ts" });
+     * ```
+     *
+     * @example
+     * **Extract everything except tests:**
+     * ```ts
+     * const count = await archive.extract("./dist", { glob: ["**", "!**" + "/*.test.*"] });
+     * ```
+     *
+     * @example
+     * **Extract source files but exclude tests:**
+     * ```ts
+     * const count = await archive.extract("./output", {
+     *   glob: ["src/**", "lib/**", "!**" + "/*.test.ts", "!**" + "/__tests__/**"]
+     * });
+     * ```
+     */
+    extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
+
+    /**
+     * Get the archive contents as a `Blob`.
+     *
+     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * @returns A promise that resolves with the archive data as a Blob
+     *
+     * @example
+     * **Get uncompressed tarball:**
+     * ```ts
+     * const blob = await archive.blob();
+     * ```
+     *
+     * @example
+     * **Get gzipped tarball:**
+     * ```ts
+     * const gzippedBlob = await archive.blob("gzip");
+     * ```
+     */
+    blob(compress?: ArchiveCompression): Promise<Blob>;
+
+    /**
+     * Get the archive contents as a `Uint8Array`.
+     *
+     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * @returns A promise that resolves with the archive data as a Uint8Array
+     *
+     * @example
+     * **Get uncompressed tarball bytes:**
+     * ```ts
+     * const bytes = await archive.bytes();
+     * ```
+     *
+     * @example
+     * **Get gzipped tarball bytes:**
+     * ```ts
+     * const gzippedBytes = await archive.bytes("gzip");
+     * ```
+     */
+    bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
+
+    /**
+     * Get the archive contents as a `Map` of `File` objects.
+     *
+     * Each file in the archive is returned as a `File` object with:
+     * - `name`: The file path within the archive
+     * - `lastModified`: The file's modification time from the archive
+     * - Standard Blob methods (`text()`, `arrayBuffer()`, `stream()`, etc.)
+     *
+     * Only regular files are included; directories are not returned.
+     * File contents are loaded into memory, so for large archives consider using `extract()` instead.
+     *
+     * @param glob - Optional glob pattern(s) to filter files. Supports the same syntax as {@link Bun.Glob},
+     * including negation patterns (prefixed with `!`). Patterns are matched against paths normalized
+     * to use forward slashes (`/`).
+     * @returns A promise that resolves with a Map where keys are file paths (always using forward slashes `/` as separators) and values are File objects
+     *
+     * @example
+     * **Get all files:**
+     * ```ts
+     * const entries = await archive.files();
+     * for (const [path, file] of entries) {
+     *   console.log(`${path}: ${file.size} bytes`);
+     * }
+     * ```
+     *
+     * @example
+     * **Filter by glob pattern:**
+     * ```ts
+     * const tsFiles = await archive.files("**" + "/*.ts");
+     * const srcFiles = await archive.files(["src/**", "lib/**"]);
+     * ```
+     *
+     * @example
+     * **Exclude files with negative patterns:**
+     * ```ts
+     * // Get all source files except tests
+     * const srcFiles = await archive.files(["src/**", "!**" + "/*.test.ts"]);
+     * ```
+     *
+     * @example
+     * **Read file contents:**
+     * ```ts
+     * const entries = await archive.files();
+     * const readme = entries.get("README.md");
+     * if (readme) {
+     *   console.log(await readme.text());
+     * }
+     * ```
+     */
+    files(glob?: string | readonly string[]): Promise<Map<string, File>>;
+  }
+
   /**
    * Generate a UUIDv7, which is a sequential ID based on the current timestamp with a random component.
    *
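Taken together, the declarations above describe the new `Bun.Archive` surface added in this release. As a minimal sketch of how the typed signatures compose (illustrative only, run on the Bun runtime; the file names and the `"./out"` destination are placeholders, not taken from the diff):

```ts
// Create a new tarball from an in-memory map of paths to contents (ArchiveInput).
const created = Bun.Archive.from({
  "README.md": "# demo",
  "data.bin": new Uint8Array([1, 2, 3]),
});

// ArchiveCompression is "gzip" | boolean, so "gzip" and true request gzip here.
const gzipped = await created.bytes("gzip");

// Wrap existing archive data (Blob/TypedArray/ArrayBuffer) and read it back.
const reopened = Bun.Archive.from(gzipped);
const fileMap = await reopened.files(); // Map<string, File>, regular files only
const count = await reopened.extract("./out", {
  glob: ["**", "!**/*.bin"], // positive pattern plus a negative exclusion
});
console.log(fileMap.size, count);
```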
package/docs/runtime/archive.mdx
ADDED

@@ -0,0 +1,444 @@
---
title: Archive
description: Create and extract tar archives with Bun's fast native implementation
---

Bun provides a fast, native implementation for working with tar archives through `Bun.Archive`. It supports creating archives from in-memory data, extracting archives to disk, and reading archive contents without extraction.

## Quickstart

**Create an archive from files:**

```ts
const archive = Bun.Archive.from({
  "hello.txt": "Hello, World!",
  "data.json": JSON.stringify({ foo: "bar" }),
  "nested/file.txt": "Nested content",
});

// Write to disk
await Bun.Archive.write("bundle.tar", archive);
```

**Extract an archive:**

```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = Bun.Archive.from(tarball);
const entryCount = await archive.extract("./output");
console.log(`Extracted ${entryCount} entries`);
```

**Read archive contents without extracting:**

```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = Bun.Archive.from(tarball);
const files = await archive.files();

for (const [path, file] of files) {
  console.log(`${path}: ${await file.text()}`);
}
```

## Creating Archives

Use `Bun.Archive.from()` to create an archive from an object where keys are file paths and values are file contents:

```ts
const archive = Bun.Archive.from({
  "README.md": "# My Project",
  "src/index.ts": "console.log('Hello');",
  "package.json": JSON.stringify({ name: "my-project" }),
});
```

File contents can be:

- **Strings** - Text content
- **Blobs** - Binary data
- **ArrayBufferViews** (e.g., `Uint8Array`) - Raw bytes
- **ArrayBuffers** - Raw binary data

```ts
const data = "binary data";
const arrayBuffer = new ArrayBuffer(8);

const archive = Bun.Archive.from({
  "text.txt": "Plain text",
  "blob.bin": new Blob([data]),
  "bytes.bin": new Uint8Array([1, 2, 3, 4]),
  "buffer.bin": arrayBuffer,
});
```

### Writing Archives to Disk

Use `Bun.Archive.write()` to create and write an archive in one operation:

```ts
// Write uncompressed tar
await Bun.Archive.write("output.tar", {
  "file1.txt": "content1",
  "file2.txt": "content2",
});

// Write gzipped tar
const files = { "src/index.ts": "console.log('Hello');" };
await Bun.Archive.write("output.tar.gz", files, "gzip");
```

### Getting Archive Bytes

Get the archive data as bytes or a Blob:

```ts
const files = { "hello.txt": "Hello, World!" };
const archive = Bun.Archive.from(files);

// As Uint8Array
const bytes = await archive.bytes();

// As Blob
const blob = await archive.blob();

// With gzip compression
const gzippedBytes = await archive.bytes("gzip");
const gzippedBlob = await archive.blob("gzip");
```

## Extracting Archives

### From Existing Archive Data

Create an archive from existing tar/tar.gz data:

```ts
// From a file
const tarball = await Bun.file("package.tar.gz").bytes();
const archiveFromFile = Bun.Archive.from(tarball);
```

```ts
// From a fetch response
const response = await fetch("https://example.com/archive.tar.gz");
const archiveFromFetch = Bun.Archive.from(await response.blob());
```

### Extracting to Disk

Use `.extract()` to write all files to a directory:

```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = Bun.Archive.from(tarball);
const count = await archive.extract("./extracted");
console.log(`Extracted ${count} entries`);
```

The target directory is created automatically if it doesn't exist. Existing files are overwritten. The returned count includes files, directories, and symlinks (on POSIX systems).

**Note**: On Windows, symbolic links in archives are always skipped during extraction. Bun does not attempt to create them regardless of privilege level. On Linux and macOS, symlinks are extracted normally.

**Security note**: Bun.Archive validates paths during extraction, rejecting absolute paths (POSIX `/`, Windows drive letters like `C:\` or `C:/`, and UNC paths like `\\server\share`). Path traversal components (`..`) are normalized away (e.g., `dir/sub/../file` becomes `dir/file`) to prevent directory escape attacks.

### Filtering Extracted Files

Use glob patterns to extract only specific files. Patterns are matched against archive entry paths normalized to use forward slashes (`/`). Positive patterns specify what to include, and negative patterns (prefixed with `!`) specify what to exclude. Negative patterns are applied after positive patterns, so **using only negative patterns will match nothing** (you must include a positive pattern like `**` first):

```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = Bun.Archive.from(tarball);

// Extract only TypeScript files
const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" });

// Extract files from multiple directories
const multiCount = await archive.extract("./extracted", {
  glob: ["src/**", "lib/**"],
});
```

Use negative patterns (prefixed with `!`) to exclude files. When mixing positive and negative patterns, entries must match at least one positive pattern and not match any negative pattern:

```ts
// Extract everything except node_modules
const distCount = await archive.extract("./extracted", {
  glob: ["**", "!node_modules/**"],
});

// Extract source files but exclude tests
const srcCount = await archive.extract("./extracted", {
  glob: ["src/**", "!**/*.test.ts", "!**/__tests__/**"],
});
```

## Reading Archive Contents

### Get All Files

Use `.files()` to get archive contents as a `Map` of `File` objects without extracting to disk. Unlike `extract()` which processes all entry types, `files()` returns only regular files (no directories):

```ts
const tarball = await Bun.file("package.tar.gz").bytes();
const archive = Bun.Archive.from(tarball);
const files = await archive.files();

for (const [path, file] of files) {
  console.log(`${path}: ${file.size} bytes`);
  console.log(await file.text());
}
```

Each `File` object includes:

- `name` - The file path within the archive (always uses forward slashes `/` as separators)
- `size` - File size in bytes
- `lastModified` - Modification timestamp
- Standard `Blob` methods: `text()`, `arrayBuffer()`, `stream()`, etc.

**Note**: `files()` loads file contents into memory. For large archives, consider using `extract()` to write directly to disk instead.

### Error Handling

Archive operations can fail due to corrupted data, I/O errors, or invalid paths. Use try/catch to handle these cases:

```ts
try {
  const tarball = await Bun.file("package.tar.gz").bytes();
  const archive = Bun.Archive.from(tarball);
  const count = await archive.extract("./output");
  console.log(`Extracted ${count} entries`);
} catch (e: unknown) {
  if (e instanceof Error) {
    const error = e as Error & { code?: string };
    if (error.code === "EACCES") {
      console.error("Permission denied");
    } else if (error.code === "ENOSPC") {
      console.error("Disk full");
    } else {
      console.error("Archive error:", error.message);
    }
  } else {
    console.error("Archive error:", String(e));
  }
}
```

Common error scenarios:

- **Corrupted/truncated archives** - `Archive.from()` loads the archive data; errors may be deferred until read/extract operations
- **Permission denied** - `extract()` throws if the target directory is not writable
- **Disk full** - `extract()` throws if there's insufficient space
- **Invalid paths** - Operations throw for malformed file paths

The count returned by `extract()` includes all successfully written entries (files, directories, and symlinks on POSIX systems).

**Security note**: Bun.Archive automatically validates paths during extraction. Absolute paths (POSIX `/`, Windows drive letters, UNC paths) and unsafe symlink targets are rejected. Path traversal components (`..`) are normalized away to prevent directory escape.

For additional security with untrusted archives, you can enumerate and validate paths before extraction:

```ts
const archive = Bun.Archive.from(untrustedData);
const files = await archive.files();

// Optional: Custom validation for additional checks
for (const [path] of files) {
  // Example: Reject hidden files
  if (path.startsWith(".") || path.includes("/.")) {
    throw new Error(`Hidden file rejected: ${path}`);
  }
  // Example: Whitelist specific directories
  if (!path.startsWith("src/") && !path.startsWith("lib/")) {
    throw new Error(`Unexpected path: ${path}`);
  }
}

// Extract to a controlled destination
await archive.extract("./safe-output");
```

When using `files()` with a glob pattern, an empty `Map` is returned if no files match:

```ts
const matches = await archive.files("*.nonexistent");
if (matches.size === 0) {
  console.log("No matching files found");
}
```

### Filtering with Glob Patterns

Pass a glob pattern to filter which files are returned:

```ts
// Get only TypeScript files
const tsFiles = await archive.files("**/*.ts");

// Get files in src directory
const srcFiles = await archive.files("src/*");

// Get all JSON files (recursive)
const jsonFiles = await archive.files("**/*.json");

// Get multiple file types with array of patterns
const codeFiles = await archive.files(["**/*.ts", "**/*.js"]);
```

Supported glob patterns (subset of [Bun.Glob](/docs/api/glob) syntax):

- `*` - Match any characters except `/`
- `**` - Match any characters including `/`
- `?` - Match single character
- `[abc]` - Match character set
- `{a,b}` - Match alternatives
- `!pattern` - Exclude files matching pattern (negation). Must be combined with positive patterns; using only negative patterns matches nothing.

See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and advanced patterns.

## Compression

Bun.Archive supports gzip compression for both reading and writing:

```ts
// Reading: automatically detects gzip
const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
const archive = Bun.Archive.from(gzippedTarball);

// Writing: specify compression
const files = { "hello.txt": "Hello, World!" };
await Bun.Archive.write("output.tar.gz", files, "gzip");

// Getting bytes: specify compression
const gzippedBytes = await archive.bytes("gzip");
```

The compression argument accepts:

- `"gzip"` - Enable gzip compression
- `true` - Same as `"gzip"`
- `false` or `undefined` - No compression

## Examples

### Bundle Project Files

```ts
import { Glob } from "bun";

// Collect source files
const files: Record<string, string> = {};
const glob = new Glob("src/**/*.ts");

for await (const path of glob.scan(".")) {
  // Normalize path separators to forward slashes for cross-platform compatibility
  const archivePath = path.replaceAll("\\", "/");
  files[archivePath] = await Bun.file(path).text();
}

// Add package.json
files["package.json"] = await Bun.file("package.json").text();

// Create compressed archive
await Bun.Archive.write("bundle.tar.gz", files, "gzip");
```

### Extract and Process npm Package

```ts
const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
const archive = Bun.Archive.from(await response.blob());

// Get package.json
const files = await archive.files("package/package.json");
const packageJson = files.get("package/package.json");

if (packageJson) {
  const pkg = JSON.parse(await packageJson.text());
  console.log(`Package: ${pkg.name}@${pkg.version}`);
}
```

### Create Archive from Directory

```ts
import { readdir } from "node:fs/promises";
import { join } from "node:path";

async function archiveDirectory(dir: string): Promise<Bun.Archive> {
  const files: Record<string, Blob> = {};

  async function walk(currentDir: string, prefix: string = "") {
    const entries = await readdir(currentDir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = join(currentDir, entry.name);
      const archivePath = prefix ? `${prefix}/${entry.name}` : entry.name;

      if (entry.isDirectory()) {
        await walk(fullPath, archivePath);
      } else {
        files[archivePath] = Bun.file(fullPath);
      }
    }
  }

  await walk(dir);
  return Bun.Archive.from(files);
}

const archive = await archiveDirectory("./my-project");
await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
```

## Reference

> **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.

```ts
type ArchiveCompression = "gzip" | boolean;

type ArchiveInput =
  | Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
  | Blob
  | Bun.ArrayBufferView
  | ArrayBufferLike;

interface ArchiveExtractOptions {
  /** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix. */
  glob?: string | readonly string[];
}

class Archive {
  /**
   * Create an Archive from input data
   */
  static from(data: ArchiveInput): Archive;

  /**
   * Write an archive directly to disk
   */
  static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;

  /**
   * Extract archive to a directory
   * @returns Number of entries extracted (files, directories, and symlinks)
   */
  extract(path: string, options?: ArchiveExtractOptions): Promise<number>;

  /**
   * Get archive as a Blob
   */
  blob(compress?: ArchiveCompression): Promise<Blob>;

  /**
   * Get archive as a Uint8Array
   */
  bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;

  /**
   * Get archive contents as File objects (regular files only, no directories)
   */
  files(glob?: string | readonly string[]): Promise<Map<string, File>>;
}
```
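The Compression section of the new doc notes that the compression argument also accepts booleans. A small sketch of the equivalent call forms, based on the `ArchiveCompression` type and the behavior documented above:

```ts
const archive = Bun.Archive.from({ "hello.txt": "Hello, World!" });

const gzipped = await archive.bytes("gzip"); // gzip
const alsoGzipped = await archive.bytes(true); // true is documented as the same as "gzip"

const plain = await archive.bytes(false); // explicitly no compression
const alsoPlain = await archive.bytes(); // omitting the argument also means no compression

console.log(gzipped.length, alsoGzipped.length, plain.length, alsoPlain.length);
```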
package/docs/runtime/ffi.mdx
CHANGED

@@ -358,6 +358,8 @@ Bun represents [pointers](<https://en.wikipedia.org/wiki/Pointer_(computer_progr
 
 **Why not `BigInt`?** `BigInt` is slower. JavaScript engines allocate a separate `BigInt` which means they can't fit into a regular JavaScript value. If you pass a `BigInt` to a function, it will be converted to a `number`
 
+**Windows Note**: The Windows API type HANDLE does not represent a virtual address, and using `ptr` for it will _not_ work as expected. Use `u64` to safely represent HANDLE values.
+
 </Accordion>
 
 To convert from a `TypedArray` to a pointer:
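To illustrate the added note, here is a minimal Windows-only sketch of declaring a HANDLE-returning call with `u64` instead of `ptr` via `bun:ffi`; the `kernel32.dll`/`GetCurrentProcess` binding is an example chosen for illustration, not taken from the diff:

```ts
import { dlopen, FFIType } from "bun:ffi";

// Assumption: GetCurrentProcess() returns a HANDLE. Per the note above, a HANDLE
// is an opaque value rather than a virtual address, so it is declared as u64, not ptr.
const kernel32 = dlopen("kernel32.dll", {
  GetCurrentProcess: {
    args: [],
    returns: FFIType.u64,
  },
});

const handle = kernel32.symbols.GetCurrentProcess();
console.log("HANDLE value:", handle);
```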
package/package.json
CHANGED