@resourcexjs/type 2.5.0 → 2.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +91 -42
- package/dist/index.js.map +2 -2
- package/package.json +3 -3
package/README.md
CHANGED
@@ -10,14 +10,13 @@ bun add @resourcexjs/type
 
 ## Overview
 
-The `@resourcexjs/type` package provides the type system for ResourceX, managing how different resource types are resolved and executed.
+The `@resourcexjs/type` package provides the type system for ResourceX, managing how different resource types are resolved and executed in sandboxed environments.
 
 ### Key Concepts
 
 - **BundledType**: Pre-bundled resource type ready for sandbox execution
 - **TypeHandlerChain**: Type registry managing type lookup and registration
-- **
-- **ResolvedResource**: Result object with execute function and optional schema
+- **ResolveContext**: Serializable context passed to resolvers in sandbox
 - **Builtin Types**: Text, JSON, and Binary types are included by default
 
 ## Usage
@@ -38,10 +37,12 @@ chain.canHandle("binary"); // true
 // Builtin aliases
 chain.canHandle("txt"); // true (alias for text)
 chain.canHandle("config"); // true (alias for json)
+chain.canHandle("bin"); // true (alias for binary)
 
 // Get handler
 const handler = chain.getHandler("text");
 console.log(handler.name); // "text"
+console.log(handler.code); // bundled resolver code
 
 // Get all supported types
 const types = chain.getSupportedTypes();
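For orientation, a minimal sketch that enumerates every registered handler using the two lookup methods documented above (assuming `getSupportedTypes()` returns names accepted by `getHandler()`):

```typescript
import { TypeHandlerChain } from "@resourcexjs/type";

const chain = TypeHandlerChain.create();

// Walk the registry and print each handler's metadata.
for (const typeName of chain.getSupportedTypes()) {
  const handler = chain.getHandler(typeName);
  console.log(handler.name, handler.aliases ?? [], handler.description);
}
```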
@@ -52,7 +53,6 @@ const types = chain.getSupportedTypes();
 
 ```typescript
 import { TypeHandlerChain, bundleResourceType } from "@resourcexjs/type";
-import type { BundledType } from "@resourcexjs/type";
 
 // Bundle a resource type from source file
 const promptType = await bundleResourceType("./prompt.type.ts");
@@ -60,6 +60,9 @@ const promptType = await bundleResourceType("./prompt.type.ts");
 // Register with chain
 const chain = TypeHandlerChain.create();
 chain.register(promptType);
+
+// Now the chain can handle the new type
+chain.canHandle("prompt"); // true
 ```
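A sketch of the full bundle-and-register round trip, inspecting the resulting handler (field names follow the `BundledType` shape in the API reference):

```typescript
import { TypeHandlerChain, bundleResourceType } from "@resourcexjs/type";

// Bundle a custom type, register it, then fetch it back by name.
const promptType = await bundleResourceType("./prompt.type.ts");
const chain = TypeHandlerChain.create();
chain.register(promptType);

const handler = chain.getHandler("prompt");
console.log(handler.description); // "AI Prompt template"
console.log(typeof handler.code); // "string" (the bundled resolver source)
```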
 
 ### Bundling Resource Types
@@ -71,7 +74,7 @@ import { bundleResourceType } from "@resourcexjs/type";
 
 // Bundle from source file
 const myType = await bundleResourceType("./my-resource.type.ts");
-//
+// Returns: { name, aliases, description, schema, code }
 ```
 
 **Source file format (`my-resource.type.ts`):**
@@ -81,7 +84,7 @@ export default {
   name: "prompt",
   aliases: ["deepractice-prompt"],
   description: "AI Prompt template",
-
+
   async resolve(ctx) {
     // ctx.manifest - resource metadata
     // ctx.files - extracted files as Record<string, Uint8Array>
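To make the `ctx` fields concrete, a minimal resolver that uses both of them (the `"content"` file name is just the convention used by the examples in this README):

```typescript
export default {
  name: "greeting",
  description: "Greets using the resource name",

  async resolve(ctx) {
    // ctx.manifest carries the resource identity (name, type, version, ...).
    // ctx.files maps extracted file names to raw bytes.
    const text = new TextDecoder().decode(ctx.files["content"]);
    return `${ctx.manifest.name} says: ${text}`;
  },
};
```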
@@ -91,6 +94,22 @@ export default {
 };
 ```
 
+### Accessing Builtin Types Directly
+
+```typescript
+import { textType, jsonType, binaryType, builtinTypes } from "@resourcexjs/type";
+
+// Individual types
+console.log(textType.name); // "text"
+console.log(jsonType.aliases); // ["config", "manifest"]
+console.log(binaryType.description); // "Binary content"
+
+// All builtin types as array
+for (const type of builtinTypes) {
+  console.log(type.name);
+}
+```
+
 ## API Reference
 
 ### `TypeHandlerChain`
@@ -125,6 +144,7 @@ Check if a type is supported.
 
 ```typescript
 chain.canHandle("text"); // true
+chain.canHandle("unknown"); // false
 ```
 
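`canHandle` pairs naturally with `getHandler`, which (per the Error Handling section below) throws `ResourceTypeError` for unsupported types; a defensive sketch:

```typescript
// Probe before fetching a handler to stay off the throw path.
const typeName = "prompt";
if (chain.canHandle(typeName)) {
  const handler = chain.getHandler(typeName);
  console.log(`resolved to ${handler.name}`);
} else {
  console.warn(`no handler for "${typeName}"`);
}
```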
 ##### `getHandler(typeName: string): BundledType`
@@ -192,6 +212,23 @@ interface BundledType {
 }
 ```
 
+### ResolveContext
+
+Context passed to resolver in sandbox:
+
+```typescript
+interface ResolveContext {
+  manifest: {
+    domain: string;
+    path?: string;
+    name: string;
+    type: string;
+    version: string;
+  };
+  files: Record<string, Uint8Array>;
+}
+```
+
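Because the context is plain serializable data, a resolver can be exercised directly in tests; a sketch with a hand-built context (the `localhost` domain mirrors the default used elsewhere in ResourceX, an assumption here):

```typescript
import type { ResolveContext } from "@resourcexjs/type";
import promptType from "./prompt.type.ts";

// Hand-built context: no sandbox, no archive, just the two fields.
const ctx: ResolveContext = {
  manifest: {
    domain: "localhost",
    name: "greeting",
    type: "prompt",
    version: "1.0.0",
  },
  files: { content: new TextEncoder().encode("Hello!") },
};

console.log(await promptType.resolve(ctx)); // "Hello!"
```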
 ### ResolvedResource
 
 Result object returned after resolution:
@@ -204,20 +241,16 @@ interface ResolvedResource<TArgs = void, TResult = unknown> {
 }
 ```
 
-### ResolveContext
+### ResourceType
 
-Context passed to resolver in sandbox:
+Interface for defining custom types (before bundling):
 
 ```typescript
-interface ResolveContext {
-  manifest: {
-    domain: string;
-    path?: string;
-    name: string;
-    type: string;
-    version: string;
-  };
-  files: Record<string, Uint8Array>;
+interface ResourceType<TArgs = void, TResult = unknown> {
+  name: string;
+  aliases?: string[];
+  description: string;
+  resolver: ResourceResolver<TArgs, TResult>;
 }
 ```
 
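A minimal typed definition against this interface; the exact `ResourceResolver` signature is an assumption here (taken to receive a `ResolveContext`, as in the source-file examples above):

```typescript
import type { ResourceType } from "@resourcexjs/type";

const upperType: ResourceType<void, string> = {
  name: "upper",
  aliases: ["uppercase"],
  description: "Uppercased text content",
  // Assumed: the resolver receives a ResolveContext and returns TResult.
  resolver: async (ctx) =>
    new TextDecoder().decode(ctx.files["content"]).toUpperCase(),
};
```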
@@ -236,29 +269,17 @@ type IsolatorType = "none" | "srt" | "cloudflare" | "e2b";
 
 ## Builtin Types
 
-
+Three builtin types are included by default:
 
-### Text Type
-
-- **Name**: `text`
-- **Aliases**: `txt`, `plaintext`
-- **Resolves to**: `string`
-
-### JSON Type
-
-- **Name**: `json`
-- **Aliases**: `config`, `manifest`
-- **Resolves to**: `unknown`
-
-### Binary Type
-
-- **Name**: `binary`
-- **Aliases**: `bin`, `blob`, `raw`
-- **Resolves to**: `Uint8Array`
+| Type     | Aliases              | Resolves to  | Description        |
+| -------- | -------------------- | ------------ | ------------------ |
+| `text`   | `txt`, `plaintext`   | `string`     | Plain text content |
+| `json`   | `config`, `manifest` | `unknown`    | JSON content       |
+| `binary` | `bin`, `blob`, `raw` | `Uint8Array` | Binary content     |
 
 ## Creating Custom Types
 
-### Example:
+### Example: Simple Type (No Arguments)
 
 **prompt.type.ts:**
 
@@ -267,7 +288,7 @@ export default {
   name: "prompt",
   aliases: ["deepractice-prompt"],
   description: "AI Prompt template",
-
+
   async resolve(ctx) {
     const content = new TextDecoder().decode(ctx.files["content"]);
     return content;
@@ -275,7 +296,7 @@ export default {
 };
 ```
 
-### Example:
+### Example: Type with Schema
 
 **tool.type.ts:**
 
@@ -291,9 +312,12 @@ export default {
     },
     required: ["query"],
   },
-
-
-
+
+  async resolve(ctx) {
+    // The schema is used by the executor to validate/render UI
+    // The resolver returns the tool definition
+    const code = new TextDecoder().decode(ctx.files["content"]);
+    return { code, manifest: ctx.manifest };
   },
 };
 ```
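After bundling, the schema declared in the source file travels with the type, so consumers can read it off the `BundledType`; a short sketch:

```typescript
import { bundleResourceType } from "@resourcexjs/type";

const toolType = await bundleResourceType("./tool.type.ts");

// The JSON schema from tool.type.ts is carried on the bundle.
console.log(toolType.schema?.required); // ["query"]
console.log(Object.keys(toolType.schema?.properties ?? {})); // ["query"]
```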
@@ -308,10 +332,35 @@ try {
 } catch (error) {
   if (error instanceof ResourceTypeError) {
     console.error("Type error:", error.message);
+    // "Unsupported resource type: unknown"
   }
 }
 ```
 
+## Exports
+
+```typescript
+// Types
+export type {
+  ResourceType,
+  ResourceResolver,
+  ResolvedResource,
+  ResolveContext,
+  JSONSchema,
+  JSONSchemaProperty,
+  BundledType,
+  IsolatorType,
+} from "@resourcexjs/type";
+
+// Classes and Functions
+export { TypeHandlerChain } from "@resourcexjs/type";
+export { bundleResourceType } from "@resourcexjs/type";
+export { ResourceTypeError } from "@resourcexjs/type";
+
+// Builtin Types
+export { textType, jsonType, binaryType, builtinTypes } from "@resourcexjs/type";
+```
+
 ## License
 
-
+Apache-2.0
package/dist/index.js.map
CHANGED
@@ -3,12 +3,12 @@
"sources": ["../src/bundler.ts", "../../core/dist/index.js", "../src/errors.ts", "../src/builtinTypes.ts", "../src/TypeHandlerChain.ts"],
"sourcesContent": [
"import { readFile } from \"node:fs/promises\";\nimport { resolve, isAbsolute } from \"node:path\";\nimport type { BundledType, JSONSchema } from \"./types.js\";\n\n/**\n * ResourceType source file structure.\n * This is what users write in their .type.ts files.\n */\ninterface ResourceTypeSource {\n name: string;\n aliases?: string[];\n description: string;\n schema?: JSONSchema;\n resolve: (rxr: unknown) => Promise<unknown>;\n}\n\n/**\n * Bundle a resource type from a source file.\n *\n * @param sourcePath - Path to the .type.ts file\n * @param basePath - Base path for resolving relative paths (defaults to cwd)\n * @returns BundledType ready for registry\n *\n * @example\n * ```typescript\n * const promptType = await bundleResourceType(\"./prompt.type.ts\");\n * ```\n */\nexport async function bundleResourceType(\n sourcePath: string,\n basePath?: string\n): Promise<BundledType> {\n // Resolve path\n const fullPath = isAbsolute(sourcePath)\n ? sourcePath\n : resolve(basePath ?? process.cwd(), sourcePath);\n\n // Read source file\n const source = await readFile(fullPath, \"utf-8\");\n\n // Bundle using Bun.build\n // @ts-expect-error - Bun global is available at runtime\n const result = await Bun.build({\n stdin: {\n contents: source,\n resolveDir: resolve(fullPath, \"..\"),\n loader: \"ts\",\n },\n target: \"bun\",\n format: \"esm\",\n minify: false,\n });\n\n if (!result.success) {\n const errors = result.logs.map((log: { message: string }) => log.message).join(\"\\n\");\n throw new Error(`Failed to bundle ${sourcePath}: ${errors}`);\n }\n\n // Get bundled code\n const bundledCode = await result.outputs[0].text();\n\n // Extract metadata by evaluating the module\n // We need to import dynamically to get the metadata\n const tempModule = await import(fullPath);\n const typeSource: ResourceTypeSource = tempModule.default;\n\n if (!typeSource.name) {\n throw new Error(`Resource type at ${sourcePath} must have a name`);\n }\n\n if (typeof typeSource.resolve !== \"function\") {\n throw new Error(`Resource type at ${sourcePath} must have a resolve function`);\n }\n\n return {\n name: typeSource.name,\n aliases: typeSource.aliases,\n description: typeSource.description ?? \"\",\n schema: typeSource.schema,\n code: bundledCode,\n };\n}\n",
-
"// src/errors.ts\nclass ResourceXError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ResourceXError\";\n }\n}\n\nclass LocatorError extends ResourceXError {\n locator;\n constructor(message, locator) {\n super(message);\n this.locator = locator;\n this.name = \"LocatorError\";\n }\n}\n\nclass ManifestError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ManifestError\";\n }\n}\n\nclass ContentError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ContentError\";\n }\n}\n\nclass DefinitionError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"DefinitionError\";\n }\n}\n// src/primitives/define.ts\nfunction define(input) {\n if (input === null || typeof input !== \"object\") {\n throw new DefinitionError(\"definition must be an object\");\n }\n const obj = input;\n if (!obj.name || typeof obj.name !== \"string\") {\n throw new DefinitionError(\"name is required\");\n }\n if (!obj.type || typeof obj.type !== \"string\") {\n throw new DefinitionError(\"type is required\");\n }\n if (!obj.version || typeof obj.version !== \"string\") {\n throw new DefinitionError(\"version is required\");\n }\n const rxd = {\n ...obj,\n name: obj.name,\n type: obj.type,\n version: obj.version,\n domain: typeof obj.domain === \"string\" ? obj.domain : \"localhost\",\n path: typeof obj.path === \"string\" ? obj.path : undefined,\n description: typeof obj.description === \"string\" ? obj.description : undefined,\n author: typeof obj.author === \"string\" ? obj.author : undefined,\n license: typeof obj.license === \"string\" ? obj.license : undefined,\n keywords: Array.isArray(obj.keywords) ? obj.keywords : undefined,\n repository: typeof obj.repository === \"string\" ? 
obj.repository : undefined\n };\n return rxd;\n}\n// src/primitives/manifest.ts\nfunction manifest(rxd) {\n return {\n domain: rxd.domain,\n path: rxd.path,\n name: rxd.name,\n type: rxd.type,\n version: rxd.version\n };\n}\n// src/primitives/archive.ts\nimport { gzip } from \"node:zlib\";\nimport { promisify } from \"node:util\";\n\n// ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/unpacker-BpPBxY8N.js\nvar BLOCK_SIZE = 512;\nvar BLOCK_SIZE_MASK = 511;\nvar DEFAULT_FILE_MODE = 420;\nvar DEFAULT_DIR_MODE = 493;\nvar USTAR_NAME_OFFSET = 0;\nvar USTAR_NAME_SIZE = 100;\nvar USTAR_MODE_OFFSET = 100;\nvar USTAR_MODE_SIZE = 8;\nvar USTAR_UID_OFFSET = 108;\nvar USTAR_UID_SIZE = 8;\nvar USTAR_GID_OFFSET = 116;\nvar USTAR_GID_SIZE = 8;\nvar USTAR_SIZE_OFFSET = 124;\nvar USTAR_SIZE_SIZE = 12;\nvar USTAR_MTIME_OFFSET = 136;\nvar USTAR_MTIME_SIZE = 12;\nvar USTAR_CHECKSUM_OFFSET = 148;\nvar USTAR_CHECKSUM_SIZE = 8;\nvar USTAR_TYPEFLAG_OFFSET = 156;\nvar USTAR_TYPEFLAG_SIZE = 1;\nvar USTAR_LINKNAME_OFFSET = 157;\nvar USTAR_LINKNAME_SIZE = 100;\nvar USTAR_MAGIC_OFFSET = 257;\nvar USTAR_MAGIC_SIZE = 6;\nvar USTAR_VERSION_OFFSET = 263;\nvar USTAR_VERSION_SIZE = 2;\nvar USTAR_UNAME_OFFSET = 265;\nvar USTAR_UNAME_SIZE = 32;\nvar USTAR_GNAME_OFFSET = 297;\nvar USTAR_GNAME_SIZE = 32;\nvar USTAR_PREFIX_OFFSET = 345;\nvar USTAR_PREFIX_SIZE = 155;\nvar USTAR_VERSION = \"00\";\nvar USTAR_MAX_UID_GID = 2097151;\nvar USTAR_MAX_SIZE = 8589934591;\nvar FILE = \"file\";\nvar LINK = \"link\";\nvar SYMLINK = \"symlink\";\nvar DIRECTORY = \"directory\";\nvar TYPEFLAG = {\n file: \"0\",\n link: \"1\",\n symlink: \"2\",\n \"character-device\": \"3\",\n \"block-device\": \"4\",\n directory: \"5\",\n fifo: \"6\",\n \"pax-header\": \"x\",\n \"pax-global-header\": \"g\",\n \"gnu-long-name\": \"L\",\n \"gnu-long-link-name\": \"K\"\n};\nvar FLAGTYPE = {\n \"0\": FILE,\n \"1\": LINK,\n \"2\": SYMLINK,\n \"3\": \"character-device\",\n \"4\": \"block-device\",\n \"5\": DIRECTORY,\n \"6\": \"fifo\",\n x: \"pax-header\",\n g: \"pax-global-header\",\n L: \"gnu-long-name\",\n K: \"gnu-long-link-name\"\n};\nvar ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);\nvar EMPTY = new Uint8Array(0);\nvar encoder = new TextEncoder;\nvar decoder = new TextDecoder;\nfunction writeString(view, offset, size, value) {\n if (value)\n encoder.encodeInto(value, view.subarray(offset, offset + size));\n}\nfunction writeOctal(view, offset, size, value) {\n if (value === undefined)\n return;\n const octalString = value.toString(8).padStart(size - 1, \"0\");\n encoder.encodeInto(octalString, view.subarray(offset, offset + size - 1));\n}\nfunction readString(view, offset, size) {\n const end = view.indexOf(0, offset);\n const sliceEnd = end === -1 || end > offset + size ? 
offset + size : end;\n return decoder.decode(view.subarray(offset, sliceEnd));\n}\nfunction readOctal(view, offset, size) {\n let value = 0;\n const end = offset + size;\n for (let i = offset;i < end; i++) {\n const charCode = view[i];\n if (charCode === 0)\n break;\n if (charCode === 32)\n continue;\n value = value * 8 + (charCode - 48);\n }\n return value;\n}\nfunction readNumeric(view, offset, size) {\n if (view[offset] & 128) {\n let result = 0;\n result = view[offset] & 127;\n for (let i = 1;i < size; i++)\n result = result * 256 + view[offset + i];\n if (!Number.isSafeInteger(result))\n throw new Error(\"TAR number too large\");\n return result;\n }\n return readOctal(view, offset, size);\n}\nvar isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;\nasync function normalizeBody(body) {\n if (body === null || body === undefined)\n return EMPTY;\n if (body instanceof Uint8Array)\n return body;\n if (typeof body === \"string\")\n return encoder.encode(body);\n if (body instanceof ArrayBuffer)\n return new Uint8Array(body);\n if (body instanceof Blob)\n return new Uint8Array(await body.arrayBuffer());\n throw new TypeError(\"Unsupported content type for entry body.\");\n}\nfunction transformHeader(header, options) {\n const { strip, filter, map } = options;\n if (!strip && !filter && !map)\n return header;\n const h = { ...header };\n if (strip && strip > 0) {\n const components = h.name.split(\"/\").filter(Boolean);\n if (strip >= components.length)\n return null;\n const newName = components.slice(strip).join(\"/\");\n h.name = h.type === DIRECTORY && !newName.endsWith(\"/\") ? `${newName}/` : newName;\n if (h.linkname?.startsWith(\"/\")) {\n const linkComponents = h.linkname.split(\"/\").filter(Boolean);\n h.linkname = strip >= linkComponents.length ? \"/\" : `/${linkComponents.slice(strip).join(\"/\")}`;\n }\n }\n if (filter?.(h) === false)\n return null;\n const result = map ? 
map(h) : h;\n if (result && (!result.name || !result.name.trim() || result.name === \".\" || result.name === \"/\"))\n return null;\n return result;\n}\nvar CHECKSUM_SPACE = 32;\nvar ASCII_ZERO = 48;\nfunction validateChecksum(block) {\n const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);\n let sum = 0;\n for (let i = 0;i < block.length; i++)\n if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)\n sum += CHECKSUM_SPACE;\n else\n sum += block[i];\n return stored === sum;\n}\nfunction writeChecksum(block) {\n block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);\n let checksum = 0;\n for (const byte of block)\n checksum += byte;\n for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {\n block[i] = (checksum & 7) + ASCII_ZERO;\n checksum >>= 3;\n }\n block[USTAR_CHECKSUM_OFFSET + 6] = 0;\n block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;\n}\nfunction generatePax(header) {\n const paxRecords = {};\n if (header.name.length > USTAR_NAME_SIZE) {\n if (findUstarSplit(header.name) === null)\n paxRecords.path = header.name;\n }\n if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)\n paxRecords.linkpath = header.linkname;\n if (header.uname && header.uname.length > USTAR_UNAME_SIZE)\n paxRecords.uname = header.uname;\n if (header.gname && header.gname.length > USTAR_GNAME_SIZE)\n paxRecords.gname = header.gname;\n if (header.uid != null && header.uid > USTAR_MAX_UID_GID)\n paxRecords.uid = String(header.uid);\n if (header.gid != null && header.gid > USTAR_MAX_UID_GID)\n paxRecords.gid = String(header.gid);\n if (header.size != null && header.size > USTAR_MAX_SIZE)\n paxRecords.size = String(header.size);\n if (header.pax)\n Object.assign(paxRecords, header.pax);\n const paxEntries = Object.entries(paxRecords);\n if (paxEntries.length === 0)\n return null;\n const paxBody = encoder.encode(paxEntries.map(([key, value]) => {\n const record = `${key}=${value}\n`;\n const partLength = encoder.encode(record).length + 1;\n let totalLength = partLength + String(partLength).length;\n totalLength = partLength + String(totalLength).length;\n return `${totalLength} ${record}`;\n }).join(\"\"));\n return {\n paxHeader: createTarHeader({\n name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),\n size: paxBody.length,\n type: \"pax-header\",\n mode: 420,\n mtime: header.mtime,\n uname: header.uname,\n gname: header.gname,\n uid: header.uid,\n gid: header.gid\n }),\n paxBody\n };\n}\nfunction findUstarSplit(path) {\n if (path.length <= USTAR_NAME_SIZE)\n return null;\n const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;\n const slashIndex = path.lastIndexOf(\"/\", USTAR_PREFIX_SIZE);\n if (slashIndex > 0 && slashIndex >= minSlashIndex)\n return {\n prefix: path.slice(0, slashIndex),\n name: path.slice(slashIndex + 1)\n };\n return null;\n}\nfunction createTarHeader(header) {\n const view = new Uint8Array(BLOCK_SIZE);\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n let name = header.name;\n let prefix = \"\";\n if (!header.pax?.path) {\n const split = findUstarSplit(name);\n if (split) {\n name = split.name;\n prefix = split.prefix;\n }\n }\n writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);\n writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));\n writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 
0);\n writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);\n writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);\n writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));\n writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);\n writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);\n writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, \"ustar\\x00\");\n writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);\n writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);\n writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);\n writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);\n writeChecksum(view);\n return view;\n}\nfunction parseUstarHeader(block, strict) {\n if (strict && !validateChecksum(block))\n throw new Error(\"Invalid tar header checksum.\");\n const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);\n const header = {\n name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),\n mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),\n uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),\n gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),\n size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),\n mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),\n type: FLAGTYPE[typeflag] || FILE,\n linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)\n };\n const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);\n if (magic.trim() === \"ustar\") {\n header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);\n header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);\n }\n if (magic === \"ustar\")\n header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);\n return header;\n}\nvar PAX_MAPPING = {\n path: [\"name\", (v) => v],\n linkpath: [\"linkname\", (v) => v],\n size: [\"size\", (v) => parseInt(v, 10)],\n mtime: [\"mtime\", parseFloat],\n uid: [\"uid\", (v) => parseInt(v, 10)],\n gid: [\"gid\", (v) => parseInt(v, 10)],\n uname: [\"uname\", (v) => v],\n gname: [\"gname\", (v) => v]\n};\nfunction parsePax(buffer) {\n const decoder$1 = new TextDecoder(\"utf-8\");\n const overrides = {};\n const pax = {};\n let offset = 0;\n while (offset < buffer.length) {\n const spaceIndex = buffer.indexOf(32, offset);\n if (spaceIndex === -1)\n break;\n const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);\n if (Number.isNaN(length) || length === 0)\n break;\n const recordEnd = offset + length;\n const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split(\"=\", 2);\n if (key && value !== undefined) {\n pax[key] = value;\n const mapping = PAX_MAPPING[key];\n if (mapping) {\n const [targetKey, parser] = mapping;\n const parsedValue = parser(value);\n if (typeof parsedValue === \"string\" || !Number.isNaN(parsedValue))\n overrides[targetKey] = parsedValue;\n }\n }\n offset = recordEnd;\n }\n if (Object.keys(pax).length > 0)\n overrides.pax = pax;\n return overrides;\n}\nfunction applyOverrides(header, overrides) {\n if (overrides.name !== undefined)\n header.name = overrides.name;\n if (overrides.linkname !== undefined)\n header.linkname = overrides.linkname;\n if (overrides.size !== undefined)\n header.size = overrides.size;\n if (overrides.mtime !== 
undefined)\n header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);\n if (overrides.uid !== undefined)\n header.uid = overrides.uid;\n if (overrides.gid !== undefined)\n header.gid = overrides.gid;\n if (overrides.uname !== undefined)\n header.uname = overrides.uname;\n if (overrides.gname !== undefined)\n header.gname = overrides.gname;\n if (overrides.pax)\n header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);\n}\nfunction getMetaParser(type) {\n switch (type) {\n case \"pax-global-header\":\n case \"pax-header\":\n return parsePax;\n case \"gnu-long-name\":\n return (data) => ({ name: readString(data, 0, data.length) });\n case \"gnu-long-link-name\":\n return (data) => ({ linkname: readString(data, 0, data.length) });\n default:\n return;\n }\n}\nfunction getHeaderBlocks(header) {\n const base = createTarHeader(header);\n const pax = generatePax(header);\n if (!pax)\n return [base];\n const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;\n const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];\n return [\n pax.paxHeader,\n pax.paxBody,\n ...paddingBlocks,\n base\n ];\n}\nvar EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);\nfunction createTarPacker(onData, onError, onFinalize) {\n let currentHeader = null;\n let bytesWritten = 0;\n let finalized = false;\n return {\n add(header) {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"No new tar entries after finalize.\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Previous entry must be completed before adding a new one\");\n onError(error);\n throw error;\n }\n try {\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n const headerBlocks = getHeaderBlocks({\n ...header,\n size\n });\n for (const block of headerBlocks)\n onData(block);\n currentHeader = {\n ...header,\n size\n };\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n }\n },\n write(chunk) {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active tar entry.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot write data after finalize.\");\n onError(error);\n throw error;\n }\n const newTotal = bytesWritten + chunk.length;\n if (newTotal > currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`\"${currentHeader.name}\" exceeds given size of ${currentHeader.size} bytes.`);\n onError(error);\n throw error;\n }\n try {\n bytesWritten = newTotal;\n onData(chunk);\n } catch (error) {\n onError(error);\n }\n },\n endEntry() {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active entry to end.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot end entry after finalize.\");\n onError(error);\n throw error;\n }\n try {\n if (bytesWritten !== currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`Size mismatch for \"${currentHeader.name}\".`);\n onError(error);\n throw error;\n }\n const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;\n if (paddingSize > 0)\n onData(new Uint8Array(paddingSize));\n currentHeader = null;\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n throw error;\n }\n },\n finalize() {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Archive has already been finalized\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Cannot finalize while an entry 
is still active\");\n onError(error);\n throw error;\n }\n try {\n onData(EOF_BUFFER);\n finalized = true;\n if (onFinalize)\n onFinalize();\n } catch (error) {\n onError(error);\n }\n }\n };\n}\nvar INITIAL_CAPACITY = 256;\nfunction createChunkQueue() {\n let chunks = new Array(INITIAL_CAPACITY);\n let capacityMask = chunks.length - 1;\n let head = 0;\n let tail = 0;\n let totalAvailable = 0;\n const consumeFromHead = (count) => {\n const chunk = chunks[head];\n if (count === chunk.length) {\n chunks[head] = EMPTY;\n head = head + 1 & capacityMask;\n } else\n chunks[head] = chunk.subarray(count);\n totalAvailable -= count;\n if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {\n chunks = new Array(INITIAL_CAPACITY);\n capacityMask = INITIAL_CAPACITY - 1;\n head = 0;\n tail = 0;\n }\n };\n function pull(bytes, callback) {\n if (callback) {\n let fed = 0;\n let remaining$1 = Math.min(bytes, totalAvailable);\n while (remaining$1 > 0) {\n const chunk = chunks[head];\n const toFeed = Math.min(remaining$1, chunk.length);\n const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);\n consumeFromHead(toFeed);\n remaining$1 -= toFeed;\n fed += toFeed;\n if (!callback(segment))\n break;\n }\n return fed;\n }\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes) {\n const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);\n consumeFromHead(bytes);\n return view;\n }\n const result = new Uint8Array(bytes);\n let copied = 0;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toCopy = Math.min(remaining, chunk.length);\n result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n remaining -= toCopy;\n consumeFromHead(toCopy);\n }\n return result;\n }\n return {\n push: (chunk) => {\n if (chunk.length === 0)\n return;\n let nextTail = tail + 1 & capacityMask;\n if (nextTail === head) {\n const oldLen = chunks.length;\n const newLen = oldLen * 2;\n const newChunks = new Array(newLen);\n const count = tail - head + oldLen & oldLen - 1;\n if (head < tail)\n for (let i = 0;i < count; i++)\n newChunks[i] = chunks[head + i];\n else if (count > 0) {\n const firstPart = oldLen - head;\n for (let i = 0;i < firstPart; i++)\n newChunks[i] = chunks[head + i];\n for (let i = 0;i < tail; i++)\n newChunks[firstPart + i] = chunks[i];\n }\n chunks = newChunks;\n capacityMask = newLen - 1;\n head = 0;\n tail = count;\n nextTail = tail + 1 & capacityMask;\n }\n chunks[tail] = chunk;\n tail = nextTail;\n totalAvailable += chunk.length;\n },\n available: () => totalAvailable,\n peek: (bytes) => {\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes)\n return firstChunk.length === bytes ? 
firstChunk : firstChunk.subarray(0, bytes);\n const result = new Uint8Array(bytes);\n let copied = 0;\n let index = head;\n while (copied < bytes) {\n const chunk = chunks[index];\n const toCopy = Math.min(bytes - copied, chunk.length);\n if (toCopy === chunk.length)\n result.set(chunk, copied);\n else\n result.set(chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n index = index + 1 & capacityMask;\n }\n return result;\n },\n discard: (bytes) => {\n if (bytes > totalAvailable)\n throw new Error(\"Too many bytes consumed\");\n if (bytes === 0)\n return;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toConsume = Math.min(remaining, chunk.length);\n consumeFromHead(toConsume);\n remaining -= toConsume;\n }\n },\n pull\n };\n}\nvar STATE_HEADER = 0;\nvar STATE_BODY = 1;\nvar truncateErr = /* @__PURE__ */ new Error(\"Tar archive is truncated.\");\nfunction createUnpacker(options = {}) {\n const strict = options.strict ?? false;\n const { available, peek, push, discard, pull } = createChunkQueue();\n let state = STATE_HEADER;\n let ended = false;\n let done = false;\n let eof = false;\n let currentEntry = null;\n const paxGlobals = {};\n let nextEntryOverrides = {};\n const unpacker = {\n isEntryActive: () => state === STATE_BODY,\n isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,\n write(chunk) {\n if (ended)\n throw new Error(\"Archive already ended.\");\n push(chunk);\n },\n end() {\n ended = true;\n },\n readHeader() {\n if (state !== STATE_HEADER)\n throw new Error(\"Cannot read header while an entry is active\");\n if (done)\n return;\n while (!done) {\n if (available() < BLOCK_SIZE) {\n if (ended) {\n if (available() > 0 && strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n const headerBlock = peek(BLOCK_SIZE);\n if (isZeroBlock(headerBlock)) {\n if (available() < BLOCK_SIZE * 2) {\n if (ended) {\n if (strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {\n discard(BLOCK_SIZE * 2);\n done = true;\n eof = true;\n return;\n }\n if (strict)\n throw new Error(\"Invalid tar header.\");\n discard(BLOCK_SIZE);\n continue;\n }\n let internalHeader;\n try {\n internalHeader = parseUstarHeader(headerBlock, strict);\n } catch (err) {\n if (strict)\n throw err;\n discard(BLOCK_SIZE);\n continue;\n }\n const metaParser = getMetaParser(internalHeader.type);\n if (metaParser) {\n const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;\n if (available() < BLOCK_SIZE + paddedSize) {\n if (ended && strict)\n throw truncateErr;\n return null;\n }\n discard(BLOCK_SIZE);\n const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));\n const target = internalHeader.type === \"pax-global-header\" ? 
paxGlobals : nextEntryOverrides;\n for (const key in overrides)\n target[key] = overrides[key];\n continue;\n }\n discard(BLOCK_SIZE);\n const header = internalHeader;\n if (internalHeader.prefix)\n header.name = `${internalHeader.prefix}/${header.name}`;\n applyOverrides(header, paxGlobals);\n applyOverrides(header, nextEntryOverrides);\n nextEntryOverrides = {};\n currentEntry = {\n header,\n remaining: header.size,\n padding: -header.size & BLOCK_SIZE_MASK\n };\n state = STATE_BODY;\n return header;\n }\n },\n streamBody(callback) {\n if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)\n return 0;\n const bytesToFeed = Math.min(currentEntry.remaining, available());\n if (bytesToFeed === 0)\n return 0;\n const fed = pull(bytesToFeed, callback);\n currentEntry.remaining -= fed;\n return fed;\n },\n skipPadding() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n if (currentEntry.remaining > 0)\n throw new Error(\"Body not fully consumed\");\n if (available() < currentEntry.padding)\n return false;\n discard(currentEntry.padding);\n currentEntry = null;\n state = STATE_HEADER;\n return true;\n },\n skipEntry() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n const toDiscard = Math.min(currentEntry.remaining, available());\n if (toDiscard > 0) {\n discard(toDiscard);\n currentEntry.remaining -= toDiscard;\n }\n if (currentEntry.remaining > 0)\n return false;\n return unpacker.skipPadding();\n },\n validateEOF() {\n if (strict) {\n if (!eof)\n throw truncateErr;\n if (available() > 0) {\n if (pull(available()).some((byte) => byte !== 0))\n throw new Error(\"Invalid EOF.\");\n }\n }\n }\n };\n return unpacker;\n}\nfunction isZeroBlock(block) {\n if (block.byteOffset % 8 === 0) {\n const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);\n for (let i = 0;i < view.length; i++)\n if (view[i] !== 0n)\n return false;\n return true;\n }\n for (let i = 0;i < block.length; i++)\n if (block[i] !== 0)\n return false;\n return true;\n}\n\n// ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/web/index.js\nfunction createTarPacker2() {\n let streamController;\n let packer;\n return {\n readable: new ReadableStream({ start(controller) {\n streamController = controller;\n packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));\n } }),\n controller: {\n add(header) {\n const bodyless = isBodyless(header);\n const h = { ...header };\n if (bodyless)\n h.size = 0;\n packer.add(h);\n if (bodyless)\n packer.endEntry();\n return new WritableStream({\n write(chunk) {\n packer.write(chunk);\n },\n close() {\n if (!bodyless)\n packer.endEntry();\n },\n abort(reason) {\n streamController.error(reason);\n }\n });\n },\n finalize() {\n packer.finalize();\n },\n error(err) {\n streamController.error(err);\n }\n }\n };\n}\nasync function streamToBuffer(stream) {\n const chunks = [];\n const reader = stream.getReader();\n let totalLength = 0;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done)\n break;\n chunks.push(value);\n totalLength += value.length;\n }\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n } finally {\n reader.releaseLock();\n }\n}\nvar drain = (stream) => stream.pipeTo(new WritableStream);\nfunction createTarDecoder(options = {}) {\n const unpacker = createUnpacker(options);\n let 
bodyController = null;\n let pumping = false;\n const pump = (controller) => {\n if (pumping)\n return;\n pumping = true;\n try {\n while (true)\n if (unpacker.isEntryActive()) {\n if (bodyController) {\n if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())\n break;\n } else if (!unpacker.skipEntry())\n break;\n if (unpacker.isBodyComplete()) {\n try {\n bodyController?.close();\n } catch {}\n bodyController = null;\n if (!unpacker.skipPadding())\n break;\n }\n } else {\n const header = unpacker.readHeader();\n if (header === null || header === undefined)\n break;\n controller.enqueue({\n header,\n body: new ReadableStream({\n start(c) {\n if (header.size === 0)\n c.close();\n else\n bodyController = c;\n },\n pull: () => pump(controller),\n cancel() {\n bodyController = null;\n pump(controller);\n }\n })\n });\n }\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n bodyController = null;\n throw error;\n } finally {\n pumping = false;\n }\n };\n return new TransformStream({\n transform(chunk, controller) {\n try {\n unpacker.write(chunk);\n pump(controller);\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n },\n flush(controller) {\n try {\n unpacker.end();\n pump(controller);\n unpacker.validateEOF();\n if (unpacker.isEntryActive() && !unpacker.isBodyComplete())\n try {\n bodyController?.close();\n } catch {}\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n }\n }, undefined, { highWaterMark: 1 });\n}\nasync function packTar(entries) {\n const { readable, controller } = createTarPacker2();\n await (async () => {\n for (const entry of entries) {\n const entryStream = controller.add(entry.header);\n const body = \"body\" in entry ? entry.body : entry.data;\n if (!body) {\n await entryStream.close();\n continue;\n }\n if (body instanceof ReadableStream)\n await body.pipeTo(entryStream);\n else if (body instanceof Blob)\n await body.stream().pipeTo(entryStream);\n else\n try {\n const chunk = await normalizeBody(body);\n if (chunk.length > 0) {\n const writer = entryStream.getWriter();\n await writer.write(chunk);\n await writer.close();\n } else\n await entryStream.close();\n } catch {\n throw new TypeError(`Unsupported content type for entry \"${entry.header.name}\".`);\n }\n }\n })().then(() => controller.finalize()).catch((err) => controller.error(err));\n return new Uint8Array(await streamToBuffer(readable));\n}\nasync function unpackTar(archive, options = {}) {\n const sourceStream = archive instanceof ReadableStream ? archive : new ReadableStream({ start(controller) {\n controller.enqueue(archive instanceof Uint8Array ? 
archive : new Uint8Array(archive));\n controller.close();\n } });\n const results = [];\n const entryStream = sourceStream.pipeThrough(createTarDecoder(options));\n for await (const entry of entryStream) {\n let processedHeader;\n try {\n processedHeader = transformHeader(entry.header, options);\n } catch (error) {\n await entry.body.cancel();\n throw error;\n }\n if (processedHeader === null) {\n await drain(entry.body);\n continue;\n }\n if (isBodyless(processedHeader)) {\n await drain(entry.body);\n results.push({ header: processedHeader });\n } else\n results.push({\n header: processedHeader,\n data: await streamToBuffer(entry.body)\n });\n }\n return results;\n}\n\n// src/primitives/archive.ts\nvar gzipAsync = promisify(gzip);\n\nclass RXAImpl {\n _buffer;\n constructor(buffer) {\n this._buffer = buffer;\n }\n get stream() {\n const buffer = this._buffer;\n return new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n }\n });\n }\n async buffer() {\n return this._buffer;\n }\n}\nasync function archive(files) {\n const entries = Object.entries(files).map(([name, content]) => {\n return {\n header: { name, size: content.length, type: \"file\" },\n body: new Uint8Array(content)\n };\n });\n const tarBuffer = await packTar(entries);\n const gzipBuffer = await gzipAsync(Buffer.from(tarBuffer));\n return new RXAImpl(gzipBuffer);\n}\n// src/primitives/locate.ts\nfunction locate(rxm) {\n return {\n domain: rxm.domain,\n path: rxm.path,\n name: rxm.name,\n type: rxm.type,\n version: rxm.version\n };\n}\n// src/primitives/resource.ts\nfunction resource(rxm, rxa) {\n const rxl = locate(rxm);\n return {\n locator: rxl,\n manifest: rxm,\n archive: rxa\n };\n}\n// src/primitives/extract.ts\nimport { gunzip } from \"node:zlib\";\nimport { promisify as promisify2 } from \"node:util\";\nvar gunzipAsync = promisify2(gunzip);\nasync function extract(rxa) {\n const buffer = await rxa.buffer();\n const tarBuffer = await gunzipAsync(buffer);\n const entries = await unpackTar(tarBuffer);\n const files = {};\n for (const entry of entries) {\n if ((entry.header.type === \"file\" || entry.header.type === undefined) && entry.data) {\n files[entry.header.name] = Buffer.from(entry.data);\n }\n }\n return files;\n}\n// src/primitives/format.ts\nfunction format(rxl) {\n let result = rxl.domain + \"/\";\n if (rxl.path) {\n result += rxl.path + \"/\";\n }\n result += rxl.name;\n result += \".\" + rxl.type;\n result += \"@\" + rxl.version;\n return result;\n}\n// src/primitives/parse.ts\nfunction parse(locator) {\n if (!locator || typeof locator !== \"string\") {\n throw new LocatorError(\"locator must be a non-empty string\", locator);\n }\n const atIndex = locator.lastIndexOf(\"@\");\n if (atIndex === -1) {\n throw new LocatorError(\"locator must contain version (@)\", locator);\n }\n const version = locator.slice(atIndex + 1);\n const beforeVersion = locator.slice(0, atIndex);\n if (!version) {\n throw new LocatorError(\"version is required\", locator);\n }\n const dotIndex = beforeVersion.lastIndexOf(\".\");\n if (dotIndex === -1) {\n throw new LocatorError(\"locator must contain type (.)\", locator);\n }\n const type = beforeVersion.slice(dotIndex + 1);\n const beforeType = beforeVersion.slice(0, dotIndex);\n if (!type) {\n throw new LocatorError(\"type is required\", locator);\n }\n const parts = beforeType.split(\"/\");\n if (parts.length < 2) {\n throw new LocatorError(\"locator must contain domain\", locator);\n }\n const domain = parts[0];\n const 
name = parts[parts.length - 1];\n const path = parts.length > 2 ? parts.slice(1, -1).join(\"/\") : undefined;\n if (!domain) {\n throw new LocatorError(\"domain is required\", locator);\n }\n if (!name) {\n throw new LocatorError(\"name is required\", locator);\n }\n return {\n domain,\n path,\n name,\n type,\n version\n };\n}\n// src/primitives/wrap.ts\nclass RXAImpl2 {\n _buffer;\n constructor(buffer) {\n this._buffer = buffer;\n }\n get stream() {\n const buffer = this._buffer;\n return new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n }\n });\n }\n async buffer() {\n return this._buffer;\n }\n}\nfunction wrap(buffer) {\n return new RXAImpl2(buffer);\n}\nexport {\n wrap,\n resource,\n parse,\n manifest,\n locate,\n format,\n extract,\n define,\n archive,\n ResourceXError,\n ManifestError,\n LocatorError,\n DefinitionError,\n ContentError\n};\n\n//# debugId=467D5F5D80C42E4F64756E2164756E21\n",
+
"// src/errors.ts\nclass ResourceXError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ResourceXError\";\n }\n}\n\nclass LocatorError extends ResourceXError {\n locator;\n constructor(message, locator) {\n super(message);\n this.locator = locator;\n this.name = \"LocatorError\";\n }\n}\n\nclass ManifestError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ManifestError\";\n }\n}\n\nclass ContentError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ContentError\";\n }\n}\n\nclass DefinitionError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"DefinitionError\";\n }\n}\n// src/primitives/define.ts\nfunction define(input) {\n if (input === null || typeof input !== \"object\") {\n throw new DefinitionError(\"definition must be an object\");\n }\n const obj = input;\n if (!obj.name || typeof obj.name !== \"string\") {\n throw new DefinitionError(\"name is required\");\n }\n if (!obj.type || typeof obj.type !== \"string\") {\n throw new DefinitionError(\"type is required\");\n }\n const tagValue = obj.tag ?? obj.version;\n if (tagValue !== undefined && typeof tagValue !== \"string\") {\n throw new DefinitionError(\"tag must be a string\");\n }\n const rxd = {\n ...obj,\n name: obj.name,\n type: obj.type,\n tag: typeof tagValue === \"string\" ? tagValue : undefined,\n registry: typeof obj.registry === \"string\" ? obj.registry : undefined,\n path: typeof obj.path === \"string\" ? obj.path : undefined,\n description: typeof obj.description === \"string\" ? obj.description : undefined,\n author: typeof obj.author === \"string\" ? obj.author : undefined,\n license: typeof obj.license === \"string\" ? obj.license : undefined,\n keywords: Array.isArray(obj.keywords) ? obj.keywords : undefined,\n repository: typeof obj.repository === \"string\" ? obj.repository : undefined\n };\n return rxd;\n}\n// src/primitives/manifest.ts\nfunction manifest(rxd) {\n return {\n registry: rxd.registry,\n path: rxd.path,\n name: rxd.name,\n type: rxd.type,\n tag: rxd.tag ?? 
\"latest\"\n };\n}\n// src/primitives/archive.ts\nimport { gzip } from \"node:zlib\";\nimport { promisify } from \"node:util\";\n\n// ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/unpacker-BpPBxY8N.js\nvar BLOCK_SIZE = 512;\nvar BLOCK_SIZE_MASK = 511;\nvar DEFAULT_FILE_MODE = 420;\nvar DEFAULT_DIR_MODE = 493;\nvar USTAR_NAME_OFFSET = 0;\nvar USTAR_NAME_SIZE = 100;\nvar USTAR_MODE_OFFSET = 100;\nvar USTAR_MODE_SIZE = 8;\nvar USTAR_UID_OFFSET = 108;\nvar USTAR_UID_SIZE = 8;\nvar USTAR_GID_OFFSET = 116;\nvar USTAR_GID_SIZE = 8;\nvar USTAR_SIZE_OFFSET = 124;\nvar USTAR_SIZE_SIZE = 12;\nvar USTAR_MTIME_OFFSET = 136;\nvar USTAR_MTIME_SIZE = 12;\nvar USTAR_CHECKSUM_OFFSET = 148;\nvar USTAR_CHECKSUM_SIZE = 8;\nvar USTAR_TYPEFLAG_OFFSET = 156;\nvar USTAR_TYPEFLAG_SIZE = 1;\nvar USTAR_LINKNAME_OFFSET = 157;\nvar USTAR_LINKNAME_SIZE = 100;\nvar USTAR_MAGIC_OFFSET = 257;\nvar USTAR_MAGIC_SIZE = 6;\nvar USTAR_VERSION_OFFSET = 263;\nvar USTAR_VERSION_SIZE = 2;\nvar USTAR_UNAME_OFFSET = 265;\nvar USTAR_UNAME_SIZE = 32;\nvar USTAR_GNAME_OFFSET = 297;\nvar USTAR_GNAME_SIZE = 32;\nvar USTAR_PREFIX_OFFSET = 345;\nvar USTAR_PREFIX_SIZE = 155;\nvar USTAR_VERSION = \"00\";\nvar USTAR_MAX_UID_GID = 2097151;\nvar USTAR_MAX_SIZE = 8589934591;\nvar FILE = \"file\";\nvar LINK = \"link\";\nvar SYMLINK = \"symlink\";\nvar DIRECTORY = \"directory\";\nvar TYPEFLAG = {\n file: \"0\",\n link: \"1\",\n symlink: \"2\",\n \"character-device\": \"3\",\n \"block-device\": \"4\",\n directory: \"5\",\n fifo: \"6\",\n \"pax-header\": \"x\",\n \"pax-global-header\": \"g\",\n \"gnu-long-name\": \"L\",\n \"gnu-long-link-name\": \"K\"\n};\nvar FLAGTYPE = {\n \"0\": FILE,\n \"1\": LINK,\n \"2\": SYMLINK,\n \"3\": \"character-device\",\n \"4\": \"block-device\",\n \"5\": DIRECTORY,\n \"6\": \"fifo\",\n x: \"pax-header\",\n g: \"pax-global-header\",\n L: \"gnu-long-name\",\n K: \"gnu-long-link-name\"\n};\nvar ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);\nvar EMPTY = new Uint8Array(0);\nvar encoder = new TextEncoder;\nvar decoder = new TextDecoder;\nfunction writeString(view, offset, size, value) {\n if (value)\n encoder.encodeInto(value, view.subarray(offset, offset + size));\n}\nfunction writeOctal(view, offset, size, value) {\n if (value === undefined)\n return;\n const octalString = value.toString(8).padStart(size - 1, \"0\");\n encoder.encodeInto(octalString, view.subarray(offset, offset + size - 1));\n}\nfunction readString(view, offset, size) {\n const end = view.indexOf(0, offset);\n const sliceEnd = end === -1 || end > offset + size ? 
offset + size : end;\n return decoder.decode(view.subarray(offset, sliceEnd));\n}\nfunction readOctal(view, offset, size) {\n let value = 0;\n const end = offset + size;\n for (let i = offset;i < end; i++) {\n const charCode = view[i];\n if (charCode === 0)\n break;\n if (charCode === 32)\n continue;\n value = value * 8 + (charCode - 48);\n }\n return value;\n}\nfunction readNumeric(view, offset, size) {\n if (view[offset] & 128) {\n let result = 0;\n result = view[offset] & 127;\n for (let i = 1;i < size; i++)\n result = result * 256 + view[offset + i];\n if (!Number.isSafeInteger(result))\n throw new Error(\"TAR number too large\");\n return result;\n }\n return readOctal(view, offset, size);\n}\nvar isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;\nasync function normalizeBody(body) {\n if (body === null || body === undefined)\n return EMPTY;\n if (body instanceof Uint8Array)\n return body;\n if (typeof body === \"string\")\n return encoder.encode(body);\n if (body instanceof ArrayBuffer)\n return new Uint8Array(body);\n if (body instanceof Blob)\n return new Uint8Array(await body.arrayBuffer());\n throw new TypeError(\"Unsupported content type for entry body.\");\n}\nfunction transformHeader(header, options) {\n const { strip, filter, map } = options;\n if (!strip && !filter && !map)\n return header;\n const h = { ...header };\n if (strip && strip > 0) {\n const components = h.name.split(\"/\").filter(Boolean);\n if (strip >= components.length)\n return null;\n const newName = components.slice(strip).join(\"/\");\n h.name = h.type === DIRECTORY && !newName.endsWith(\"/\") ? `${newName}/` : newName;\n if (h.linkname?.startsWith(\"/\")) {\n const linkComponents = h.linkname.split(\"/\").filter(Boolean);\n h.linkname = strip >= linkComponents.length ? \"/\" : `/${linkComponents.slice(strip).join(\"/\")}`;\n }\n }\n if (filter?.(h) === false)\n return null;\n const result = map ? 
map(h) : h;\n if (result && (!result.name || !result.name.trim() || result.name === \".\" || result.name === \"/\"))\n return null;\n return result;\n}\nvar CHECKSUM_SPACE = 32;\nvar ASCII_ZERO = 48;\nfunction validateChecksum(block) {\n const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);\n let sum = 0;\n for (let i = 0;i < block.length; i++)\n if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)\n sum += CHECKSUM_SPACE;\n else\n sum += block[i];\n return stored === sum;\n}\nfunction writeChecksum(block) {\n block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);\n let checksum = 0;\n for (const byte of block)\n checksum += byte;\n for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {\n block[i] = (checksum & 7) + ASCII_ZERO;\n checksum >>= 3;\n }\n block[USTAR_CHECKSUM_OFFSET + 6] = 0;\n block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;\n}\nfunction generatePax(header) {\n const paxRecords = {};\n if (header.name.length > USTAR_NAME_SIZE) {\n if (findUstarSplit(header.name) === null)\n paxRecords.path = header.name;\n }\n if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)\n paxRecords.linkpath = header.linkname;\n if (header.uname && header.uname.length > USTAR_UNAME_SIZE)\n paxRecords.uname = header.uname;\n if (header.gname && header.gname.length > USTAR_GNAME_SIZE)\n paxRecords.gname = header.gname;\n if (header.uid != null && header.uid > USTAR_MAX_UID_GID)\n paxRecords.uid = String(header.uid);\n if (header.gid != null && header.gid > USTAR_MAX_UID_GID)\n paxRecords.gid = String(header.gid);\n if (header.size != null && header.size > USTAR_MAX_SIZE)\n paxRecords.size = String(header.size);\n if (header.pax)\n Object.assign(paxRecords, header.pax);\n const paxEntries = Object.entries(paxRecords);\n if (paxEntries.length === 0)\n return null;\n const paxBody = encoder.encode(paxEntries.map(([key, value]) => {\n const record = `${key}=${value}\n`;\n const partLength = encoder.encode(record).length + 1;\n let totalLength = partLength + String(partLength).length;\n totalLength = partLength + String(totalLength).length;\n return `${totalLength} ${record}`;\n }).join(\"\"));\n return {\n paxHeader: createTarHeader({\n name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),\n size: paxBody.length,\n type: \"pax-header\",\n mode: 420,\n mtime: header.mtime,\n uname: header.uname,\n gname: header.gname,\n uid: header.uid,\n gid: header.gid\n }),\n paxBody\n };\n}\nfunction findUstarSplit(path) {\n if (path.length <= USTAR_NAME_SIZE)\n return null;\n const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;\n const slashIndex = path.lastIndexOf(\"/\", USTAR_PREFIX_SIZE);\n if (slashIndex > 0 && slashIndex >= minSlashIndex)\n return {\n prefix: path.slice(0, slashIndex),\n name: path.slice(slashIndex + 1)\n };\n return null;\n}\nfunction createTarHeader(header) {\n const view = new Uint8Array(BLOCK_SIZE);\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n let name = header.name;\n let prefix = \"\";\n if (!header.pax?.path) {\n const split = findUstarSplit(name);\n if (split) {\n name = split.name;\n prefix = split.prefix;\n }\n }\n writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);\n writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));\n writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 
0);\n writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);\n writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);\n writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));\n writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);\n writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);\n writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, \"ustar\\x00\");\n writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);\n writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);\n writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);\n writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);\n writeChecksum(view);\n return view;\n}\nfunction parseUstarHeader(block, strict) {\n if (strict && !validateChecksum(block))\n throw new Error(\"Invalid tar header checksum.\");\n const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);\n const header = {\n name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),\n mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),\n uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),\n gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),\n size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),\n mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),\n type: FLAGTYPE[typeflag] || FILE,\n linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)\n };\n const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);\n if (magic.trim() === \"ustar\") {\n header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);\n header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);\n }\n if (magic === \"ustar\")\n header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);\n return header;\n}\nvar PAX_MAPPING = {\n path: [\"name\", (v) => v],\n linkpath: [\"linkname\", (v) => v],\n size: [\"size\", (v) => parseInt(v, 10)],\n mtime: [\"mtime\", parseFloat],\n uid: [\"uid\", (v) => parseInt(v, 10)],\n gid: [\"gid\", (v) => parseInt(v, 10)],\n uname: [\"uname\", (v) => v],\n gname: [\"gname\", (v) => v]\n};\nfunction parsePax(buffer) {\n const decoder$1 = new TextDecoder(\"utf-8\");\n const overrides = {};\n const pax = {};\n let offset = 0;\n while (offset < buffer.length) {\n const spaceIndex = buffer.indexOf(32, offset);\n if (spaceIndex === -1)\n break;\n const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);\n if (Number.isNaN(length) || length === 0)\n break;\n const recordEnd = offset + length;\n const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split(\"=\", 2);\n if (key && value !== undefined) {\n pax[key] = value;\n const mapping = PAX_MAPPING[key];\n if (mapping) {\n const [targetKey, parser] = mapping;\n const parsedValue = parser(value);\n if (typeof parsedValue === \"string\" || !Number.isNaN(parsedValue))\n overrides[targetKey] = parsedValue;\n }\n }\n offset = recordEnd;\n }\n if (Object.keys(pax).length > 0)\n overrides.pax = pax;\n return overrides;\n}\nfunction applyOverrides(header, overrides) {\n if (overrides.name !== undefined)\n header.name = overrides.name;\n if (overrides.linkname !== undefined)\n header.linkname = overrides.linkname;\n if (overrides.size !== undefined)\n header.size = overrides.size;\n if (overrides.mtime !== 
undefined)\n header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);\n if (overrides.uid !== undefined)\n header.uid = overrides.uid;\n if (overrides.gid !== undefined)\n header.gid = overrides.gid;\n if (overrides.uname !== undefined)\n header.uname = overrides.uname;\n if (overrides.gname !== undefined)\n header.gname = overrides.gname;\n if (overrides.pax)\n header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);\n}\nfunction getMetaParser(type) {\n switch (type) {\n case \"pax-global-header\":\n case \"pax-header\":\n return parsePax;\n case \"gnu-long-name\":\n return (data) => ({ name: readString(data, 0, data.length) });\n case \"gnu-long-link-name\":\n return (data) => ({ linkname: readString(data, 0, data.length) });\n default:\n return;\n }\n}\nfunction getHeaderBlocks(header) {\n const base = createTarHeader(header);\n const pax = generatePax(header);\n if (!pax)\n return [base];\n const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;\n const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];\n return [\n pax.paxHeader,\n pax.paxBody,\n ...paddingBlocks,\n base\n ];\n}\nvar EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);\nfunction createTarPacker(onData, onError, onFinalize) {\n let currentHeader = null;\n let bytesWritten = 0;\n let finalized = false;\n return {\n add(header) {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"No new tar entries after finalize.\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Previous entry must be completed before adding a new one\");\n onError(error);\n throw error;\n }\n try {\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n const headerBlocks = getHeaderBlocks({\n ...header,\n size\n });\n for (const block of headerBlocks)\n onData(block);\n currentHeader = {\n ...header,\n size\n };\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n }\n },\n write(chunk) {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active tar entry.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot write data after finalize.\");\n onError(error);\n throw error;\n }\n const newTotal = bytesWritten + chunk.length;\n if (newTotal > currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`\"${currentHeader.name}\" exceeds given size of ${currentHeader.size} bytes.`);\n onError(error);\n throw error;\n }\n try {\n bytesWritten = newTotal;\n onData(chunk);\n } catch (error) {\n onError(error);\n }\n },\n endEntry() {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active entry to end.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot end entry after finalize.\");\n onError(error);\n throw error;\n }\n try {\n if (bytesWritten !== currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`Size mismatch for \"${currentHeader.name}\".`);\n onError(error);\n throw error;\n }\n const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;\n if (paddingSize > 0)\n onData(new Uint8Array(paddingSize));\n currentHeader = null;\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n throw error;\n }\n },\n finalize() {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Archive has already been finalized\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Cannot finalize while an entry 
is still active\");\n onError(error);\n throw error;\n }\n try {\n onData(EOF_BUFFER);\n finalized = true;\n if (onFinalize)\n onFinalize();\n } catch (error) {\n onError(error);\n }\n }\n };\n}\nvar INITIAL_CAPACITY = 256;\nfunction createChunkQueue() {\n let chunks = new Array(INITIAL_CAPACITY);\n let capacityMask = chunks.length - 1;\n let head = 0;\n let tail = 0;\n let totalAvailable = 0;\n const consumeFromHead = (count) => {\n const chunk = chunks[head];\n if (count === chunk.length) {\n chunks[head] = EMPTY;\n head = head + 1 & capacityMask;\n } else\n chunks[head] = chunk.subarray(count);\n totalAvailable -= count;\n if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {\n chunks = new Array(INITIAL_CAPACITY);\n capacityMask = INITIAL_CAPACITY - 1;\n head = 0;\n tail = 0;\n }\n };\n function pull(bytes, callback) {\n if (callback) {\n let fed = 0;\n let remaining$1 = Math.min(bytes, totalAvailable);\n while (remaining$1 > 0) {\n const chunk = chunks[head];\n const toFeed = Math.min(remaining$1, chunk.length);\n const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);\n consumeFromHead(toFeed);\n remaining$1 -= toFeed;\n fed += toFeed;\n if (!callback(segment))\n break;\n }\n return fed;\n }\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes) {\n const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);\n consumeFromHead(bytes);\n return view;\n }\n const result = new Uint8Array(bytes);\n let copied = 0;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toCopy = Math.min(remaining, chunk.length);\n result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n remaining -= toCopy;\n consumeFromHead(toCopy);\n }\n return result;\n }\n return {\n push: (chunk) => {\n if (chunk.length === 0)\n return;\n let nextTail = tail + 1 & capacityMask;\n if (nextTail === head) {\n const oldLen = chunks.length;\n const newLen = oldLen * 2;\n const newChunks = new Array(newLen);\n const count = tail - head + oldLen & oldLen - 1;\n if (head < tail)\n for (let i = 0;i < count; i++)\n newChunks[i] = chunks[head + i];\n else if (count > 0) {\n const firstPart = oldLen - head;\n for (let i = 0;i < firstPart; i++)\n newChunks[i] = chunks[head + i];\n for (let i = 0;i < tail; i++)\n newChunks[firstPart + i] = chunks[i];\n }\n chunks = newChunks;\n capacityMask = newLen - 1;\n head = 0;\n tail = count;\n nextTail = tail + 1 & capacityMask;\n }\n chunks[tail] = chunk;\n tail = nextTail;\n totalAvailable += chunk.length;\n },\n available: () => totalAvailable,\n peek: (bytes) => {\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes)\n return firstChunk.length === bytes ? 
firstChunk : firstChunk.subarray(0, bytes);\n const result = new Uint8Array(bytes);\n let copied = 0;\n let index = head;\n while (copied < bytes) {\n const chunk = chunks[index];\n const toCopy = Math.min(bytes - copied, chunk.length);\n if (toCopy === chunk.length)\n result.set(chunk, copied);\n else\n result.set(chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n index = index + 1 & capacityMask;\n }\n return result;\n },\n discard: (bytes) => {\n if (bytes > totalAvailable)\n throw new Error(\"Too many bytes consumed\");\n if (bytes === 0)\n return;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toConsume = Math.min(remaining, chunk.length);\n consumeFromHead(toConsume);\n remaining -= toConsume;\n }\n },\n pull\n };\n}\nvar STATE_HEADER = 0;\nvar STATE_BODY = 1;\nvar truncateErr = /* @__PURE__ */ new Error(\"Tar archive is truncated.\");\nfunction createUnpacker(options = {}) {\n const strict = options.strict ?? false;\n const { available, peek, push, discard, pull } = createChunkQueue();\n let state = STATE_HEADER;\n let ended = false;\n let done = false;\n let eof = false;\n let currentEntry = null;\n const paxGlobals = {};\n let nextEntryOverrides = {};\n const unpacker = {\n isEntryActive: () => state === STATE_BODY,\n isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,\n write(chunk) {\n if (ended)\n throw new Error(\"Archive already ended.\");\n push(chunk);\n },\n end() {\n ended = true;\n },\n readHeader() {\n if (state !== STATE_HEADER)\n throw new Error(\"Cannot read header while an entry is active\");\n if (done)\n return;\n while (!done) {\n if (available() < BLOCK_SIZE) {\n if (ended) {\n if (available() > 0 && strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n const headerBlock = peek(BLOCK_SIZE);\n if (isZeroBlock(headerBlock)) {\n if (available() < BLOCK_SIZE * 2) {\n if (ended) {\n if (strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {\n discard(BLOCK_SIZE * 2);\n done = true;\n eof = true;\n return;\n }\n if (strict)\n throw new Error(\"Invalid tar header.\");\n discard(BLOCK_SIZE);\n continue;\n }\n let internalHeader;\n try {\n internalHeader = parseUstarHeader(headerBlock, strict);\n } catch (err) {\n if (strict)\n throw err;\n discard(BLOCK_SIZE);\n continue;\n }\n const metaParser = getMetaParser(internalHeader.type);\n if (metaParser) {\n const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;\n if (available() < BLOCK_SIZE + paddedSize) {\n if (ended && strict)\n throw truncateErr;\n return null;\n }\n discard(BLOCK_SIZE);\n const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));\n const target = internalHeader.type === \"pax-global-header\" ? 
paxGlobals : nextEntryOverrides;\n for (const key in overrides)\n target[key] = overrides[key];\n continue;\n }\n discard(BLOCK_SIZE);\n const header = internalHeader;\n if (internalHeader.prefix)\n header.name = `${internalHeader.prefix}/${header.name}`;\n applyOverrides(header, paxGlobals);\n applyOverrides(header, nextEntryOverrides);\n nextEntryOverrides = {};\n currentEntry = {\n header,\n remaining: header.size,\n padding: -header.size & BLOCK_SIZE_MASK\n };\n state = STATE_BODY;\n return header;\n }\n },\n streamBody(callback) {\n if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)\n return 0;\n const bytesToFeed = Math.min(currentEntry.remaining, available());\n if (bytesToFeed === 0)\n return 0;\n const fed = pull(bytesToFeed, callback);\n currentEntry.remaining -= fed;\n return fed;\n },\n skipPadding() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n if (currentEntry.remaining > 0)\n throw new Error(\"Body not fully consumed\");\n if (available() < currentEntry.padding)\n return false;\n discard(currentEntry.padding);\n currentEntry = null;\n state = STATE_HEADER;\n return true;\n },\n skipEntry() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n const toDiscard = Math.min(currentEntry.remaining, available());\n if (toDiscard > 0) {\n discard(toDiscard);\n currentEntry.remaining -= toDiscard;\n }\n if (currentEntry.remaining > 0)\n return false;\n return unpacker.skipPadding();\n },\n validateEOF() {\n if (strict) {\n if (!eof)\n throw truncateErr;\n if (available() > 0) {\n if (pull(available()).some((byte) => byte !== 0))\n throw new Error(\"Invalid EOF.\");\n }\n }\n }\n };\n return unpacker;\n}\nfunction isZeroBlock(block) {\n if (block.byteOffset % 8 === 0) {\n const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);\n for (let i = 0;i < view.length; i++)\n if (view[i] !== 0n)\n return false;\n return true;\n }\n for (let i = 0;i < block.length; i++)\n if (block[i] !== 0)\n return false;\n return true;\n}\n\n// ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/web/index.js\nfunction createTarPacker2() {\n let streamController;\n let packer;\n return {\n readable: new ReadableStream({ start(controller) {\n streamController = controller;\n packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));\n } }),\n controller: {\n add(header) {\n const bodyless = isBodyless(header);\n const h = { ...header };\n if (bodyless)\n h.size = 0;\n packer.add(h);\n if (bodyless)\n packer.endEntry();\n return new WritableStream({\n write(chunk) {\n packer.write(chunk);\n },\n close() {\n if (!bodyless)\n packer.endEntry();\n },\n abort(reason) {\n streamController.error(reason);\n }\n });\n },\n finalize() {\n packer.finalize();\n },\n error(err) {\n streamController.error(err);\n }\n }\n };\n}\nasync function streamToBuffer(stream) {\n const chunks = [];\n const reader = stream.getReader();\n let totalLength = 0;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done)\n break;\n chunks.push(value);\n totalLength += value.length;\n }\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n } finally {\n reader.releaseLock();\n }\n}\nvar drain = (stream) => stream.pipeTo(new WritableStream);\nfunction createTarDecoder(options = {}) {\n const unpacker = createUnpacker(options);\n let 
bodyController = null;\n let pumping = false;\n const pump = (controller) => {\n if (pumping)\n return;\n pumping = true;\n try {\n while (true)\n if (unpacker.isEntryActive()) {\n if (bodyController) {\n if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())\n break;\n } else if (!unpacker.skipEntry())\n break;\n if (unpacker.isBodyComplete()) {\n try {\n bodyController?.close();\n } catch {}\n bodyController = null;\n if (!unpacker.skipPadding())\n break;\n }\n } else {\n const header = unpacker.readHeader();\n if (header === null || header === undefined)\n break;\n controller.enqueue({\n header,\n body: new ReadableStream({\n start(c) {\n if (header.size === 0)\n c.close();\n else\n bodyController = c;\n },\n pull: () => pump(controller),\n cancel() {\n bodyController = null;\n pump(controller);\n }\n })\n });\n }\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n bodyController = null;\n throw error;\n } finally {\n pumping = false;\n }\n };\n return new TransformStream({\n transform(chunk, controller) {\n try {\n unpacker.write(chunk);\n pump(controller);\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n },\n flush(controller) {\n try {\n unpacker.end();\n pump(controller);\n unpacker.validateEOF();\n if (unpacker.isEntryActive() && !unpacker.isBodyComplete())\n try {\n bodyController?.close();\n } catch {}\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n }\n }, undefined, { highWaterMark: 1 });\n}\nasync function packTar(entries) {\n const { readable, controller } = createTarPacker2();\n await (async () => {\n for (const entry of entries) {\n const entryStream = controller.add(entry.header);\n const body = \"body\" in entry ? entry.body : entry.data;\n if (!body) {\n await entryStream.close();\n continue;\n }\n if (body instanceof ReadableStream)\n await body.pipeTo(entryStream);\n else if (body instanceof Blob)\n await body.stream().pipeTo(entryStream);\n else\n try {\n const chunk = await normalizeBody(body);\n if (chunk.length > 0) {\n const writer = entryStream.getWriter();\n await writer.write(chunk);\n await writer.close();\n } else\n await entryStream.close();\n } catch {\n throw new TypeError(`Unsupported content type for entry \"${entry.header.name}\".`);\n }\n }\n })().then(() => controller.finalize()).catch((err) => controller.error(err));\n return new Uint8Array(await streamToBuffer(readable));\n}\nasync function unpackTar(archive, options = {}) {\n const sourceStream = archive instanceof ReadableStream ? archive : new ReadableStream({ start(controller) {\n controller.enqueue(archive instanceof Uint8Array ? 
archive : new Uint8Array(archive));\n controller.close();\n } });\n const results = [];\n const entryStream = sourceStream.pipeThrough(createTarDecoder(options));\n for await (const entry of entryStream) {\n let processedHeader;\n try {\n processedHeader = transformHeader(entry.header, options);\n } catch (error) {\n await entry.body.cancel();\n throw error;\n }\n if (processedHeader === null) {\n await drain(entry.body);\n continue;\n }\n if (isBodyless(processedHeader)) {\n await drain(entry.body);\n results.push({ header: processedHeader });\n } else\n results.push({\n header: processedHeader,\n data: await streamToBuffer(entry.body)\n });\n }\n return results;\n}\n\n// src/primitives/archive.ts\nvar gzipAsync = promisify(gzip);\n\nclass RXAImpl {\n _buffer;\n constructor(buffer) {\n this._buffer = buffer;\n }\n get stream() {\n const buffer = this._buffer;\n return new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n }\n });\n }\n async buffer() {\n return this._buffer;\n }\n}\nasync function archive(files) {\n const entries = Object.entries(files).map(([name, content]) => {\n return {\n header: { name, size: content.length, type: \"file\" },\n body: new Uint8Array(content)\n };\n });\n const tarBuffer = await packTar(entries);\n const gzipBuffer = await gzipAsync(Buffer.from(tarBuffer));\n return new RXAImpl(gzipBuffer);\n}\n// src/primitives/locate.ts\nfunction locate(rxm) {\n return {\n registry: rxm.registry,\n path: rxm.path,\n name: rxm.name,\n tag: rxm.tag\n };\n}\n// src/primitives/resource.ts\nfunction resource(rxm, rxa) {\n const rxl = locate(rxm);\n return {\n locator: rxl,\n manifest: rxm,\n archive: rxa\n };\n}\n// src/primitives/extract.ts\nimport { gunzip } from \"node:zlib\";\nimport { promisify as promisify2 } from \"node:util\";\nvar gunzipAsync = promisify2(gunzip);\nasync function extract(rxa) {\n const buffer = await rxa.buffer();\n const tarBuffer = await gunzipAsync(buffer);\n const entries = await unpackTar(tarBuffer);\n const files = {};\n for (const entry of entries) {\n if ((entry.header.type === \"file\" || entry.header.type === undefined) && entry.data) {\n files[entry.header.name] = Buffer.from(entry.data);\n }\n }\n return files;\n}\n// src/primitives/format.ts\nfunction format(rxl) {\n let result = \"\";\n if (rxl.registry) {\n result += rxl.registry + \"/\";\n }\n if (rxl.path) {\n result += rxl.path + \"/\";\n }\n result += rxl.name;\n if (rxl.tag && rxl.tag !== \"latest\") {\n result += \":\" + rxl.tag;\n }\n return result;\n}\n// src/primitives/parse.ts\nfunction looksLikeRegistry(str) {\n if (str.includes(\":\") && !str.includes(\"/\")) {\n return true;\n }\n if (str.includes(\".\")) {\n return true;\n }\n if (str === \"localhost\") {\n return true;\n }\n return false;\n}\nfunction parse(locator) {\n if (!locator || typeof locator !== \"string\") {\n throw new LocatorError(\"Locator must be a non-empty string\", locator);\n }\n if (locator.includes(\"@\")) {\n throw new LocatorError(\"Invalid locator format. 
Use name:tag instead of name@version\", locator);\n }\n const lastSlashIndex = locator.lastIndexOf(\"/\");\n let beforeSlash = \"\";\n let afterSlash = locator;\n if (lastSlashIndex !== -1) {\n beforeSlash = locator.substring(0, lastSlashIndex);\n afterSlash = locator.substring(lastSlashIndex + 1);\n }\n const colonIndex = afterSlash.lastIndexOf(\":\");\n let name;\n let tag;\n if (colonIndex === -1) {\n name = afterSlash;\n tag = \"latest\";\n } else {\n name = afterSlash.substring(0, colonIndex);\n tag = afterSlash.substring(colonIndex + 1);\n }\n if (!name) {\n throw new LocatorError(\"Name is required\", locator);\n }\n if (!tag) {\n throw new LocatorError(\"Tag cannot be empty. Use name:tag format or omit tag for :latest\", locator);\n }\n if (lastSlashIndex === -1) {\n return {\n registry: undefined,\n path: undefined,\n name,\n tag\n };\n }\n const parts = beforeSlash.split(\"/\");\n if (looksLikeRegistry(parts[0])) {\n const registry = parts[0];\n const path = parts.length > 1 ? parts.slice(1).join(\"/\") : undefined;\n return {\n registry,\n path,\n name,\n tag\n };\n }\n return {\n registry: undefined,\n path: beforeSlash,\n name,\n tag\n };\n}\n// src/primitives/wrap.ts\nclass RXAImpl2 {\n _buffer;\n constructor(buffer) {\n this._buffer = buffer;\n }\n get stream() {\n const buffer = this._buffer;\n return new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n }\n });\n }\n async buffer() {\n return this._buffer;\n }\n}\nfunction wrap(buffer) {\n return new RXAImpl2(buffer);\n}\nexport {\n wrap,\n resource,\n parse,\n manifest,\n locate,\n format,\n extract,\n define,\n archive,\n ResourceXError,\n ManifestError,\n LocatorError,\n DefinitionError,\n ContentError\n};\n\n//# debugId=E36D0778C816F02964756E2164756E21\n",
|
|
7
7
|
"import { ResourceXError } from \"@resourcexjs/core\";\n\n/**\n * Resource type related error.\n */\nexport class ResourceTypeError extends ResourceXError {\n constructor(message: string) {\n super(message);\n this.name = \"ResourceTypeError\";\n }\n}\n",
|
|
8
8
|
"/**\n * Built-in resource types (pre-bundled).\n * Auto-generated by build.ts - DO NOT EDIT\n *\n * Resolver code receives ResolveContext (ctx) with:\n * - ctx.manifest: { domain, path?, name, type, version }\n * - ctx.files: Record<string, Uint8Array>\n *\n * Code format: ESM bundled code (without export statement)\n * The resolver variable name is stored in the code comment.\n */\n\nimport type { BundledType } from \"./types.js\";\n\n\n/**\n * Plain text content\n */\nexport const textType: BundledType = {\n name: \"text\",\n aliases: [\"txt\",\"plaintext\"],\n description: \"Plain text content\",\n code: `// @resolver: text_type_default\n// src/builtins/text.type.ts\nvar text_type_default = {\n name: \"text\",\n aliases: [\"txt\", \"plaintext\"],\n description: \"Plain text content\",\n async resolve(ctx) {\n const content = ctx.files[\"content\"];\n return new TextDecoder().decode(content);\n }\n};`,\n};\n\n\n/**\n * JSON content\n */\nexport const jsonType: BundledType = {\n name: \"json\",\n aliases: [\"config\",\"manifest\"],\n description: \"JSON content\",\n code: `// @resolver: json_type_default\n// src/builtins/json.type.ts\nvar json_type_default = {\n name: \"json\",\n aliases: [\"config\", \"manifest\"],\n description: \"JSON content\",\n async resolve(ctx) {\n const content = ctx.files[\"content\"];\n return JSON.parse(new TextDecoder().decode(content));\n }\n};`,\n};\n\n\n/**\n * Binary content\n */\nexport const binaryType: BundledType = {\n name: \"binary\",\n aliases: [\"bin\",\"blob\",\"raw\"],\n description: \"Binary content\",\n code: `// @resolver: binary_type_default\n// src/builtins/binary.type.ts\nvar binary_type_default = {\n name: \"binary\",\n aliases: [\"bin\", \"blob\", \"raw\"],\n description: \"Binary content\",\n async resolve(ctx) {\n return ctx.files[\"content\"];\n }\n};`,\n};\n\n\n/**\n * All built-in types as an array.\n */\nexport const builtinTypes: BundledType[] = [textType, jsonType, binaryType];\n",
|
|
9
9
|
"import type { BundledType } from \"./types.js\";\nimport { ResourceTypeError } from \"./errors.js\";\nimport { builtinTypes } from \"./builtinTypes.js\";\n\n/**\n * TypeHandlerChain - Manages resource type registration.\n *\n * Responsibilities:\n * - Register types (name + aliases)\n * - Look up types by name\n *\n * Execution is delegated to ResolverExecutor (in registry package).\n *\n * Built-in types (text, json, binary) are registered by default.\n */\nexport class TypeHandlerChain {\n private handlers: Map<string, BundledType> = new Map();\n\n private constructor() {\n // Register builtin types by default\n for (const type of builtinTypes) {\n this.registerInternal(type);\n }\n }\n\n /**\n * Create a new TypeHandlerChain instance.\n * Built-in types (text, json, binary) are included by default.\n */\n static create(): TypeHandlerChain {\n return new TypeHandlerChain();\n }\n\n /**\n * Internal registration (no duplicate check).\n */\n private registerInternal(type: BundledType): void {\n this.handlers.set(type.name, type);\n if (type.aliases) {\n for (const alias of type.aliases) {\n this.handlers.set(alias, type);\n }\n }\n }\n\n /**\n * Register a type.\n * @throws ResourceTypeError if type is already registered\n */\n register(type: BundledType): void {\n if (this.handlers.has(type.name)) {\n throw new ResourceTypeError(`Type '${type.name}' is already registered`);\n }\n this.handlers.set(type.name, type);\n if (type.aliases) {\n for (const alias of type.aliases) {\n if (this.handlers.has(alias)) {\n throw new ResourceTypeError(`Alias '${alias}' conflicts with existing type or alias`);\n }\n this.handlers.set(alias, type);\n }\n }\n }\n\n /**\n * Check if a type is supported.\n */\n canHandle(typeName: string): boolean {\n return this.handlers.has(typeName);\n }\n\n /**\n * Get handler for a type.\n * @throws ResourceTypeError if type is not supported\n */\n getHandler(typeName: string): BundledType {\n const handler = this.handlers.get(typeName);\n if (!handler) {\n throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);\n }\n return handler;\n }\n\n /**\n * Get handler for a type, or undefined if not found.\n */\n getHandlerOrUndefined(typeName: string): BundledType | undefined {\n return this.handlers.get(typeName);\n }\n\n /**\n * Get all supported type names (including aliases).\n */\n getSupportedTypes(): string[] {\n return Array.from(this.handlers.keys());\n }\n\n /**\n * Clear all registered types (for testing).\n */\n clear(): void {\n this.handlers.clear();\n }\n}\n"
|
|
10
10
|
],
|
|
11
|
-
"mappings": ";AAAA;AACA;AA2BA,eAAsB,kBAAkB,CACtC,YACA,UACsB;AAAA,EAEtB,MAAM,WAAW,WAAW,UAAU,IAClC,aACA,QAAQ,YAAY,QAAQ,IAAI,GAAG,UAAU;AAAA,EAGjD,MAAM,SAAS,MAAM,SAAS,UAAU,OAAO;AAAA,EAI/C,MAAM,SAAS,MAAM,IAAI,MAAM;AAAA,IAC7B,OAAO;AAAA,MACL,UAAU;AAAA,MACV,YAAY,QAAQ,UAAU,IAAI;AAAA,MAClC,QAAQ;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,EACV,CAAC;AAAA,EAED,IAAI,CAAC,OAAO,SAAS;AAAA,IACnB,MAAM,SAAS,OAAO,KAAK,IAAI,CAAC,QAA6B,IAAI,OAAO,EAAE,KAAK;AAAA,CAAI;AAAA,IACnF,MAAM,IAAI,MAAM,oBAAoB,eAAe,QAAQ;AAAA,EAC7D;AAAA,EAGA,MAAM,cAAc,MAAM,OAAO,QAAQ,GAAG,KAAK;AAAA,EAIjD,MAAM,aAAa,MAAa;AAAA,EAChC,MAAM,aAAiC,WAAW;AAAA,EAElD,IAAI,CAAC,WAAW,MAAM;AAAA,IACpB,MAAM,IAAI,MAAM,oBAAoB,6BAA6B;AAAA,EACnE;AAAA,EAEA,IAAI,OAAO,WAAW,YAAY,YAAY;AAAA,IAC5C,MAAM,IAAI,MAAM,oBAAoB,yCAAyC;AAAA,EAC/E;AAAA,EAEA,OAAO;AAAA,IACL,MAAM,WAAW;AAAA,IACjB,SAAS,WAAW;AAAA,IACpB,aAAa,WAAW,eAAe;AAAA,IACvC,QAAQ,WAAW;AAAA,IACnB,MAAM;AAAA,EACR;AAAA;;
|
|
11
|
+
"mappings": ";AAAA;AACA;AA2BA,eAAsB,kBAAkB,CACtC,YACA,UACsB;AAAA,EAEtB,MAAM,WAAW,WAAW,UAAU,IAClC,aACA,QAAQ,YAAY,QAAQ,IAAI,GAAG,UAAU;AAAA,EAGjD,MAAM,SAAS,MAAM,SAAS,UAAU,OAAO;AAAA,EAI/C,MAAM,SAAS,MAAM,IAAI,MAAM;AAAA,IAC7B,OAAO;AAAA,MACL,UAAU;AAAA,MACV,YAAY,QAAQ,UAAU,IAAI;AAAA,MAClC,QAAQ;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,EACV,CAAC;AAAA,EAED,IAAI,CAAC,OAAO,SAAS;AAAA,IACnB,MAAM,SAAS,OAAO,KAAK,IAAI,CAAC,QAA6B,IAAI,OAAO,EAAE,KAAK;AAAA,CAAI;AAAA,IACnF,MAAM,IAAI,MAAM,oBAAoB,eAAe,QAAQ;AAAA,EAC7D;AAAA,EAGA,MAAM,cAAc,MAAM,OAAO,QAAQ,GAAG,KAAK;AAAA,EAIjD,MAAM,aAAa,MAAa;AAAA,EAChC,MAAM,aAAiC,WAAW;AAAA,EAElD,IAAI,CAAC,WAAW,MAAM;AAAA,IACpB,MAAM,IAAI,MAAM,oBAAoB,6BAA6B;AAAA,EACnE;AAAA,EAEA,IAAI,OAAO,WAAW,YAAY,YAAY;AAAA,IAC5C,MAAM,IAAI,MAAM,oBAAoB,yCAAyC;AAAA,EAC/E;AAAA,EAEA,OAAO;AAAA,IACL,MAAM,WAAW;AAAA,IACjB,SAAS,WAAW;AAAA,IACpB,aAAa,WAAW,eAAe;AAAA,IACvC,QAAQ,WAAW;AAAA,IACnB,MAAM;AAAA,EACR;AAAA;;ACDF;AACA;AAsgCA;AACA,sBAAS;AAAA;AAtlCT,MAAM,uBAAuB,MAAM;AAAA,EACjC,WAAW,CAAC,SAAS,SAAS;AAAA,IAC5B,MAAM,SAAS,OAAO;AAAA,IACtB,KAAK,OAAO;AAAA;AAEhB;AA6EA,IAAI,aAAa;AAiEjB,IAAI,aAAa,IAAI,WAAW,UAAU;AAC1C,IAAI,QAAQ,IAAI,WAAW,CAAC;AAC5B,IAAI,UAAU,IAAI;AAClB,IAAI,UAAU,IAAI;AA0SlB,IAAI,aAAa,IAAI,WAAW,aAAa,CAAC;AAmmB9C,IAAI,YAAY,UAAU,IAAI;AAoD9B,IAAI,cAAc,WAAW,MAAM;;;ACnlC5B,MAAM,0BAA0B,eAAe;AAAA,EACpD,WAAW,CAAC,SAAiB;AAAA,IAC3B,MAAM,OAAO;AAAA,IACb,KAAK,OAAO;AAAA;AAEhB;;ACQO,IAAM,WAAwB;AAAA,EACnC,MAAM;AAAA,EACN,SAAS,CAAC,OAAM,WAAW;AAAA,EAC3B,aAAa;AAAA,EACb,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWR;AAMO,IAAM,WAAwB;AAAA,EACnC,MAAM;AAAA,EACN,SAAS,CAAC,UAAS,UAAU;AAAA,EAC7B,aAAa;AAAA,EACb,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWR;AAMO,IAAM,aAA0B;AAAA,EACrC,MAAM;AAAA,EACN,SAAS,CAAC,OAAM,QAAO,KAAK;AAAA,EAC5B,aAAa;AAAA,EACb,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUR;AAMO,IAAM,eAA8B,CAAC,UAAU,UAAU,UAAU;;;ACjEnE,MAAM,iBAAiB;AAAA,EACpB,WAAqC,IAAI;AAAA,EAEzC,WAAW,GAAG;AAAA,IAEpB,WAAW,QAAQ,cAAc;AAAA,MAC/B,KAAK,iBAAiB,IAAI;AAAA,IAC5B;AAAA;AAAA,SAOK,MAAM,GAAqB;AAAA,IAChC,OAAO,IAAI;AAAA;AAAA,EAML,gBAAgB,CAAC,MAAyB;AAAA,IAChD,KAAK,SAAS,IAAI,KAAK,MAAM,IAAI;AAAA,IACjC,IAAI,KAAK,SAAS;AAAA,MAChB,WAAW,SAAS,KAAK,SAAS;AAAA,QAChC,KAAK,SAAS,IAAI,OAAO,IAAI;AAAA,MAC/B;AAAA,IACF;AAAA;AAAA,EAOF,QAAQ,CAAC,MAAyB;AAAA,IAChC,IAAI,KAAK,SAAS,IAAI,KAAK,IAAI,GAAG;AAAA,MAChC,MAAM,IAAI,kBAAkB,SAAS,KAAK,6BAA6B;AAAA,IACzE;AAAA,IACA,KAAK,SAAS,IAAI,KAAK,MAAM,IAAI;AAAA,IACjC,IAAI,KAAK,SAAS;AAAA,MAChB,WAAW,SAAS,KAAK,SAAS;AAAA,QAChC,IAAI,KAAK,SAAS,IAAI,KAAK,GAAG;AAAA,UAC5B,MAAM,IAAI,kBAAkB,UAAU,8CAA8C;AAAA,QACtF;AAAA,QACA,KAAK,SAAS,IAAI,OAAO,IAAI;AAAA,MAC/B;AAAA,IACF;AAAA;AAAA,EAMF,SAAS,CAAC,UAA2B;AAAA,IACnC,OAAO,KAAK,SAAS,IAAI,QAAQ;AAAA;AAAA,EAOnC,UAAU,CAAC,UAA+B;AAAA,IACxC,MAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAAA,IAC1C,IAAI,CAAC,SAAS;AAAA,MACZ,MAAM,IAAI,kBAAkB,8BAA8B,UAAU;AAAA,IACtE;AAAA,IACA,OAAO;AAAA;AAAA,EAMT,qBAAqB,CAAC,UAA2C;AAAA,IAC/D,OAAO,KAAK,SAAS,IAAI,QAAQ;AAAA;AAAA,EAMnC,iBAAiB,GAAa;AAAA,IAC5B,OAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA;AAAA,EAMxC,KAAK,GAAS;AAAA,IACZ,KAAK,SAAS,MAAM;AAAA;AAExB;",
|
|
12
12
|
"debugId": "13A2CA0CDE7CB58B64756E2164756E21",
|
|
13
13
|
"names": []
|
|
14
14
|
}
|
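The sourcemap above inlines the bundled `@resourcexjs/core` primitives, including the locator grammar behind `parse` and `format`: a locator is `[registry/][path/]name[:tag]`, the first segment is treated as a registry only when it looks like a host (contains a dot or a port, or is `localhost`), and the tag defaults to `latest`. A minimal sketch of that behavior, assuming `parse`, `format`, and `LocatorError` are importable from `@resourcexjs/core` as the bundle's export list suggests (the locator strings are illustrative):

```typescript
import { parse, format, LocatorError } from "@resourcexjs/core";

parse("prompt");
// => { registry: undefined, path: undefined, name: "prompt", tag: "latest" }

parse("ai/tools/prompt:v2");
// "ai" has no dot or colon and is not "localhost", so it is a path segment:
// => { registry: undefined, path: "ai/tools", name: "prompt", tag: "v2" }

parse("example.com/ai/prompt:v2");
// "example.com" contains a dot, so it is treated as a registry host:
// => { registry: "example.com", path: "ai", name: "prompt", tag: "v2" }

format({ registry: "example.com", path: "ai", name: "prompt", tag: "latest" });
// => "example.com/ai/prompt" (a "latest" tag is omitted when formatting)

try {
  parse("prompt@1.0.0"); // name@version is rejected in favor of name:tag
} catch (err) {
  console.error(err instanceof LocatorError); // true
}
```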
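The generated `builtinTypes.ts` source above documents the bundled resolver convention: each `BundledType.code` is ESM-bundled text with no `export` statement, and the resolver variable's name is recorded in the leading `// @resolver:` comment. A hand-written type following that convention might look like the sketch below; the `yaml` type is invented for illustration, and in practice `bundleResourceType` generates this shape for you:

```typescript
import { TypeHandlerChain } from "@resourcexjs/type";
import type { BundledType } from "@resourcexjs/type";

// Hypothetical type, authored in the same shape as the generated builtins.
const yamlType: BundledType = {
  name: "yaml",
  aliases: ["yml"],
  description: "YAML content",
  code: `// @resolver: yaml_type_default
var yaml_type_default = {
  name: "yaml",
  aliases: ["yml"],
  description: "YAML content",
  async resolve(ctx) {
    // ctx.files holds extracted files as Record<string, Uint8Array>
    return new TextDecoder().decode(ctx.files["content"]);
  }
};`,
};

const chain = TypeHandlerChain.create();
chain.register(yamlType);
chain.canHandle("yml"); // true
```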
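The `TypeHandlerChain` source above also shows that registration is strict: the constructor pre-registers the builtins, and `register` throws `ResourceTypeError` when a name or alias is already taken. A sketch of that behavior, assuming `textType` is re-exported from the package index (in this diff it is only visible as an export of `builtinTypes.ts`):

```typescript
import { TypeHandlerChain, textType } from "@resourcexjs/type";

const chain = TypeHandlerChain.create(); // text, json, binary pre-registered

try {
  chain.register(textType); // "text" already belongs to the builtin
} catch (err) {
  console.error((err as Error).message); // Type 'text' is already registered
}

// Non-throwing lookup when a type may be absent:
chain.getHandlerOrUndefined("toml"); // undefined instead of a throw
```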
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@resourcexjs/type",
|
|
3
|
-
"version": "2.5.
|
|
3
|
+
"version": "2.5.2",
|
|
4
4
|
"description": "ResourceX Type System - Type handlers and serialization",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"resourcex",
|
|
@@ -13,7 +13,7 @@
|
|
|
13
13
|
"url": "git+https://github.com/Deepractice/ResourceX.git",
|
|
14
14
|
"directory": "packages/type"
|
|
15
15
|
},
|
|
16
|
-
"license": "
|
|
16
|
+
"license": "Apache-2.0",
|
|
17
17
|
"engines": {
|
|
18
18
|
"node": ">=22.0.0"
|
|
19
19
|
},
|
|
@@ -38,7 +38,7 @@
|
|
|
38
38
|
"clean": "rm -rf dist"
|
|
39
39
|
},
|
|
40
40
|
"dependencies": {
|
|
41
|
-
"@resourcexjs/core": "^2.5.
|
|
41
|
+
"@resourcexjs/core": "^2.5.2"
|
|
42
42
|
},
|
|
43
43
|
"devDependencies": {},
|
|
44
44
|
"publishConfig": {
|