@resourcexjs/type 2.2.0 → 2.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
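The headline change in 2.4.0 is structural: each built-in resource type previously shipped a serializer/resolver object pair and now carries its resolver as a pre-bundled code string, and a new bundleResourceType helper (src/bundler.ts, first hunk below) produces that shape from a module on disk. The following TypeScript sketch shows the module shape that bundleResourceType validates; the yaml example and any ResolveContext field beyond ctx.files are assumptions inferred from the builtin resolvers in the diff, not documented API.

// yaml.type.ts -- hypothetical resource-type module, matching the shape
// that bundleResourceType() validates and the bundled builtins below.
interface ResolveContext {
  // The builtin resolvers read ctx.files["content"]; other context
  // fields, if any, are not visible in this diff.
  files: Record<string, Uint8Array>;
}

export default {
  name: "yaml",                 // required, else "must have a name"
  aliases: ["yml"],             // optional
  description: "YAML content",  // optional, defaults to ""
  async resolve(ctx: ResolveContext) {
    // required function, else "must have a resolve function"
    return new TextDecoder().decode(ctx.files["content"]);
  }
};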
package/dist/index.js CHANGED
@@ -1,3 +1,41 @@
+ // src/bundler.ts
+ import { readFile } from "node:fs/promises";
+ import { resolve, isAbsolute } from "node:path";
+ async function bundleResourceType(sourcePath, basePath) {
+ const fullPath = isAbsolute(sourcePath) ? sourcePath : resolve(basePath ?? process.cwd(), sourcePath);
+ const source = await readFile(fullPath, "utf-8");
+ const result = await Bun.build({
+ stdin: {
+ contents: source,
+ resolveDir: resolve(fullPath, ".."),
+ loader: "ts"
+ },
+ target: "bun",
+ format: "esm",
+ minify: false
+ });
+ if (!result.success) {
+ const errors = result.logs.map((log) => log.message).join(`
+ `);
+ throw new Error(`Failed to bundle ${sourcePath}: ${errors}`);
+ }
+ const bundledCode = await result.outputs[0].text();
+ const tempModule = await import(fullPath);
+ const typeSource = tempModule.default;
+ if (!typeSource.name) {
+ throw new Error(`Resource type at ${sourcePath} must have a name`);
+ }
+ if (typeof typeSource.resolve !== "function") {
+ throw new Error(`Resource type at ${sourcePath} must have a resolve function`);
+ }
+ return {
+ name: typeSource.name,
+ aliases: typeSource.aliases,
+ description: typeSource.description ?? "",
+ schema: typeSource.schema,
+ code: bundledCode
+ };
+ }
  // ../core/dist/index.js
  import { gzip, gunzip } from "node:zlib";
  import { promisify } from "node:util";
@@ -8,1119 +46,15 @@ class ResourceXError extends Error {
  this.name = "ResourceXError";
  }
  }
- class ContentError extends ResourceXError {
- constructor(message) {
- super(message);
- this.name = "ContentError";
- }
- }
-
- class RXLImpl {
- domain;
- path;
- name;
- type;
- version;
- constructor(parts) {
- this.domain = parts.domain;
- this.path = parts.path;
- this.name = parts.name;
- this.type = parts.type;
- this.version = parts.version;
- }
- toString() {
- let result = "";
- if (this.domain) {
- result += this.domain + "/";
- if (this.path) {
- result += this.path + "/";
- }
- }
- result += this.name;
- if (this.type) {
- result += "." + this.type;
- }
- if (this.version) {
- result += "@" + this.version;
- }
- return result;
- }
- }
- function isDomain(str) {
- if (str === "localhost")
- return true;
- return str.includes(".");
- }
- function parseRXL(locator) {
- let remaining = locator;
- let version;
- let type;
- let domain;
- let path;
- let name;
- const atIndex = remaining.indexOf("@");
- if (atIndex !== -1) {
- version = remaining.slice(atIndex + 1);
- remaining = remaining.slice(0, atIndex);
- }
- const segments = remaining.split("/");
- if (segments.length > 1 && isDomain(segments[0])) {
- domain = segments[0];
- const lastSegment = segments[segments.length - 1];
- if (segments.length > 2) {
- path = segments.slice(1, -1).join("/");
- }
- remaining = lastSegment;
- } else {
- remaining = segments.join("/");
- }
- const dotIndex = remaining.lastIndexOf(".");
- if (dotIndex !== -1) {
- type = remaining.slice(dotIndex + 1);
- name = remaining.slice(0, dotIndex);
- } else {
- name = remaining;
- }
- return new RXLImpl({ domain, path, name, type, version });
- }
  var BLOCK_SIZE = 512;
- var BLOCK_SIZE_MASK = 511;
- var DEFAULT_FILE_MODE = 420;
- var DEFAULT_DIR_MODE = 493;
- var USTAR_NAME_OFFSET = 0;
- var USTAR_NAME_SIZE = 100;
- var USTAR_MODE_OFFSET = 100;
- var USTAR_MODE_SIZE = 8;
- var USTAR_UID_OFFSET = 108;
- var USTAR_UID_SIZE = 8;
- var USTAR_GID_OFFSET = 116;
- var USTAR_GID_SIZE = 8;
- var USTAR_SIZE_OFFSET = 124;
- var USTAR_SIZE_SIZE = 12;
- var USTAR_MTIME_OFFSET = 136;
- var USTAR_MTIME_SIZE = 12;
- var USTAR_CHECKSUM_OFFSET = 148;
- var USTAR_CHECKSUM_SIZE = 8;
- var USTAR_TYPEFLAG_OFFSET = 156;
- var USTAR_TYPEFLAG_SIZE = 1;
- var USTAR_LINKNAME_OFFSET = 157;
- var USTAR_LINKNAME_SIZE = 100;
- var USTAR_MAGIC_OFFSET = 257;
- var USTAR_MAGIC_SIZE = 6;
- var USTAR_VERSION_OFFSET = 263;
- var USTAR_VERSION_SIZE = 2;
- var USTAR_UNAME_OFFSET = 265;
- var USTAR_UNAME_SIZE = 32;
- var USTAR_GNAME_OFFSET = 297;
- var USTAR_GNAME_SIZE = 32;
- var USTAR_PREFIX_OFFSET = 345;
- var USTAR_PREFIX_SIZE = 155;
- var USTAR_VERSION = "00";
- var USTAR_MAX_UID_GID = 2097151;
- var USTAR_MAX_SIZE = 8589934591;
- var FILE = "file";
- var LINK = "link";
- var SYMLINK = "symlink";
- var DIRECTORY = "directory";
- var TYPEFLAG = {
- file: "0",
- link: "1",
- symlink: "2",
- "character-device": "3",
- "block-device": "4",
- directory: "5",
- fifo: "6",
- "pax-header": "x",
- "pax-global-header": "g",
- "gnu-long-name": "L",
- "gnu-long-link-name": "K"
- };
- var FLAGTYPE = {
- "0": FILE,
- "1": LINK,
- "2": SYMLINK,
- "3": "character-device",
- "4": "block-device",
- "5": DIRECTORY,
- "6": "fifo",
- x: "pax-header",
- g: "pax-global-header",
- L: "gnu-long-name",
- K: "gnu-long-link-name"
- };
  var ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);
  var EMPTY = new Uint8Array(0);
  var encoder = new TextEncoder;
  var decoder = new TextDecoder;
- function writeString(view, offset, size, value) {
- if (value)
- encoder.encodeInto(value, view.subarray(offset, offset + size));
- }
- function writeOctal(view, offset, size, value) {
- if (value === undefined)
- return;
- const octalString = value.toString(8).padStart(size - 1, "0");
- encoder.encodeInto(octalString, view.subarray(offset, offset + size - 1));
- }
- function readString(view, offset, size) {
- const end = view.indexOf(0, offset);
- const sliceEnd = end === -1 || end > offset + size ? offset + size : end;
- return decoder.decode(view.subarray(offset, sliceEnd));
- }
- function readOctal(view, offset, size) {
- let value = 0;
- const end = offset + size;
- for (let i = offset;i < end; i++) {
- const charCode = view[i];
- if (charCode === 0)
- break;
- if (charCode === 32)
- continue;
- value = value * 8 + (charCode - 48);
- }
- return value;
- }
- function readNumeric(view, offset, size) {
- if (view[offset] & 128) {
- let result = 0;
- result = view[offset] & 127;
- for (let i = 1;i < size; i++)
- result = result * 256 + view[offset + i];
- if (!Number.isSafeInteger(result))
- throw new Error("TAR number too large");
- return result;
- }
- return readOctal(view, offset, size);
- }
- var isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;
- async function normalizeBody(body) {
- if (body === null || body === undefined)
- return EMPTY;
- if (body instanceof Uint8Array)
- return body;
- if (typeof body === "string")
- return encoder.encode(body);
- if (body instanceof ArrayBuffer)
- return new Uint8Array(body);
- if (body instanceof Blob)
- return new Uint8Array(await body.arrayBuffer());
- throw new TypeError("Unsupported content type for entry body.");
- }
- function transformHeader(header, options) {
- const { strip, filter, map } = options;
- if (!strip && !filter && !map)
- return header;
- const h = { ...header };
- if (strip && strip > 0) {
- const components = h.name.split("/").filter(Boolean);
- if (strip >= components.length)
- return null;
- const newName = components.slice(strip).join("/");
- h.name = h.type === DIRECTORY && !newName.endsWith("/") ? `${newName}/` : newName;
- if (h.linkname?.startsWith("/")) {
- const linkComponents = h.linkname.split("/").filter(Boolean);
- h.linkname = strip >= linkComponents.length ? "/" : `/${linkComponents.slice(strip).join("/")}`;
- }
- }
- if (filter?.(h) === false)
- return null;
- const result = map ? map(h) : h;
- if (result && (!result.name || !result.name.trim() || result.name === "." || result.name === "/"))
- return null;
- return result;
- }
- var CHECKSUM_SPACE = 32;
- var ASCII_ZERO = 48;
- function validateChecksum(block) {
- const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);
- let sum = 0;
- for (let i = 0;i < block.length; i++)
- if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)
- sum += CHECKSUM_SPACE;
- else
- sum += block[i];
- return stored === sum;
- }
- function writeChecksum(block) {
- block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);
- let checksum = 0;
- for (const byte of block)
- checksum += byte;
- for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {
- block[i] = (checksum & 7) + ASCII_ZERO;
- checksum >>= 3;
- }
- block[USTAR_CHECKSUM_OFFSET + 6] = 0;
- block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;
- }
- function generatePax(header) {
- const paxRecords = {};
- if (header.name.length > USTAR_NAME_SIZE) {
- if (findUstarSplit(header.name) === null)
- paxRecords.path = header.name;
- }
- if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)
- paxRecords.linkpath = header.linkname;
- if (header.uname && header.uname.length > USTAR_UNAME_SIZE)
- paxRecords.uname = header.uname;
- if (header.gname && header.gname.length > USTAR_GNAME_SIZE)
- paxRecords.gname = header.gname;
- if (header.uid != null && header.uid > USTAR_MAX_UID_GID)
- paxRecords.uid = String(header.uid);
- if (header.gid != null && header.gid > USTAR_MAX_UID_GID)
- paxRecords.gid = String(header.gid);
- if (header.size != null && header.size > USTAR_MAX_SIZE)
- paxRecords.size = String(header.size);
- if (header.pax)
- Object.assign(paxRecords, header.pax);
- const paxEntries = Object.entries(paxRecords);
- if (paxEntries.length === 0)
- return null;
- const paxBody = encoder.encode(paxEntries.map(([key, value]) => {
- const record = `${key}=${value}
- `;
- const partLength = encoder.encode(record).length + 1;
- let totalLength = partLength + String(partLength).length;
- totalLength = partLength + String(totalLength).length;
- return `${totalLength} ${record}`;
- }).join(""));
- return {
- paxHeader: createTarHeader({
- name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),
- size: paxBody.length,
- type: "pax-header",
- mode: 420,
- mtime: header.mtime,
- uname: header.uname,
- gname: header.gname,
- uid: header.uid,
- gid: header.gid
- }),
- paxBody
- };
- }
- function findUstarSplit(path) {
- if (path.length <= USTAR_NAME_SIZE)
- return null;
- const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;
- const slashIndex = path.lastIndexOf("/", USTAR_PREFIX_SIZE);
- if (slashIndex > 0 && slashIndex >= minSlashIndex)
- return {
- prefix: path.slice(0, slashIndex),
- name: path.slice(slashIndex + 1)
- };
- return null;
- }
- function createTarHeader(header) {
- const view = new Uint8Array(BLOCK_SIZE);
- const size = isBodyless(header) ? 0 : header.size ?? 0;
- let name = header.name;
- let prefix = "";
- if (!header.pax?.path) {
- const split = findUstarSplit(name);
- if (split) {
- name = split.name;
- prefix = split.prefix;
- }
- }
- writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);
- writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
- writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 0);
- writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);
- writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);
- writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));
- writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);
- writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);
- writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, "ustar\x00");
- writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);
- writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);
- writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);
- writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);
- writeChecksum(view);
- return view;
- }
- function parseUstarHeader(block, strict) {
- if (strict && !validateChecksum(block))
- throw new Error("Invalid tar header checksum.");
- const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);
- const header = {
- name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),
- mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),
- uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),
- gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),
- size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),
- mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),
- type: FLAGTYPE[typeflag] || FILE,
- linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)
- };
- const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);
- if (magic.trim() === "ustar") {
- header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);
- header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);
- }
- if (magic === "ustar")
- header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
- return header;
- }
- var PAX_MAPPING = {
- path: ["name", (v) => v],
- linkpath: ["linkname", (v) => v],
- size: ["size", (v) => parseInt(v, 10)],
- mtime: ["mtime", parseFloat],
- uid: ["uid", (v) => parseInt(v, 10)],
- gid: ["gid", (v) => parseInt(v, 10)],
- uname: ["uname", (v) => v],
- gname: ["gname", (v) => v]
- };
- function parsePax(buffer) {
- const decoder$1 = new TextDecoder("utf-8");
- const overrides = {};
- const pax = {};
- let offset = 0;
- while (offset < buffer.length) {
- const spaceIndex = buffer.indexOf(32, offset);
- if (spaceIndex === -1)
- break;
- const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);
- if (Number.isNaN(length) || length === 0)
- break;
- const recordEnd = offset + length;
- const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
- if (key && value !== undefined) {
- pax[key] = value;
- const mapping = PAX_MAPPING[key];
- if (mapping) {
- const [targetKey, parser] = mapping;
- const parsedValue = parser(value);
- if (typeof parsedValue === "string" || !Number.isNaN(parsedValue))
- overrides[targetKey] = parsedValue;
- }
- }
- offset = recordEnd;
- }
- if (Object.keys(pax).length > 0)
- overrides.pax = pax;
- return overrides;
- }
- function applyOverrides(header, overrides) {
- if (overrides.name !== undefined)
- header.name = overrides.name;
- if (overrides.linkname !== undefined)
- header.linkname = overrides.linkname;
- if (overrides.size !== undefined)
- header.size = overrides.size;
- if (overrides.mtime !== undefined)
- header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);
- if (overrides.uid !== undefined)
- header.uid = overrides.uid;
- if (overrides.gid !== undefined)
- header.gid = overrides.gid;
- if (overrides.uname !== undefined)
- header.uname = overrides.uname;
- if (overrides.gname !== undefined)
- header.gname = overrides.gname;
- if (overrides.pax)
- header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);
- }
- function getMetaParser(type) {
- switch (type) {
- case "pax-global-header":
- case "pax-header":
- return parsePax;
- case "gnu-long-name":
- return (data) => ({ name: readString(data, 0, data.length) });
- case "gnu-long-link-name":
- return (data) => ({ linkname: readString(data, 0, data.length) });
- default:
- return;
- }
- }
- function getHeaderBlocks(header) {
- const base = createTarHeader(header);
- const pax = generatePax(header);
- if (!pax)
- return [base];
- const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;
- const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];
- return [
- pax.paxHeader,
- pax.paxBody,
- ...paddingBlocks,
- base
- ];
- }
  var EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);
- function createTarPacker(onData, onError, onFinalize) {
- let currentHeader = null;
- let bytesWritten = 0;
- let finalized = false;
- return {
- add(header) {
- if (finalized) {
- const error = /* @__PURE__ */ new Error("No new tar entries after finalize.");
- onError(error);
- throw error;
- }
- if (currentHeader !== null) {
- const error = /* @__PURE__ */ new Error("Previous entry must be completed before adding a new one");
- onError(error);
- throw error;
- }
- try {
- const size = isBodyless(header) ? 0 : header.size ?? 0;
- const headerBlocks = getHeaderBlocks({
- ...header,
- size
- });
- for (const block of headerBlocks)
- onData(block);
- currentHeader = {
- ...header,
- size
- };
- bytesWritten = 0;
- } catch (error) {
- onError(error);
- }
- },
- write(chunk) {
- if (!currentHeader) {
- const error = /* @__PURE__ */ new Error("No active tar entry.");
- onError(error);
- throw error;
- }
- if (finalized) {
- const error = /* @__PURE__ */ new Error("Cannot write data after finalize.");
- onError(error);
- throw error;
- }
- const newTotal = bytesWritten + chunk.length;
- if (newTotal > currentHeader.size) {
- const error = /* @__PURE__ */ new Error(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
- onError(error);
- throw error;
- }
- try {
- bytesWritten = newTotal;
- onData(chunk);
- } catch (error) {
- onError(error);
- }
- },
- endEntry() {
- if (!currentHeader) {
- const error = /* @__PURE__ */ new Error("No active entry to end.");
- onError(error);
- throw error;
- }
- if (finalized) {
- const error = /* @__PURE__ */ new Error("Cannot end entry after finalize.");
- onError(error);
- throw error;
- }
- try {
- if (bytesWritten !== currentHeader.size) {
- const error = /* @__PURE__ */ new Error(`Size mismatch for "${currentHeader.name}".`);
- onError(error);
- throw error;
- }
- const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;
- if (paddingSize > 0)
- onData(new Uint8Array(paddingSize));
- currentHeader = null;
- bytesWritten = 0;
- } catch (error) {
- onError(error);
- throw error;
- }
- },
- finalize() {
- if (finalized) {
- const error = /* @__PURE__ */ new Error("Archive has already been finalized");
- onError(error);
- throw error;
- }
- if (currentHeader !== null) {
- const error = /* @__PURE__ */ new Error("Cannot finalize while an entry is still active");
- onError(error);
- throw error;
- }
- try {
- onData(EOF_BUFFER);
- finalized = true;
- if (onFinalize)
- onFinalize();
- } catch (error) {
- onError(error);
- }
- }
- };
- }
- var INITIAL_CAPACITY = 256;
- function createChunkQueue() {
- let chunks = new Array(INITIAL_CAPACITY);
- let capacityMask = chunks.length - 1;
- let head = 0;
- let tail = 0;
- let totalAvailable = 0;
- const consumeFromHead = (count) => {
- const chunk = chunks[head];
- if (count === chunk.length) {
- chunks[head] = EMPTY;
- head = head + 1 & capacityMask;
- } else
- chunks[head] = chunk.subarray(count);
- totalAvailable -= count;
- if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {
- chunks = new Array(INITIAL_CAPACITY);
- capacityMask = INITIAL_CAPACITY - 1;
- head = 0;
- tail = 0;
- }
- };
- function pull(bytes, callback) {
- if (callback) {
- let fed = 0;
- let remaining$1 = Math.min(bytes, totalAvailable);
- while (remaining$1 > 0) {
- const chunk = chunks[head];
- const toFeed = Math.min(remaining$1, chunk.length);
- const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);
- consumeFromHead(toFeed);
- remaining$1 -= toFeed;
- fed += toFeed;
- if (!callback(segment))
- break;
- }
- return fed;
- }
- if (totalAvailable < bytes)
- return null;
- if (bytes === 0)
- return EMPTY;
- const firstChunk = chunks[head];
- if (firstChunk.length >= bytes) {
- const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
- consumeFromHead(bytes);
- return view;
- }
- const result = new Uint8Array(bytes);
- let copied = 0;
- let remaining = bytes;
- while (remaining > 0) {
- const chunk = chunks[head];
- const toCopy = Math.min(remaining, chunk.length);
- result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);
- copied += toCopy;
- remaining -= toCopy;
- consumeFromHead(toCopy);
- }
- return result;
- }
- return {
- push: (chunk) => {
- if (chunk.length === 0)
- return;
- let nextTail = tail + 1 & capacityMask;
- if (nextTail === head) {
- const oldLen = chunks.length;
- const newLen = oldLen * 2;
- const newChunks = new Array(newLen);
- const count = tail - head + oldLen & oldLen - 1;
- if (head < tail)
- for (let i = 0;i < count; i++)
- newChunks[i] = chunks[head + i];
- else if (count > 0) {
- const firstPart = oldLen - head;
- for (let i = 0;i < firstPart; i++)
- newChunks[i] = chunks[head + i];
- for (let i = 0;i < tail; i++)
- newChunks[firstPart + i] = chunks[i];
- }
- chunks = newChunks;
- capacityMask = newLen - 1;
- head = 0;
- tail = count;
- nextTail = tail + 1 & capacityMask;
- }
- chunks[tail] = chunk;
- tail = nextTail;
- totalAvailable += chunk.length;
- },
- available: () => totalAvailable,
- peek: (bytes) => {
- if (totalAvailable < bytes)
- return null;
- if (bytes === 0)
- return EMPTY;
- const firstChunk = chunks[head];
- if (firstChunk.length >= bytes)
- return firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
- const result = new Uint8Array(bytes);
- let copied = 0;
- let index = head;
- while (copied < bytes) {
- const chunk = chunks[index];
- const toCopy = Math.min(bytes - copied, chunk.length);
- if (toCopy === chunk.length)
- result.set(chunk, copied);
- else
- result.set(chunk.subarray(0, toCopy), copied);
- copied += toCopy;
- index = index + 1 & capacityMask;
- }
- return result;
- },
- discard: (bytes) => {
- if (bytes > totalAvailable)
- throw new Error("Too many bytes consumed");
- if (bytes === 0)
- return;
- let remaining = bytes;
- while (remaining > 0) {
- const chunk = chunks[head];
- const toConsume = Math.min(remaining, chunk.length);
- consumeFromHead(toConsume);
- remaining -= toConsume;
- }
- },
- pull
- };
- }
- var STATE_HEADER = 0;
- var STATE_BODY = 1;
- var truncateErr = /* @__PURE__ */ new Error("Tar archive is truncated.");
- function createUnpacker(options = {}) {
- const strict = options.strict ?? false;
- const { available, peek, push, discard, pull } = createChunkQueue();
- let state = STATE_HEADER;
- let ended = false;
- let done = false;
- let eof = false;
- let currentEntry = null;
- const paxGlobals = {};
- let nextEntryOverrides = {};
- const unpacker = {
- isEntryActive: () => state === STATE_BODY,
- isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,
- write(chunk) {
- if (ended)
- throw new Error("Archive already ended.");
- push(chunk);
- },
- end() {
- ended = true;
- },
- readHeader() {
- if (state !== STATE_HEADER)
- throw new Error("Cannot read header while an entry is active");
- if (done)
- return;
- while (!done) {
- if (available() < BLOCK_SIZE) {
- if (ended) {
- if (available() > 0 && strict)
- throw truncateErr;
- done = true;
- return;
- }
- return null;
- }
- const headerBlock = peek(BLOCK_SIZE);
- if (isZeroBlock(headerBlock)) {
- if (available() < BLOCK_SIZE * 2) {
- if (ended) {
- if (strict)
- throw truncateErr;
- done = true;
- return;
- }
- return null;
- }
- if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {
- discard(BLOCK_SIZE * 2);
- done = true;
- eof = true;
- return;
- }
- if (strict)
- throw new Error("Invalid tar header.");
- discard(BLOCK_SIZE);
- continue;
- }
- let internalHeader;
- try {
- internalHeader = parseUstarHeader(headerBlock, strict);
- } catch (err) {
- if (strict)
- throw err;
- discard(BLOCK_SIZE);
- continue;
- }
- const metaParser = getMetaParser(internalHeader.type);
- if (metaParser) {
- const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;
- if (available() < BLOCK_SIZE + paddedSize) {
- if (ended && strict)
- throw truncateErr;
- return null;
- }
- discard(BLOCK_SIZE);
- const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));
- const target = internalHeader.type === "pax-global-header" ? paxGlobals : nextEntryOverrides;
- for (const key in overrides)
- target[key] = overrides[key];
- continue;
- }
- discard(BLOCK_SIZE);
- const header = internalHeader;
- if (internalHeader.prefix)
- header.name = `${internalHeader.prefix}/${header.name}`;
- applyOverrides(header, paxGlobals);
- applyOverrides(header, nextEntryOverrides);
- nextEntryOverrides = {};
- currentEntry = {
- header,
- remaining: header.size,
- padding: -header.size & BLOCK_SIZE_MASK
- };
- state = STATE_BODY;
- return header;
- }
- },
- streamBody(callback) {
- if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)
- return 0;
- const bytesToFeed = Math.min(currentEntry.remaining, available());
- if (bytesToFeed === 0)
- return 0;
- const fed = pull(bytesToFeed, callback);
- currentEntry.remaining -= fed;
- return fed;
- },
- skipPadding() {
- if (state !== STATE_BODY || !currentEntry)
- return true;
- if (currentEntry.remaining > 0)
- throw new Error("Body not fully consumed");
- if (available() < currentEntry.padding)
- return false;
- discard(currentEntry.padding);
- currentEntry = null;
- state = STATE_HEADER;
- return true;
- },
- skipEntry() {
- if (state !== STATE_BODY || !currentEntry)
- return true;
- const toDiscard = Math.min(currentEntry.remaining, available());
- if (toDiscard > 0) {
- discard(toDiscard);
- currentEntry.remaining -= toDiscard;
- }
- if (currentEntry.remaining > 0)
- return false;
- return unpacker.skipPadding();
- },
- validateEOF() {
- if (strict) {
- if (!eof)
- throw truncateErr;
- if (available() > 0) {
- if (pull(available()).some((byte) => byte !== 0))
- throw new Error("Invalid EOF.");
- }
- }
- }
- };
- return unpacker;
- }
- function isZeroBlock(block) {
- if (block.byteOffset % 8 === 0) {
- const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);
- for (let i = 0;i < view.length; i++)
- if (view[i] !== 0n)
- return false;
- return true;
- }
- for (let i = 0;i < block.length; i++)
- if (block[i] !== 0)
- return false;
- return true;
- }
- function createTarPacker2() {
- let streamController;
- let packer;
- return {
- readable: new ReadableStream({ start(controller) {
- streamController = controller;
- packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));
- } }),
- controller: {
- add(header) {
- const bodyless = isBodyless(header);
- const h = { ...header };
- if (bodyless)
- h.size = 0;
- packer.add(h);
- if (bodyless)
- packer.endEntry();
- return new WritableStream({
- write(chunk) {
- packer.write(chunk);
- },
- close() {
- if (!bodyless)
- packer.endEntry();
- },
- abort(reason) {
- streamController.error(reason);
- }
- });
- },
- finalize() {
- packer.finalize();
- },
- error(err) {
- streamController.error(err);
- }
- }
- };
- }
- async function streamToBuffer(stream) {
- const chunks = [];
- const reader = stream.getReader();
- let totalLength = 0;
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (done)
- break;
- chunks.push(value);
- totalLength += value.length;
- }
- const result = new Uint8Array(totalLength);
- let offset = 0;
- for (const chunk of chunks) {
- result.set(chunk, offset);
- offset += chunk.length;
- }
- return result;
- } finally {
- reader.releaseLock();
- }
- }
- var drain = (stream) => stream.pipeTo(new WritableStream);
- function createTarDecoder(options = {}) {
- const unpacker = createUnpacker(options);
- let bodyController = null;
- let pumping = false;
- const pump = (controller) => {
- if (pumping)
- return;
- pumping = true;
- try {
- while (true)
- if (unpacker.isEntryActive()) {
- if (bodyController) {
- if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())
- break;
- } else if (!unpacker.skipEntry())
- break;
- if (unpacker.isBodyComplete()) {
- try {
- bodyController?.close();
- } catch {}
- bodyController = null;
- if (!unpacker.skipPadding())
- break;
- }
- } else {
- const header = unpacker.readHeader();
- if (header === null || header === undefined)
- break;
- controller.enqueue({
- header,
- body: new ReadableStream({
- start(c) {
- if (header.size === 0)
- c.close();
- else
- bodyController = c;
- },
- pull: () => pump(controller),
- cancel() {
- bodyController = null;
- pump(controller);
- }
- })
- });
- }
- } catch (error) {
- try {
- bodyController?.error(error);
- } catch {}
- bodyController = null;
- throw error;
- } finally {
- pumping = false;
- }
- };
- return new TransformStream({
- transform(chunk, controller) {
- try {
- unpacker.write(chunk);
- pump(controller);
- } catch (error) {
- try {
- bodyController?.error(error);
- } catch {}
- throw error;
- }
- },
- flush(controller) {
- try {
- unpacker.end();
- pump(controller);
- unpacker.validateEOF();
- if (unpacker.isEntryActive() && !unpacker.isBodyComplete())
- try {
- bodyController?.close();
- } catch {}
- } catch (error) {
- try {
- bodyController?.error(error);
- } catch {}
- throw error;
- }
- }
- }, undefined, { highWaterMark: 1 });
- }
- async function packTar(entries) {
- const { readable, controller } = createTarPacker2();
- await (async () => {
- for (const entry of entries) {
- const entryStream = controller.add(entry.header);
- const body = "body" in entry ? entry.body : entry.data;
- if (!body) {
- await entryStream.close();
- continue;
- }
- if (body instanceof ReadableStream)
- await body.pipeTo(entryStream);
- else if (body instanceof Blob)
- await body.stream().pipeTo(entryStream);
- else
- try {
- const chunk = await normalizeBody(body);
- if (chunk.length > 0) {
- const writer = entryStream.getWriter();
- await writer.write(chunk);
- await writer.close();
- } else
- await entryStream.close();
- } catch {
- throw new TypeError(`Unsupported content type for entry "${entry.header.name}".`);
- }
- }
- })().then(() => controller.finalize()).catch((err) => controller.error(err));
- return new Uint8Array(await streamToBuffer(readable));
- }
- async function unpackTar(archive, options = {}) {
- const sourceStream = archive instanceof ReadableStream ? archive : new ReadableStream({ start(controller) {
- controller.enqueue(archive instanceof Uint8Array ? archive : new Uint8Array(archive));
- controller.close();
- } });
- const results = [];
- const entryStream = sourceStream.pipeThrough(createTarDecoder(options));
- for await (const entry of entryStream) {
- let processedHeader;
- try {
- processedHeader = transformHeader(entry.header, options);
- } catch (error) {
- await entry.body.cancel();
- throw error;
- }
- if (processedHeader === null) {
- await drain(entry.body);
- continue;
- }
- if (isBodyless(processedHeader)) {
- await drain(entry.body);
- results.push({ header: processedHeader });
- } else
- results.push({
- header: processedHeader,
- data: await streamToBuffer(entry.body)
- });
- }
- return results;
- }
  var gzipAsync = promisify(gzip);
  var gunzipAsync = promisify(gunzip);

- class RXCImpl {
- _buffer;
- _filesCache = null;
- constructor(buffer) {
- this._buffer = buffer;
- }
- get stream() {
- const buffer = this._buffer;
- return new ReadableStream({
- start(controller) {
- controller.enqueue(new Uint8Array(buffer));
- controller.close();
- }
- });
- }
- async buffer() {
- return this._buffer;
- }
- async file(path) {
- const filesMap = await this.files();
- const content = filesMap.get(path);
- if (!content) {
- throw new ContentError(`file not found: ${path}`);
- }
- return content;
- }
- async files() {
- if (this._filesCache) {
- return this._filesCache;
- }
- const tarBuffer = await gunzipAsync(this._buffer);
- const entries = await unpackTar(tarBuffer);
- const filesMap = new Map;
- for (const entry of entries) {
- if ((entry.header.type === "file" || entry.header.type === undefined) && entry.data) {
- filesMap.set(entry.header.name, Buffer.from(entry.data));
- }
- }
- this._filesCache = filesMap;
- return filesMap;
- }
- }
- function isArchiveInput(input) {
- return "archive" in input && Buffer.isBuffer(input.archive);
- }
- async function createRXC(input) {
- if (isArchiveInput(input)) {
- return new RXCImpl(input.archive);
- }
- const entries = Object.entries(input).map(([name, content]) => {
- const body = typeof content === "string" ? content : content instanceof Uint8Array ? content : new Uint8Array(content);
- const size = typeof content === "string" ? Buffer.byteLength(content) : content.length;
- return {
- header: { name, size, type: "file" },
- body
- };
- });
- const tarBuffer = await packTar(entries);
- const gzipBuffer = await gzipAsync(Buffer.from(tarBuffer));
- return new RXCImpl(gzipBuffer);
- }
-
  // src/errors.ts
  class ResourceTypeError extends ResourceXError {
  constructor(message) {
@@ -1129,114 +63,67 @@ class ResourceTypeError extends ResourceXError {
  }
  }
  // src/builtinTypes.ts
- var textSerializer = {
- async serialize(rxr) {
- return rxr.content.buffer();
- },
- async deserialize(data, manifest) {
- return {
- locator: parseRXL(manifest.toLocator()),
- manifest,
- content: await createRXC({ archive: data })
- };
- }
- };
- var textResolver = {
- schema: undefined,
- async resolve(rxr) {
- return {
- resource: rxr,
- schema: undefined,
- execute: async () => {
- const buffer = await rxr.content.file("content");
- return buffer.toString("utf-8");
- }
- };
- }
- };
  var textType = {
  name: "text",
  aliases: ["txt", "plaintext"],
  description: "Plain text content",
- serializer: textSerializer,
- resolver: textResolver
- };
- var jsonSerializer = {
- async serialize(rxr) {
- return rxr.content.buffer();
- },
- async deserialize(data, manifest) {
- return {
- locator: parseRXL(manifest.toLocator()),
- manifest,
- content: await createRXC({ archive: data })
- };
- }
- };
- var jsonResolver = {
- schema: undefined,
- async resolve(rxr) {
- return {
- resource: rxr,
- schema: undefined,
- execute: async () => {
- const buffer = await rxr.content.file("content");
- return JSON.parse(buffer.toString("utf-8"));
- }
- };
+ code: `// @resolver: text_type_default
+ // src/builtins/text.type.ts
+ var text_type_default = {
+ name: "text",
+ aliases: ["txt", "plaintext"],
+ description: "Plain text content",
+ async resolve(ctx) {
+ const content = ctx.files["content"];
+ return new TextDecoder().decode(content);
  }
+ };`
  };
  var jsonType = {
  name: "json",
  aliases: ["config", "manifest"],
  description: "JSON content",
- serializer: jsonSerializer,
- resolver: jsonResolver
- };
- var binarySerializer = {
- async serialize(rxr) {
- return rxr.content.buffer();
- },
- async deserialize(data, manifest) {
- return {
- locator: parseRXL(manifest.toLocator()),
- manifest,
- content: await createRXC({ archive: data })
- };
- }
- };
- var binaryResolver = {
- schema: undefined,
- async resolve(rxr) {
- return {
- resource: rxr,
- schema: undefined,
- execute: async () => {
- return rxr.content.file("content");
- }
- };
+ code: `// @resolver: json_type_default
+ // src/builtins/json.type.ts
+ var json_type_default = {
+ name: "json",
+ aliases: ["config", "manifest"],
+ description: "JSON content",
+ async resolve(ctx) {
+ const content = ctx.files["content"];
+ return JSON.parse(new TextDecoder().decode(content));
  }
+ };`
  };
  var binaryType = {
  name: "binary",
  aliases: ["bin", "blob", "raw"],
  description: "Binary content",
- serializer: binarySerializer,
- resolver: binaryResolver
+ code: `// @resolver: binary_type_default
+ // src/builtins/binary.type.ts
+ var binary_type_default = {
+ name: "binary",
+ aliases: ["bin", "blob", "raw"],
+ description: "Binary content",
+ async resolve(ctx) {
+ return ctx.files["content"];
+ }
+ };`
  };
  var builtinTypes = [textType, jsonType, binaryType];
+
  // src/TypeHandlerChain.ts
  class TypeHandlerChain {
  handlers = new Map;
  constructor() {
  for (const type of builtinTypes) {
- this.registerBuiltin(type);
+ this.registerInternal(type);
  }
  }
  static create() {
  return new TypeHandlerChain;
  }
- registerBuiltin(type) {
+ registerInternal(type) {
  this.handlers.set(type.name, type);
  if (type.aliases) {
  for (const alias of type.aliases) {
@@ -1262,49 +149,30 @@ class TypeHandlerChain {
  return this.handlers.has(typeName);
  }
  getHandler(typeName) {
- return this.handlers.get(typeName);
- }
- getSupportedTypes() {
- return Array.from(this.handlers.keys());
- }
- async serialize(rxr) {
- const typeName = rxr.manifest.type;
  const handler = this.handlers.get(typeName);
  if (!handler) {
  throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
  }
- return handler.serializer.serialize(rxr);
+ return handler;
  }
- async deserialize(data, manifest) {
- const typeName = manifest.type;
- const handler = this.handlers.get(typeName);
- if (!handler) {
- throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
- }
- return handler.serializer.deserialize(data, manifest);
+ getHandlerOrUndefined(typeName) {
+ return this.handlers.get(typeName);
  }
- async resolve(rxr) {
- const typeName = rxr.manifest.type;
- const handler = this.handlers.get(typeName);
- if (!handler) {
- throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
- }
- return handler.resolver.resolve(rxr);
+ getSupportedTypes() {
+ return Array.from(this.handlers.keys());
  }
- clearExtensions() {
+ clear() {
  this.handlers.clear();
- for (const type of builtinTypes) {
- this.registerBuiltin(type);
- }
  }
  }
  export {
  textType,
  jsonType,
+ bundleResourceType,
  builtinTypes,
  binaryType,
  TypeHandlerChain,
  ResourceTypeError
  };

- //# debugId=2BBFE6A3C304057764756E2164756E21
+ //# debugId=9BB0B47B5E1F216964756E2164756E21
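
For orientation, a consumer might combine the new export with the reworked TypeHandlerChain roughly as follows. This is a sketch against the code above, not documented usage: the ./yaml.type.ts path is hypothetical, and a Bun runtime is assumed because bundleResourceType calls Bun.build.

import { bundleResourceType, TypeHandlerChain } from "@resourcexjs/type";

// Bundle a resource-type module into the portable { name, aliases,
// description, schema, code } shape whose resolver travels as a string.
const yaml = await bundleResourceType("./yaml.type.ts");
console.log(yaml.name, yaml.code.length);

// The chain still registers the builtins in its constructor, but the
// accessors changed: getHandler() now throws ResourceTypeError for an
// unknown type, getHandlerOrUndefined() returns undefined instead, and
// clear() empties the chain without re-registering the builtins.
const chain = TypeHandlerChain.create();
console.log(chain.getSupportedTypes());           // "text", "json", "binary" plus aliases
console.log(chain.getHandlerOrUndefined("yaml")); // undefined until registered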