@resourcexjs/core 0.9.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -28,13 +28,6 @@ class ContentError extends ResourceXError {
28
28
  this.name = "ContentError";
29
29
  }
30
30
  }
31
-
32
- class ResourceTypeError extends ResourceXError {
33
- constructor(message) {
34
- super(message);
35
- this.name = "ResourceTypeError";
36
- }
37
- }
38
31
  // src/locator/parseRXL.ts
39
32
  class RXLImpl {
40
33
  domain;
@@ -163,325 +156,1059 @@ function createRXM(data) {
163
156
  });
164
157
  }
165
158
  // src/content/createRXC.ts
166
- class RXCImpl {
167
- _stream;
168
- _consumed = false;
169
- constructor(stream) {
170
- this._stream = stream;
171
- }
172
- get stream() {
173
- if (this._consumed) {
174
- throw new ContentError("Content has already been consumed");
175
- }
176
- this._consumed = true;
177
- return this._stream;
159
+ import { gzip, gunzip } from "node:zlib";
160
+ import { promisify } from "node:util";
161
+
162
+ // ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/unpacker-BpPBxY8N.js
163
+ var BLOCK_SIZE = 512;
164
+ var BLOCK_SIZE_MASK = 511;
165
+ var DEFAULT_FILE_MODE = 420;
166
+ var DEFAULT_DIR_MODE = 493;
167
+ var USTAR_NAME_OFFSET = 0;
168
+ var USTAR_NAME_SIZE = 100;
169
+ var USTAR_MODE_OFFSET = 100;
170
+ var USTAR_MODE_SIZE = 8;
171
+ var USTAR_UID_OFFSET = 108;
172
+ var USTAR_UID_SIZE = 8;
173
+ var USTAR_GID_OFFSET = 116;
174
+ var USTAR_GID_SIZE = 8;
175
+ var USTAR_SIZE_OFFSET = 124;
176
+ var USTAR_SIZE_SIZE = 12;
177
+ var USTAR_MTIME_OFFSET = 136;
178
+ var USTAR_MTIME_SIZE = 12;
179
+ var USTAR_CHECKSUM_OFFSET = 148;
180
+ var USTAR_CHECKSUM_SIZE = 8;
181
+ var USTAR_TYPEFLAG_OFFSET = 156;
182
+ var USTAR_TYPEFLAG_SIZE = 1;
183
+ var USTAR_LINKNAME_OFFSET = 157;
184
+ var USTAR_LINKNAME_SIZE = 100;
185
+ var USTAR_MAGIC_OFFSET = 257;
186
+ var USTAR_MAGIC_SIZE = 6;
187
+ var USTAR_VERSION_OFFSET = 263;
188
+ var USTAR_VERSION_SIZE = 2;
189
+ var USTAR_UNAME_OFFSET = 265;
190
+ var USTAR_UNAME_SIZE = 32;
191
+ var USTAR_GNAME_OFFSET = 297;
192
+ var USTAR_GNAME_SIZE = 32;
193
+ var USTAR_PREFIX_OFFSET = 345;
194
+ var USTAR_PREFIX_SIZE = 155;
195
+ var USTAR_VERSION = "00";
196
+ var USTAR_MAX_UID_GID = 2097151;
197
+ var USTAR_MAX_SIZE = 8589934591;
198
+ var FILE = "file";
199
+ var LINK = "link";
200
+ var SYMLINK = "symlink";
201
+ var DIRECTORY = "directory";
202
+ var TYPEFLAG = {
203
+ file: "0",
204
+ link: "1",
205
+ symlink: "2",
206
+ "character-device": "3",
207
+ "block-device": "4",
208
+ directory: "5",
209
+ fifo: "6",
210
+ "pax-header": "x",
211
+ "pax-global-header": "g",
212
+ "gnu-long-name": "L",
213
+ "gnu-long-link-name": "K"
214
+ };
215
+ var FLAGTYPE = {
216
+ "0": FILE,
217
+ "1": LINK,
218
+ "2": SYMLINK,
219
+ "3": "character-device",
220
+ "4": "block-device",
221
+ "5": DIRECTORY,
222
+ "6": "fifo",
223
+ x: "pax-header",
224
+ g: "pax-global-header",
225
+ L: "gnu-long-name",
226
+ K: "gnu-long-link-name"
227
+ };
228
+ var ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);
229
+ var EMPTY = new Uint8Array(0);
230
+ var encoder = new TextEncoder;
231
+ var decoder = new TextDecoder;
232
+ function writeString(view, offset, size, value) {
233
+ if (value)
234
+ encoder.encodeInto(value, view.subarray(offset, offset + size));
235
+ }
236
+ function writeOctal(view, offset, size, value) {
237
+ if (value === undefined)
238
+ return;
239
+ const octalString = value.toString(8).padStart(size - 1, "0");
240
+ encoder.encodeInto(octalString, view.subarray(offset, offset + size - 1));
241
+ }
242
+ function readString(view, offset, size) {
243
+ const end = view.indexOf(0, offset);
244
+ const sliceEnd = end === -1 || end > offset + size ? offset + size : end;
245
+ return decoder.decode(view.subarray(offset, sliceEnd));
246
+ }
247
+ function readOctal(view, offset, size) {
248
+ let value = 0;
249
+ const end = offset + size;
250
+ for (let i = offset;i < end; i++) {
251
+ const charCode = view[i];
252
+ if (charCode === 0)
253
+ break;
254
+ if (charCode === 32)
255
+ continue;
256
+ value = value * 8 + (charCode - 48);
178
257
  }
179
- async text() {
180
- const buffer = await this.buffer();
181
- return buffer.toString("utf-8");
258
+ return value;
259
+ }
260
+ function readNumeric(view, offset, size) {
261
+ if (view[offset] & 128) {
262
+ let result = 0;
263
+ result = view[offset] & 127;
264
+ for (let i = 1;i < size; i++)
265
+ result = result * 256 + view[offset + i];
266
+ if (!Number.isSafeInteger(result))
267
+ throw new Error("TAR number too large");
268
+ return result;
182
269
  }
183
- async buffer() {
184
- if (this._consumed) {
185
- throw new ContentError("Content has already been consumed");
186
- }
187
- this._consumed = true;
188
- const reader = this._stream.getReader();
189
- const chunks = [];
190
- while (true) {
191
- const { done, value } = await reader.read();
192
- if (done)
193
- break;
194
- chunks.push(value);
270
+ return readOctal(view, offset, size);
271
+ }
272
+ var isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;
273
+ async function normalizeBody(body) {
274
+ if (body === null || body === undefined)
275
+ return EMPTY;
276
+ if (body instanceof Uint8Array)
277
+ return body;
278
+ if (typeof body === "string")
279
+ return encoder.encode(body);
280
+ if (body instanceof ArrayBuffer)
281
+ return new Uint8Array(body);
282
+ if (body instanceof Blob)
283
+ return new Uint8Array(await body.arrayBuffer());
284
+ throw new TypeError("Unsupported content type for entry body.");
285
+ }
286
+ function transformHeader(header, options) {
287
+ const { strip, filter, map } = options;
288
+ if (!strip && !filter && !map)
289
+ return header;
290
+ const h = { ...header };
291
+ if (strip && strip > 0) {
292
+ const components = h.name.split("/").filter(Boolean);
293
+ if (strip >= components.length)
294
+ return null;
295
+ const newName = components.slice(strip).join("/");
296
+ h.name = h.type === DIRECTORY && !newName.endsWith("/") ? `${newName}/` : newName;
297
+ if (h.linkname?.startsWith("/")) {
298
+ const linkComponents = h.linkname.split("/").filter(Boolean);
299
+ h.linkname = strip >= linkComponents.length ? "/" : `/${linkComponents.slice(strip).join("/")}`;
195
300
  }
196
- return Buffer.concat(chunks);
197
- }
198
- async json() {
199
- const text = await this.text();
200
- return JSON.parse(text);
201
301
  }
302
+ if (filter?.(h) === false)
303
+ return null;
304
+ const result = map ? map(h) : h;
305
+ if (result && (!result.name || !result.name.trim() || result.name === "." || result.name === "/"))
306
+ return null;
307
+ return result;
202
308
  }
203
- function createRXC(data) {
204
- let stream;
205
- if (typeof data === "string") {
206
- const encoded = new TextEncoder().encode(data);
207
- stream = new ReadableStream({
208
- start(controller) {
209
- controller.enqueue(encoded);
210
- controller.close();
211
- }
212
- });
213
- } else if (Buffer.isBuffer(data)) {
214
- stream = new ReadableStream({
215
- start(controller) {
216
- controller.enqueue(new Uint8Array(data));
217
- controller.close();
218
- }
219
- });
220
- } else {
221
- stream = data;
222
- }
223
- return new RXCImpl(stream);
224
- }
225
- // src/content/loadRXC.ts
226
- import { createReadStream } from "node:fs";
227
- import { Readable } from "node:stream";
228
- async function loadRXC(source) {
229
- if (source.startsWith("http://") || source.startsWith("https://")) {
230
- const response = await fetch(source);
231
- if (!response.ok) {
232
- throw new Error(`Failed to fetch ${source}: ${response.statusText}`);
233
- }
234
- if (!response.body) {
235
- throw new Error(`No body in response from ${source}`);
236
- }
237
- return createRXC(response.body);
238
- }
239
- const nodeStream = createReadStream(source);
240
- const webStream = Readable.toWeb(nodeStream);
241
- return createRXC(webStream);
242
- }
243
- // src/resource/defineResourceType.ts
244
- var resourceTypes = new Map;
245
- function defineResourceType(config) {
246
- if (resourceTypes.has(config.name)) {
247
- throw new ResourceTypeError(`Resource type "${config.name}" is already registered`);
248
- }
249
- resourceTypes.set(config.name, config);
250
- return config;
251
- }
252
- function getResourceType(name) {
253
- return resourceTypes.get(name);
254
- }
255
- function clearResourceTypes() {
256
- resourceTypes.clear();
257
- }
258
- // src/resource/builtinTypes.ts
259
- var textSerializer = {
260
- async serialize(rxr) {
261
- const text = await rxr.content.text();
262
- return Buffer.from(text, "utf-8");
263
- },
264
- async deserialize(data, manifest) {
265
- const text = data.toString("utf-8");
266
- return {
267
- locator: parseRXL(manifest.toLocator()),
268
- manifest,
269
- content: createRXC(text)
270
- };
309
+ var CHECKSUM_SPACE = 32;
310
+ var ASCII_ZERO = 48;
311
+ function validateChecksum(block) {
312
+ const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);
313
+ let sum = 0;
314
+ for (let i = 0;i < block.length; i++)
315
+ if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)
316
+ sum += CHECKSUM_SPACE;
317
+ else
318
+ sum += block[i];
319
+ return stored === sum;
320
+ }
321
+ function writeChecksum(block) {
322
+ block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);
323
+ let checksum = 0;
324
+ for (const byte of block)
325
+ checksum += byte;
326
+ for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {
327
+ block[i] = (checksum & 7) + ASCII_ZERO;
328
+ checksum >>= 3;
271
329
  }
272
- };
273
- var textResolver = {
274
- async resolve(rxr) {
275
- return rxr.content.text();
330
+ block[USTAR_CHECKSUM_OFFSET + 6] = 0;
331
+ block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;
332
+ }
333
+ function generatePax(header) {
334
+ const paxRecords = {};
335
+ if (header.name.length > USTAR_NAME_SIZE) {
336
+ if (findUstarSplit(header.name) === null)
337
+ paxRecords.path = header.name;
276
338
  }
277
- };
278
- var textType = {
279
- name: "text",
280
- aliases: ["txt", "plaintext"],
281
- description: "Plain text content",
282
- serializer: textSerializer,
283
- resolver: textResolver
284
- };
285
- var jsonSerializer = {
286
- async serialize(rxr) {
287
- const json = await rxr.content.json();
288
- return Buffer.from(JSON.stringify(json, null, 2), "utf-8");
289
- },
290
- async deserialize(data, manifest) {
291
- const text = data.toString("utf-8");
339
+ if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)
340
+ paxRecords.linkpath = header.linkname;
341
+ if (header.uname && header.uname.length > USTAR_UNAME_SIZE)
342
+ paxRecords.uname = header.uname;
343
+ if (header.gname && header.gname.length > USTAR_GNAME_SIZE)
344
+ paxRecords.gname = header.gname;
345
+ if (header.uid != null && header.uid > USTAR_MAX_UID_GID)
346
+ paxRecords.uid = String(header.uid);
347
+ if (header.gid != null && header.gid > USTAR_MAX_UID_GID)
348
+ paxRecords.gid = String(header.gid);
349
+ if (header.size != null && header.size > USTAR_MAX_SIZE)
350
+ paxRecords.size = String(header.size);
351
+ if (header.pax)
352
+ Object.assign(paxRecords, header.pax);
353
+ const paxEntries = Object.entries(paxRecords);
354
+ if (paxEntries.length === 0)
355
+ return null;
356
+ const paxBody = encoder.encode(paxEntries.map(([key, value]) => {
357
+ const record = `${key}=${value}
358
+ `;
359
+ const partLength = encoder.encode(record).length + 1;
360
+ let totalLength = partLength + String(partLength).length;
361
+ totalLength = partLength + String(totalLength).length;
362
+ return `${totalLength} ${record}`;
363
+ }).join(""));
364
+ return {
365
+ paxHeader: createTarHeader({
366
+ name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),
367
+ size: paxBody.length,
368
+ type: "pax-header",
369
+ mode: 420,
370
+ mtime: header.mtime,
371
+ uname: header.uname,
372
+ gname: header.gname,
373
+ uid: header.uid,
374
+ gid: header.gid
375
+ }),
376
+ paxBody
377
+ };
378
+ }
379
+ function findUstarSplit(path) {
380
+ if (path.length <= USTAR_NAME_SIZE)
381
+ return null;
382
+ const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;
383
+ const slashIndex = path.lastIndexOf("/", USTAR_PREFIX_SIZE);
384
+ if (slashIndex > 0 && slashIndex >= minSlashIndex)
292
385
  return {
293
- locator: parseRXL(manifest.toLocator()),
294
- manifest,
295
- content: createRXC(text)
386
+ prefix: path.slice(0, slashIndex),
387
+ name: path.slice(slashIndex + 1)
296
388
  };
389
+ return null;
390
+ }
391
+ function createTarHeader(header) {
392
+ const view = new Uint8Array(BLOCK_SIZE);
393
+ const size = isBodyless(header) ? 0 : header.size ?? 0;
394
+ let name = header.name;
395
+ let prefix = "";
396
+ if (!header.pax?.path) {
397
+ const split = findUstarSplit(name);
398
+ if (split) {
399
+ name = split.name;
400
+ prefix = split.prefix;
401
+ }
297
402
  }
298
- };
299
- var jsonResolver = {
300
- async resolve(rxr) {
301
- return rxr.content.json();
403
+ writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);
404
+ writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
405
+ writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 0);
406
+ writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);
407
+ writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);
408
+ writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));
409
+ writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);
410
+ writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);
411
+ writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, "ustar\x00");
412
+ writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);
413
+ writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);
414
+ writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);
415
+ writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);
416
+ writeChecksum(view);
417
+ return view;
418
+ }
419
+ function parseUstarHeader(block, strict) {
420
+ if (strict && !validateChecksum(block))
421
+ throw new Error("Invalid tar header checksum.");
422
+ const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);
423
+ const header = {
424
+ name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),
425
+ mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),
426
+ uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),
427
+ gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),
428
+ size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),
429
+ mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),
430
+ type: FLAGTYPE[typeflag] || FILE,
431
+ linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)
432
+ };
433
+ const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);
434
+ if (magic.trim() === "ustar") {
435
+ header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);
436
+ header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);
302
437
  }
438
+ if (magic === "ustar")
439
+ header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
440
+ return header;
441
+ }
442
+ var PAX_MAPPING = {
443
+ path: ["name", (v) => v],
444
+ linkpath: ["linkname", (v) => v],
445
+ size: ["size", (v) => parseInt(v, 10)],
446
+ mtime: ["mtime", parseFloat],
447
+ uid: ["uid", (v) => parseInt(v, 10)],
448
+ gid: ["gid", (v) => parseInt(v, 10)],
449
+ uname: ["uname", (v) => v],
450
+ gname: ["gname", (v) => v]
303
451
  };
304
- var jsonType = {
305
- name: "json",
306
- aliases: ["config", "manifest"],
307
- description: "JSON content",
308
- serializer: jsonSerializer,
309
- resolver: jsonResolver
310
- };
311
- var binarySerializer = {
312
- async serialize(rxr) {
313
- return rxr.content.buffer();
314
- },
315
- async deserialize(data, manifest) {
316
- return {
317
- locator: parseRXL(manifest.toLocator()),
318
- manifest,
319
- content: createRXC(data)
320
- };
452
+ function parsePax(buffer) {
453
+ const decoder$1 = new TextDecoder("utf-8");
454
+ const overrides = {};
455
+ const pax = {};
456
+ let offset = 0;
457
+ while (offset < buffer.length) {
458
+ const spaceIndex = buffer.indexOf(32, offset);
459
+ if (spaceIndex === -1)
460
+ break;
461
+ const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);
462
+ if (Number.isNaN(length) || length === 0)
463
+ break;
464
+ const recordEnd = offset + length;
465
+ const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
466
+ if (key && value !== undefined) {
467
+ pax[key] = value;
468
+ const mapping = PAX_MAPPING[key];
469
+ if (mapping) {
470
+ const [targetKey, parser] = mapping;
471
+ const parsedValue = parser(value);
472
+ if (typeof parsedValue === "string" || !Number.isNaN(parsedValue))
473
+ overrides[targetKey] = parsedValue;
474
+ }
475
+ }
476
+ offset = recordEnd;
321
477
  }
322
- };
323
- var binaryResolver = {
324
- async resolve(rxr) {
325
- return rxr.content.buffer();
478
+ if (Object.keys(pax).length > 0)
479
+ overrides.pax = pax;
480
+ return overrides;
481
+ }
482
+ function applyOverrides(header, overrides) {
483
+ if (overrides.name !== undefined)
484
+ header.name = overrides.name;
485
+ if (overrides.linkname !== undefined)
486
+ header.linkname = overrides.linkname;
487
+ if (overrides.size !== undefined)
488
+ header.size = overrides.size;
489
+ if (overrides.mtime !== undefined)
490
+ header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);
491
+ if (overrides.uid !== undefined)
492
+ header.uid = overrides.uid;
493
+ if (overrides.gid !== undefined)
494
+ header.gid = overrides.gid;
495
+ if (overrides.uname !== undefined)
496
+ header.uname = overrides.uname;
497
+ if (overrides.gname !== undefined)
498
+ header.gname = overrides.gname;
499
+ if (overrides.pax)
500
+ header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);
501
+ }
502
+ function getMetaParser(type) {
503
+ switch (type) {
504
+ case "pax-global-header":
505
+ case "pax-header":
506
+ return parsePax;
507
+ case "gnu-long-name":
508
+ return (data) => ({ name: readString(data, 0, data.length) });
509
+ case "gnu-long-link-name":
510
+ return (data) => ({ linkname: readString(data, 0, data.length) });
511
+ default:
512
+ return;
326
513
  }
327
- };
328
- var binaryType = {
329
- name: "binary",
330
- aliases: ["bin", "blob", "raw"],
331
- description: "Binary content",
332
- serializer: binarySerializer,
333
- resolver: binaryResolver
334
- };
335
- var builtinTypes = [textType, jsonType, binaryType];
336
- // src/resource/TypeHandlerChain.ts
337
- class TypeHandlerChain {
338
- handlers = new Map;
339
- register(type) {
340
- this.handlers.set(type.name, type);
341
- if (type.aliases) {
342
- for (const alias of type.aliases) {
343
- this.handlers.set(alias, type);
514
+ }
515
+ function getHeaderBlocks(header) {
516
+ const base = createTarHeader(header);
517
+ const pax = generatePax(header);
518
+ if (!pax)
519
+ return [base];
520
+ const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;
521
+ const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];
522
+ return [
523
+ pax.paxHeader,
524
+ pax.paxBody,
525
+ ...paddingBlocks,
526
+ base
527
+ ];
528
+ }
529
+ var EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);
530
+ function createTarPacker(onData, onError, onFinalize) {
531
+ let currentHeader = null;
532
+ let bytesWritten = 0;
533
+ let finalized = false;
534
+ return {
535
+ add(header) {
536
+ if (finalized) {
537
+ const error = /* @__PURE__ */ new Error("No new tar entries after finalize.");
538
+ onError(error);
539
+ throw error;
540
+ }
541
+ if (currentHeader !== null) {
542
+ const error = /* @__PURE__ */ new Error("Previous entry must be completed before adding a new one");
543
+ onError(error);
544
+ throw error;
545
+ }
546
+ try {
547
+ const size = isBodyless(header) ? 0 : header.size ?? 0;
548
+ const headerBlocks = getHeaderBlocks({
549
+ ...header,
550
+ size
551
+ });
552
+ for (const block of headerBlocks)
553
+ onData(block);
554
+ currentHeader = {
555
+ ...header,
556
+ size
557
+ };
558
+ bytesWritten = 0;
559
+ } catch (error) {
560
+ onError(error);
561
+ }
562
+ },
563
+ write(chunk) {
564
+ if (!currentHeader) {
565
+ const error = /* @__PURE__ */ new Error("No active tar entry.");
566
+ onError(error);
567
+ throw error;
568
+ }
569
+ if (finalized) {
570
+ const error = /* @__PURE__ */ new Error("Cannot write data after finalize.");
571
+ onError(error);
572
+ throw error;
573
+ }
574
+ const newTotal = bytesWritten + chunk.length;
575
+ if (newTotal > currentHeader.size) {
576
+ const error = /* @__PURE__ */ new Error(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
577
+ onError(error);
578
+ throw error;
579
+ }
580
+ try {
581
+ bytesWritten = newTotal;
582
+ onData(chunk);
583
+ } catch (error) {
584
+ onError(error);
585
+ }
586
+ },
587
+ endEntry() {
588
+ if (!currentHeader) {
589
+ const error = /* @__PURE__ */ new Error("No active entry to end.");
590
+ onError(error);
591
+ throw error;
592
+ }
593
+ if (finalized) {
594
+ const error = /* @__PURE__ */ new Error("Cannot end entry after finalize.");
595
+ onError(error);
596
+ throw error;
597
+ }
598
+ try {
599
+ if (bytesWritten !== currentHeader.size) {
600
+ const error = /* @__PURE__ */ new Error(`Size mismatch for "${currentHeader.name}".`);
601
+ onError(error);
602
+ throw error;
603
+ }
604
+ const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;
605
+ if (paddingSize > 0)
606
+ onData(new Uint8Array(paddingSize));
607
+ currentHeader = null;
608
+ bytesWritten = 0;
609
+ } catch (error) {
610
+ onError(error);
611
+ throw error;
612
+ }
613
+ },
614
+ finalize() {
615
+ if (finalized) {
616
+ const error = /* @__PURE__ */ new Error("Archive has already been finalized");
617
+ onError(error);
618
+ throw error;
619
+ }
620
+ if (currentHeader !== null) {
621
+ const error = /* @__PURE__ */ new Error("Cannot finalize while an entry is still active");
622
+ onError(error);
623
+ throw error;
624
+ }
625
+ try {
626
+ onData(EOF_BUFFER);
627
+ finalized = true;
628
+ if (onFinalize)
629
+ onFinalize();
630
+ } catch (error) {
631
+ onError(error);
344
632
  }
345
633
  }
346
- }
347
- registerAll(types) {
348
- for (const type of types) {
349
- this.register(type);
634
+ };
635
+ }
636
+ var INITIAL_CAPACITY = 256;
637
+ function createChunkQueue() {
638
+ let chunks = new Array(INITIAL_CAPACITY);
639
+ let capacityMask = chunks.length - 1;
640
+ let head = 0;
641
+ let tail = 0;
642
+ let totalAvailable = 0;
643
+ const consumeFromHead = (count) => {
644
+ const chunk = chunks[head];
645
+ if (count === chunk.length) {
646
+ chunks[head] = EMPTY;
647
+ head = head + 1 & capacityMask;
648
+ } else
649
+ chunks[head] = chunk.subarray(count);
650
+ totalAvailable -= count;
651
+ if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {
652
+ chunks = new Array(INITIAL_CAPACITY);
653
+ capacityMask = INITIAL_CAPACITY - 1;
654
+ head = 0;
655
+ tail = 0;
350
656
  }
351
- }
352
- canHandle(typeName) {
353
- return this.handlers.has(typeName);
354
- }
355
- getHandler(typeName) {
356
- return this.handlers.get(typeName);
357
- }
358
- async serialize(rxr) {
359
- const typeName = rxr.manifest.type;
360
- const handler = this.handlers.get(typeName);
361
- if (!handler) {
362
- throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
657
+ };
658
+ function pull(bytes, callback) {
659
+ if (callback) {
660
+ let fed = 0;
661
+ let remaining$1 = Math.min(bytes, totalAvailable);
662
+ while (remaining$1 > 0) {
663
+ const chunk = chunks[head];
664
+ const toFeed = Math.min(remaining$1, chunk.length);
665
+ const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);
666
+ consumeFromHead(toFeed);
667
+ remaining$1 -= toFeed;
668
+ fed += toFeed;
669
+ if (!callback(segment))
670
+ break;
671
+ }
672
+ return fed;
363
673
  }
364
- return handler.serializer.serialize(rxr);
365
- }
366
- async deserialize(data, manifest) {
367
- const typeName = manifest.type;
368
- const handler = this.handlers.get(typeName);
369
- if (!handler) {
370
- throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
674
+ if (totalAvailable < bytes)
675
+ return null;
676
+ if (bytes === 0)
677
+ return EMPTY;
678
+ const firstChunk = chunks[head];
679
+ if (firstChunk.length >= bytes) {
680
+ const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
681
+ consumeFromHead(bytes);
682
+ return view;
371
683
  }
372
- return handler.serializer.deserialize(data, manifest);
373
- }
374
- async resolve(rxr) {
375
- const typeName = rxr.manifest.type;
376
- const handler = this.handlers.get(typeName);
377
- if (!handler) {
378
- throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
684
+ const result = new Uint8Array(bytes);
685
+ let copied = 0;
686
+ let remaining = bytes;
687
+ while (remaining > 0) {
688
+ const chunk = chunks[head];
689
+ const toCopy = Math.min(remaining, chunk.length);
690
+ result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);
691
+ copied += toCopy;
692
+ remaining -= toCopy;
693
+ consumeFromHead(toCopy);
379
694
  }
380
- return handler.resolver.resolve(rxr);
381
- }
382
- }
383
- function createTypeHandlerChain(types) {
384
- const chain = new TypeHandlerChain;
385
- if (types) {
386
- chain.registerAll(types);
695
+ return result;
387
696
  }
388
- return chain;
697
+ return {
698
+ push: (chunk) => {
699
+ if (chunk.length === 0)
700
+ return;
701
+ let nextTail = tail + 1 & capacityMask;
702
+ if (nextTail === head) {
703
+ const oldLen = chunks.length;
704
+ const newLen = oldLen * 2;
705
+ const newChunks = new Array(newLen);
706
+ const count = tail - head + oldLen & oldLen - 1;
707
+ if (head < tail)
708
+ for (let i = 0;i < count; i++)
709
+ newChunks[i] = chunks[head + i];
710
+ else if (count > 0) {
711
+ const firstPart = oldLen - head;
712
+ for (let i = 0;i < firstPart; i++)
713
+ newChunks[i] = chunks[head + i];
714
+ for (let i = 0;i < tail; i++)
715
+ newChunks[firstPart + i] = chunks[i];
716
+ }
717
+ chunks = newChunks;
718
+ capacityMask = newLen - 1;
719
+ head = 0;
720
+ tail = count;
721
+ nextTail = tail + 1 & capacityMask;
722
+ }
723
+ chunks[tail] = chunk;
724
+ tail = nextTail;
725
+ totalAvailable += chunk.length;
726
+ },
727
+ available: () => totalAvailable,
728
+ peek: (bytes) => {
729
+ if (totalAvailable < bytes)
730
+ return null;
731
+ if (bytes === 0)
732
+ return EMPTY;
733
+ const firstChunk = chunks[head];
734
+ if (firstChunk.length >= bytes)
735
+ return firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
736
+ const result = new Uint8Array(bytes);
737
+ let copied = 0;
738
+ let index = head;
739
+ while (copied < bytes) {
740
+ const chunk = chunks[index];
741
+ const toCopy = Math.min(bytes - copied, chunk.length);
742
+ if (toCopy === chunk.length)
743
+ result.set(chunk, copied);
744
+ else
745
+ result.set(chunk.subarray(0, toCopy), copied);
746
+ copied += toCopy;
747
+ index = index + 1 & capacityMask;
748
+ }
749
+ return result;
750
+ },
751
+ discard: (bytes) => {
752
+ if (bytes > totalAvailable)
753
+ throw new Error("Too many bytes consumed");
754
+ if (bytes === 0)
755
+ return;
756
+ let remaining = bytes;
757
+ while (remaining > 0) {
758
+ const chunk = chunks[head];
759
+ const toConsume = Math.min(remaining, chunk.length);
760
+ consumeFromHead(toConsume);
761
+ remaining -= toConsume;
762
+ }
763
+ },
764
+ pull
765
+ };
389
766
  }
390
- // src/resource/FolderLoader.ts
391
- import { join } from "node:path";
392
- import { stat, readFile } from "node:fs/promises";
393
- class FolderLoader {
394
- async canLoad(source) {
395
- try {
396
- const stats = await stat(source);
397
- if (!stats.isDirectory()) {
767
+ var STATE_HEADER = 0;
768
+ var STATE_BODY = 1;
769
+ var truncateErr = /* @__PURE__ */ new Error("Tar archive is truncated.");
770
+ function createUnpacker(options = {}) {
771
+ const strict = options.strict ?? false;
772
+ const { available, peek, push, discard, pull } = createChunkQueue();
773
+ let state = STATE_HEADER;
774
+ let ended = false;
775
+ let done = false;
776
+ let eof = false;
777
+ let currentEntry = null;
778
+ const paxGlobals = {};
779
+ let nextEntryOverrides = {};
780
+ const unpacker = {
781
+ isEntryActive: () => state === STATE_BODY,
782
+ isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,
783
+ write(chunk) {
784
+ if (ended)
785
+ throw new Error("Archive already ended.");
786
+ push(chunk);
787
+ },
788
+ end() {
789
+ ended = true;
790
+ },
791
+ readHeader() {
792
+ if (state !== STATE_HEADER)
793
+ throw new Error("Cannot read header while an entry is active");
794
+ if (done)
795
+ return;
796
+ while (!done) {
797
+ if (available() < BLOCK_SIZE) {
798
+ if (ended) {
799
+ if (available() > 0 && strict)
800
+ throw truncateErr;
801
+ done = true;
802
+ return;
803
+ }
804
+ return null;
805
+ }
806
+ const headerBlock = peek(BLOCK_SIZE);
807
+ if (isZeroBlock(headerBlock)) {
808
+ if (available() < BLOCK_SIZE * 2) {
809
+ if (ended) {
810
+ if (strict)
811
+ throw truncateErr;
812
+ done = true;
813
+ return;
814
+ }
815
+ return null;
816
+ }
817
+ if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {
818
+ discard(BLOCK_SIZE * 2);
819
+ done = true;
820
+ eof = true;
821
+ return;
822
+ }
823
+ if (strict)
824
+ throw new Error("Invalid tar header.");
825
+ discard(BLOCK_SIZE);
826
+ continue;
827
+ }
828
+ let internalHeader;
829
+ try {
830
+ internalHeader = parseUstarHeader(headerBlock, strict);
831
+ } catch (err) {
832
+ if (strict)
833
+ throw err;
834
+ discard(BLOCK_SIZE);
835
+ continue;
836
+ }
837
+ const metaParser = getMetaParser(internalHeader.type);
838
+ if (metaParser) {
839
+ const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;
840
+ if (available() < BLOCK_SIZE + paddedSize) {
841
+ if (ended && strict)
842
+ throw truncateErr;
843
+ return null;
844
+ }
845
+ discard(BLOCK_SIZE);
846
+ const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));
847
+ const target = internalHeader.type === "pax-global-header" ? paxGlobals : nextEntryOverrides;
848
+ for (const key in overrides)
849
+ target[key] = overrides[key];
850
+ continue;
851
+ }
852
+ discard(BLOCK_SIZE);
853
+ const header = internalHeader;
854
+ if (internalHeader.prefix)
855
+ header.name = `${internalHeader.prefix}/${header.name}`;
856
+ applyOverrides(header, paxGlobals);
857
+ applyOverrides(header, nextEntryOverrides);
858
+ nextEntryOverrides = {};
859
+ currentEntry = {
860
+ header,
861
+ remaining: header.size,
862
+ padding: -header.size & BLOCK_SIZE_MASK
863
+ };
864
+ state = STATE_BODY;
865
+ return header;
866
+ }
867
+ },
868
+ streamBody(callback) {
869
+ if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)
870
+ return 0;
871
+ const bytesToFeed = Math.min(currentEntry.remaining, available());
872
+ if (bytesToFeed === 0)
873
+ return 0;
874
+ const fed = pull(bytesToFeed, callback);
875
+ currentEntry.remaining -= fed;
876
+ return fed;
877
+ },
878
+ skipPadding() {
879
+ if (state !== STATE_BODY || !currentEntry)
880
+ return true;
881
+ if (currentEntry.remaining > 0)
882
+ throw new Error("Body not fully consumed");
883
+ if (available() < currentEntry.padding)
884
+ return false;
885
+ discard(currentEntry.padding);
886
+ currentEntry = null;
887
+ state = STATE_HEADER;
888
+ return true;
889
+ },
890
+ skipEntry() {
891
+ if (state !== STATE_BODY || !currentEntry)
892
+ return true;
893
+ const toDiscard = Math.min(currentEntry.remaining, available());
894
+ if (toDiscard > 0) {
895
+ discard(toDiscard);
896
+ currentEntry.remaining -= toDiscard;
897
+ }
898
+ if (currentEntry.remaining > 0)
398
899
  return false;
900
+ return unpacker.skipPadding();
901
+ },
902
+ validateEOF() {
903
+ if (strict) {
904
+ if (!eof)
905
+ throw truncateErr;
906
+ if (available() > 0) {
907
+ if (pull(available()).some((byte) => byte !== 0))
908
+ throw new Error("Invalid EOF.");
909
+ }
399
910
  }
400
- const manifestPath = join(source, "resource.json");
401
- const contentPath = join(source, "content");
402
- const manifestStats = await stat(manifestPath);
403
- const contentStats = await stat(contentPath);
404
- return manifestStats.isFile() && contentStats.isFile();
405
- } catch {
911
+ }
912
+ };
913
+ return unpacker;
914
+ }
915
// Returns true when every byte of `block` is zero (tar end-of-archive marker).
// Fast path reads 8 bytes at a time via BigUint64Array, which requires the
// view to start on an 8-byte boundary AND cover a whole number of 8-byte
// words; the original checked only the alignment, so a length that is not a
// multiple of 8 would pass a fractional length to the BigUint64Array
// constructor (RangeError). Callers pass 512-byte blocks, but the guard makes
// the helper safe for any input length.
function isZeroBlock(block) {
  if (block.byteOffset % 8 === 0 && block.length % 8 === 0) {
    const words = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);
    for (let i = 0; i < words.length; i++) {
      if (words[i] !== 0n) {
        return false;
      }
    }
    return true;
  }
  // Byte-wise fallback for unaligned or odd-sized views.
  for (let i = 0; i < block.length; i++) {
    if (block[i] !== 0) {
      return false;
    }
  }
  return true;
}
928
+
929
+ // ../../node_modules/.bun/modern-tar@0.7.3/node_modules/modern-tar/dist/web/index.js
930
// Web-stream facade over the low-level tar packer: exposes the archive as a
// ReadableStream plus a controller for adding entries one at a time.
function createTarPacker2() {
  let streamController;
  let packer;
  const readable = new ReadableStream({
    start(controller) {
      streamController = controller;
      packer = createTarPacker(
        controller.enqueue.bind(controller),
        controller.error.bind(controller),
        controller.close.bind(controller)
      );
    }
  });
  // Begin a new entry; returns a WritableStream for its body. Bodyless entry
  // types (directories, links, …) are closed immediately with size forced to 0.
  const add = (header) => {
    const skipBody = isBodyless(header);
    const copy = { ...header };
    if (skipBody) {
      copy.size = 0;
    }
    packer.add(copy);
    if (skipBody) {
      packer.endEntry();
    }
    return new WritableStream({
      write(chunk) {
        packer.write(chunk);
      },
      close() {
        // Bodyless entries were already ended above.
        if (!skipBody) {
          packer.endEntry();
        }
      },
      abort(reason) {
        streamController.error(reason);
      }
    });
  };
  return {
    readable,
    controller: {
      add,
      finalize() {
        packer.finalize();
      },
      error(err) {
        streamController.error(err);
      }
    }
  };
}
969
// Drains a ReadableStream of Uint8Array chunks and concatenates them into a
// single Uint8Array. The reader lock is always released, even on error.
async function streamToBuffer(stream) {
  const reader = stream.getReader();
  const parts = [];
  let size = 0;
  try {
    for (;;) {
      const next = await reader.read();
      if (next.done) {
        break;
      }
      parts.push(next.value);
      size += next.value.length;
    }
    const out = new Uint8Array(size);
    let cursor = 0;
    for (const part of parts) {
      out.set(part, cursor);
      cursor += part.length;
    }
    return out;
  } finally {
    reader.releaseLock();
  }
}
992
+ var drain = (stream) => stream.pipeTo(new WritableStream);
993
// Streaming tar decoder: a TransformStream that turns raw archive bytes into
// { header, body } entries, where each body is itself a ReadableStream.
// Decoding is driven by a re-entrant-safe `pump` loop over the incremental
// `createUnpacker` state machine; the loop parks (breaks) whenever the
// unpacker reports it needs more input, and resumes on the next write/pull.
function createTarDecoder(options = {}) {
  const unpacker = createUnpacker(options);
  // Controller of the ReadableStream for the entry currently being decoded;
  // null when no body is being streamed (or the consumer cancelled it).
  let bodyController = null;
  // Re-entrancy guard: body `pull`/`cancel` callbacks call pump() while a
  // pump() is already on the stack.
  let pumping = false;
  const pump = (controller) => {
    if (pumping)
      return;
    pumping = true;
    try {
      while (true)
        if (unpacker.isEntryActive()) {
          if (bodyController) {
            // Feed body bytes to the consumer; streamBody returns the number
            // of bytes fed, so 0 with an incomplete body means "need input".
            if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())
              break;
          } else if (!unpacker.skipEntry())
            // No consumer for this body (cancelled / never started): discard.
            break;
          if (unpacker.isBodyComplete()) {
            try {
              bodyController?.close();
            } catch {}
            bodyController = null;
            // skipPadding returns false when the padding bytes have not
            // arrived yet; stay in BODY state until they do.
            if (!unpacker.skipPadding())
              break;
          }
        } else {
          // readHeader: null = need more input, undefined = clean EOF.
          const header = unpacker.readHeader();
          if (header === null || header === undefined)
            break;
          controller.enqueue({
            header,
            body: new ReadableStream({
              start(c) {
                if (header.size === 0)
                  c.close();
                else
                  bodyController = c;
              },
              // Consumer demand re-drives decoding.
              pull: () => pump(controller),
              cancel() {
                // Detach so the pump switches to skipEntry() for this body.
                bodyController = null;
                pump(controller);
              }
            })
          });
        }
    } catch (error) {
      // Propagate decode failures into the in-flight body stream as well.
      try {
        bodyController?.error(error);
      } catch {}
      bodyController = null;
      throw error;
    } finally {
      pumping = false;
    }
  };
  return new TransformStream({
    transform(chunk, controller) {
      try {
        unpacker.write(chunk);
        pump(controller);
      } catch (error) {
        try {
          bodyController?.error(error);
        } catch {}
        throw error;
      }
    },
    flush(controller) {
      try {
        unpacker.end();
        pump(controller);
        // In strict mode this throws on truncated archives / bad EOF blocks.
        unpacker.validateEOF();
        if (unpacker.isEntryActive() && !unpacker.isBodyComplete())
          try {
            bodyController?.close();
          } catch {}
      } catch (error) {
        try {
          bodyController?.error(error);
        } catch {}
        throw error;
      }
    }
  }, undefined, { highWaterMark: 1 });
}
1078
// Packs a list of { header, body|data } entries into a complete tar archive
// and returns it as a single Uint8Array. Entries are written sequentially;
// the packer's output stream is buffered by streamToBuffer at the end.
async function packTar(entries) {
  const { readable, controller } = createTarPacker2();
  // The writer IIFE is awaited as a whole chain; a failure is routed into the
  // output stream via controller.error so it surfaces from streamToBuffer.
  await (async () => {
    for (const entry of entries) {
      const entryStream = controller.add(entry.header);
      // `body` takes precedence over `data` when both keys exist.
      const body = "body" in entry ? entry.body : entry.data;
      if (!body) {
        await entryStream.close();
        continue;
      }
      if (body instanceof ReadableStream)
        await body.pipeTo(entryStream);
      else if (body instanceof Blob)
        await body.stream().pipeTo(entryStream);
      else
        try {
          // normalizeBody coerces strings/typed arrays/etc. to a Uint8Array;
          // NOTE(review): this catch also converts write failures into the
          // "Unsupported content type" TypeError — confirm that is intended.
          const chunk = await normalizeBody(body);
          if (chunk.length > 0) {
            const writer = entryStream.getWriter();
            await writer.write(chunk);
            await writer.close();
          } else
            await entryStream.close();
        } catch {
          throw new TypeError(`Unsupported content type for entry "${entry.header.name}".`);
        }
    }
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
  return new Uint8Array(await streamToBuffer(readable));
}
1108
// Fully unpacks a tar archive (stream, Uint8Array, or ArrayBuffer) into an
// in-memory array of { header, data? } entries. Bodyless entries (dirs,
// links, …) and entries rejected by transformHeader carry no `data`.
async function unpackTar(archive, options = {}) {
  // Normalize the input into a byte stream.
  let source;
  if (archive instanceof ReadableStream) {
    source = archive;
  } else {
    source = new ReadableStream({
      start(ctrl) {
        ctrl.enqueue(archive instanceof Uint8Array ? archive : new Uint8Array(archive));
        ctrl.close();
      }
    });
  }
  const collected = [];
  for await (const item of source.pipeThrough(createTarDecoder(options))) {
    let header;
    try {
      header = transformHeader(item.header, options);
    } catch (err) {
      // Abandon the in-flight body before propagating the failure.
      await item.body.cancel();
      throw err;
    }
    if (header === null) {
      // Entry filtered out by options: discard its body and move on.
      await drain(item.body);
      continue;
    }
    if (isBodyless(header)) {
      await drain(item.body);
      collected.push({ header });
    } else {
      collected.push({
        header,
        data: await streamToBuffer(item.body)
      });
    }
  }
  return collected;
}
1138
+
1139
// src/content/createRXC.ts
// Promise-returning wrappers around node:zlib's callback-style gzip/gunzip.
var gzipAsync = promisify(gzip);
var gunzipAsync = promisify(gunzip);
1142
+
1143
// In-memory resource content backed by a gzipped tar archive buffer.
// Unlike a one-shot stream, the buffer can be re-read any number of times;
// the unpacked file map is computed lazily and cached.
class RXCImpl {
  _buffer;
  _filesCache = null;
  constructor(buffer) {
    this._buffer = buffer;
  }
  // A fresh ReadableStream over the raw archive bytes on every access.
  get stream() {
    const bytes = this._buffer;
    return new ReadableStream({
      start(ctrl) {
        ctrl.enqueue(new Uint8Array(bytes));
        ctrl.close();
      }
    });
  }
  // Raw gzipped archive bytes.
  async buffer() {
    return this._buffer;
  }
  // Contents of a single archived file; throws ContentError when absent.
  async file(path) {
    const entry = (await this.files()).get(path);
    if (!entry) {
      throw new ContentError(`file not found: ${path}`);
    }
    return entry;
  }
  // Map of file path -> Buffer for every regular file in the archive.
  async files() {
    if (this._filesCache) {
      return this._filesCache;
    }
    const tarBytes = await gunzipAsync(this._buffer);
    const map = new Map();
    for (const entry of await unpackTar(tarBytes)) {
      const kind = entry.header.type;
      // Only regular files (explicit "file" type or untyped) carry data.
      if ((kind === "file" || kind === undefined) && entry.data) {
        map.set(entry.header.name, Buffer.from(entry.data));
      }
    }
    this._filesCache = map;
    return map;
  }
}
455
- // src/resource/loadResource.ts
456
- async function loadResource(source, config) {
457
- const loader = config?.loader ?? new FolderLoader;
458
- const canLoad = await loader.canLoad(source);
459
- if (!canLoad) {
460
- throw new ResourceXError(`Cannot load resource from: ${source}`);
1185
// True when `input` is the { archive: Buffer } shape rather than a
// path-to-content map.
function isArchiveInput(input) {
  if (!("archive" in input)) {
    return false;
  }
  return Buffer.isBuffer(input.archive);
}
1188
// Builds resource content either from a pre-built gzipped archive
// ({ archive: Buffer }) or from a map of { path: string | Uint8Array |
// ArrayBuffer } that is packed into a tar and gzipped.
async function createRXC(input) {
  if (isArchiveInput(input)) {
    return new RXCImpl(input.archive);
  }
  const entries = Object.entries(input).map(([name, content]) => {
    // Normalize the body first: strings pass through, Uint8Array (and Buffer,
    // its subclass) pass through, anything else is treated as an ArrayBuffer.
    const body = typeof content === "string" ? content : content instanceof Uint8Array ? content : new Uint8Array(content);
    // Measure the size on the normalized body: an ArrayBuffer has no
    // `.length` (only `.byteLength`), so sizing the raw `content` would have
    // produced an undefined tar header size for ArrayBuffer input.
    const size = typeof body === "string" ? Buffer.byteLength(body) : body.length;
    return {
      header: { name, size, type: "file" },
      body
    };
  });
  const tarBuffer = await packTar(entries);
  const gzipBuffer = await gzipAsync(Buffer.from(tarBuffer));
  return new RXCImpl(gzipBuffer);
}
464
1204
  export {
465
- textType,
466
1205
  parseRXL,
467
- loadResource,
468
- loadRXC,
469
- jsonType,
470
- getResourceType,
471
- defineResourceType,
472
- createTypeHandlerChain,
473
1206
  createRXM,
474
1207
  createRXC,
475
- clearResourceTypes,
476
- builtinTypes,
477
- binaryType,
478
- TypeHandlerChain,
479
1208
  ResourceXError,
480
- ResourceTypeError,
481
1209
  ManifestError,
482
1210
  LocatorError,
483
- FolderLoader,
484
1211
  ContentError
485
1212
  };
486
1213
 
487
- //# debugId=9516A09E80942F2964756E2164756E21
1214
+ //# debugId=3D46BAF324E7BC5864756E2164756E21