@resourcexjs/registry 1.0.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,7 @@
1
1
  // ../core/dist/index.js
2
+ import { gzip, gunzip } from "node:zlib";
3
+ import { promisify } from "node:util";
4
+
2
5
  class ResourceXError extends Error {
3
6
  constructor(message, options) {
4
7
  super(message, options);
@@ -137,6 +140,14 @@ function createRXM(data) {
137
140
  version: data.version
138
141
  });
139
142
  }
143
+ var BLOCK_SIZE = 512;
144
+ var ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);
145
+ var EMPTY = new Uint8Array(0);
146
+ var encoder = new TextEncoder;
147
+ var decoder = new TextDecoder;
148
+ var EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);
149
+ var gzipAsync = promisify(gzip);
150
+ var gunzipAsync = promisify(gunzip);
140
151
 
141
152
  // src/errors.ts
142
153
  class RegistryError extends ResourceXError {
@@ -149,6 +160,9 @@ class RegistryError extends ResourceXError {
149
160
  import { homedir } from "node:os";
150
161
 
151
162
  // ../type/dist/index.js
163
+ import { gzip as gzip2, gunzip as gunzip2 } from "node:zlib";
164
+ import { promisify as promisify2 } from "node:util";
165
+
152
166
  class ResourceXError2 extends Error {
153
167
  constructor(message, options) {
154
168
  super(message, options);
@@ -231,65 +245,1042 @@ function parseRXL2(locator) {
231
245
  }
232
246
  return new RXLImpl2({ domain, path, name, type, version });
233
247
  }
234
-
235
- class RXCImpl {
236
- _stream;
237
- _consumed = false;
238
- constructor(stream) {
239
- this._stream = stream;
248
+ var BLOCK_SIZE2 = 512;
249
+ var BLOCK_SIZE_MASK = 511;
250
+ var DEFAULT_FILE_MODE = 420;
251
+ var DEFAULT_DIR_MODE = 493;
252
+ var USTAR_NAME_OFFSET = 0;
253
+ var USTAR_NAME_SIZE = 100;
254
+ var USTAR_MODE_OFFSET = 100;
255
+ var USTAR_MODE_SIZE = 8;
256
+ var USTAR_UID_OFFSET = 108;
257
+ var USTAR_UID_SIZE = 8;
258
+ var USTAR_GID_OFFSET = 116;
259
+ var USTAR_GID_SIZE = 8;
260
+ var USTAR_SIZE_OFFSET = 124;
261
+ var USTAR_SIZE_SIZE = 12;
262
+ var USTAR_MTIME_OFFSET = 136;
263
+ var USTAR_MTIME_SIZE = 12;
264
+ var USTAR_CHECKSUM_OFFSET = 148;
265
+ var USTAR_CHECKSUM_SIZE = 8;
266
+ var USTAR_TYPEFLAG_OFFSET = 156;
267
+ var USTAR_TYPEFLAG_SIZE = 1;
268
+ var USTAR_LINKNAME_OFFSET = 157;
269
+ var USTAR_LINKNAME_SIZE = 100;
270
+ var USTAR_MAGIC_OFFSET = 257;
271
+ var USTAR_MAGIC_SIZE = 6;
272
+ var USTAR_VERSION_OFFSET = 263;
273
+ var USTAR_VERSION_SIZE = 2;
274
+ var USTAR_UNAME_OFFSET = 265;
275
+ var USTAR_UNAME_SIZE = 32;
276
+ var USTAR_GNAME_OFFSET = 297;
277
+ var USTAR_GNAME_SIZE = 32;
278
+ var USTAR_PREFIX_OFFSET = 345;
279
+ var USTAR_PREFIX_SIZE = 155;
280
+ var USTAR_VERSION = "00";
281
+ var USTAR_MAX_UID_GID = 2097151;
282
+ var USTAR_MAX_SIZE = 8589934591;
283
+ var FILE = "file";
284
+ var LINK = "link";
285
+ var SYMLINK = "symlink";
286
+ var DIRECTORY = "directory";
287
+ var TYPEFLAG = {
288
+ file: "0",
289
+ link: "1",
290
+ symlink: "2",
291
+ "character-device": "3",
292
+ "block-device": "4",
293
+ directory: "5",
294
+ fifo: "6",
295
+ "pax-header": "x",
296
+ "pax-global-header": "g",
297
+ "gnu-long-name": "L",
298
+ "gnu-long-link-name": "K"
299
+ };
300
+ var FLAGTYPE = {
301
+ "0": FILE,
302
+ "1": LINK,
303
+ "2": SYMLINK,
304
+ "3": "character-device",
305
+ "4": "block-device",
306
+ "5": DIRECTORY,
307
+ "6": "fifo",
308
+ x: "pax-header",
309
+ g: "pax-global-header",
310
+ L: "gnu-long-name",
311
+ K: "gnu-long-link-name"
312
+ };
313
+ var ZERO_BLOCK2 = new Uint8Array(BLOCK_SIZE2);
314
+ var EMPTY2 = new Uint8Array(0);
315
+ var encoder2 = new TextEncoder;
316
+ var decoder2 = new TextDecoder;
317
+ function writeString(view, offset, size, value) {
318
+ if (value)
319
+ encoder2.encodeInto(value, view.subarray(offset, offset + size));
320
+ }
321
+ function writeOctal(view, offset, size, value) {
322
+ if (value === undefined)
323
+ return;
324
+ const octalString = value.toString(8).padStart(size - 1, "0");
325
+ encoder2.encodeInto(octalString, view.subarray(offset, offset + size - 1));
326
+ }
327
+ function readString(view, offset, size) {
328
+ const end = view.indexOf(0, offset);
329
+ const sliceEnd = end === -1 || end > offset + size ? offset + size : end;
330
+ return decoder2.decode(view.subarray(offset, sliceEnd));
331
+ }
332
+ function readOctal(view, offset, size) {
333
+ let value = 0;
334
+ const end = offset + size;
335
+ for (let i = offset;i < end; i++) {
336
+ const charCode = view[i];
337
+ if (charCode === 0)
338
+ break;
339
+ if (charCode === 32)
340
+ continue;
341
+ value = value * 8 + (charCode - 48);
342
+ }
343
+ return value;
344
+ }
345
+ function readNumeric(view, offset, size) {
346
+ if (view[offset] & 128) {
347
+ let result = 0;
348
+ result = view[offset] & 127;
349
+ for (let i = 1;i < size; i++)
350
+ result = result * 256 + view[offset + i];
351
+ if (!Number.isSafeInteger(result))
352
+ throw new Error("TAR number too large");
353
+ return result;
240
354
  }
241
- get stream() {
242
- if (this._consumed) {
243
- throw new ContentError("Content has already been consumed");
355
+ return readOctal(view, offset, size);
356
+ }
357
+ var isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;
358
+ async function normalizeBody(body) {
359
+ if (body === null || body === undefined)
360
+ return EMPTY2;
361
+ if (body instanceof Uint8Array)
362
+ return body;
363
+ if (typeof body === "string")
364
+ return encoder2.encode(body);
365
+ if (body instanceof ArrayBuffer)
366
+ return new Uint8Array(body);
367
+ if (body instanceof Blob)
368
+ return new Uint8Array(await body.arrayBuffer());
369
+ throw new TypeError("Unsupported content type for entry body.");
370
+ }
371
+ function transformHeader(header, options) {
372
+ const { strip, filter, map } = options;
373
+ if (!strip && !filter && !map)
374
+ return header;
375
+ const h = { ...header };
376
+ if (strip && strip > 0) {
377
+ const components = h.name.split("/").filter(Boolean);
378
+ if (strip >= components.length)
379
+ return null;
380
+ const newName = components.slice(strip).join("/");
381
+ h.name = h.type === DIRECTORY && !newName.endsWith("/") ? `${newName}/` : newName;
382
+ if (h.linkname?.startsWith("/")) {
383
+ const linkComponents = h.linkname.split("/").filter(Boolean);
384
+ h.linkname = strip >= linkComponents.length ? "/" : `/${linkComponents.slice(strip).join("/")}`;
244
385
  }
245
- this._consumed = true;
246
- return this._stream;
247
386
  }
248
- async text() {
249
- const buffer = await this.buffer();
250
- return buffer.toString("utf-8");
387
+ if (filter?.(h) === false)
388
+ return null;
389
+ const result = map ? map(h) : h;
390
+ if (result && (!result.name || !result.name.trim() || result.name === "." || result.name === "/"))
391
+ return null;
392
+ return result;
393
+ }
394
+ var CHECKSUM_SPACE = 32;
395
+ var ASCII_ZERO = 48;
396
+ function validateChecksum(block) {
397
+ const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);
398
+ let sum = 0;
399
+ for (let i = 0;i < block.length; i++)
400
+ if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)
401
+ sum += CHECKSUM_SPACE;
402
+ else
403
+ sum += block[i];
404
+ return stored === sum;
405
+ }
406
+ function writeChecksum(block) {
407
+ block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);
408
+ let checksum = 0;
409
+ for (const byte of block)
410
+ checksum += byte;
411
+ for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {
412
+ block[i] = (checksum & 7) + ASCII_ZERO;
413
+ checksum >>= 3;
414
+ }
415
+ block[USTAR_CHECKSUM_OFFSET + 6] = 0;
416
+ block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;
417
+ }
418
+ function generatePax(header) {
419
+ const paxRecords = {};
420
+ if (header.name.length > USTAR_NAME_SIZE) {
421
+ if (findUstarSplit(header.name) === null)
422
+ paxRecords.path = header.name;
423
+ }
424
+ if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)
425
+ paxRecords.linkpath = header.linkname;
426
+ if (header.uname && header.uname.length > USTAR_UNAME_SIZE)
427
+ paxRecords.uname = header.uname;
428
+ if (header.gname && header.gname.length > USTAR_GNAME_SIZE)
429
+ paxRecords.gname = header.gname;
430
+ if (header.uid != null && header.uid > USTAR_MAX_UID_GID)
431
+ paxRecords.uid = String(header.uid);
432
+ if (header.gid != null && header.gid > USTAR_MAX_UID_GID)
433
+ paxRecords.gid = String(header.gid);
434
+ if (header.size != null && header.size > USTAR_MAX_SIZE)
435
+ paxRecords.size = String(header.size);
436
+ if (header.pax)
437
+ Object.assign(paxRecords, header.pax);
438
+ const paxEntries = Object.entries(paxRecords);
439
+ if (paxEntries.length === 0)
440
+ return null;
441
+ const paxBody = encoder2.encode(paxEntries.map(([key, value]) => {
442
+ const record = `${key}=${value}
443
+ `;
444
+ const partLength = encoder2.encode(record).length + 1;
445
+ let totalLength = partLength + String(partLength).length;
446
+ totalLength = partLength + String(totalLength).length;
447
+ return `${totalLength} ${record}`;
448
+ }).join(""));
449
+ return {
450
+ paxHeader: createTarHeader({
451
+ name: decoder2.decode(encoder2.encode(`PaxHeader/${header.name}`).slice(0, 100)),
452
+ size: paxBody.length,
453
+ type: "pax-header",
454
+ mode: 420,
455
+ mtime: header.mtime,
456
+ uname: header.uname,
457
+ gname: header.gname,
458
+ uid: header.uid,
459
+ gid: header.gid
460
+ }),
461
+ paxBody
462
+ };
463
+ }
464
+ function findUstarSplit(path) {
465
+ if (path.length <= USTAR_NAME_SIZE)
466
+ return null;
467
+ const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;
468
+ const slashIndex = path.lastIndexOf("/", USTAR_PREFIX_SIZE);
469
+ if (slashIndex > 0 && slashIndex >= minSlashIndex)
470
+ return {
471
+ prefix: path.slice(0, slashIndex),
472
+ name: path.slice(slashIndex + 1)
473
+ };
474
+ return null;
475
+ }
476
+ function createTarHeader(header) {
477
+ const view = new Uint8Array(BLOCK_SIZE2);
478
+ const size = isBodyless(header) ? 0 : header.size ?? 0;
479
+ let name = header.name;
480
+ let prefix = "";
481
+ if (!header.pax?.path) {
482
+ const split = findUstarSplit(name);
483
+ if (split) {
484
+ name = split.name;
485
+ prefix = split.prefix;
486
+ }
251
487
  }
252
- async buffer() {
253
- if (this._consumed) {
254
- throw new ContentError("Content has already been consumed");
488
+ writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);
489
+ writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
490
+ writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 0);
491
+ writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);
492
+ writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);
493
+ writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));
494
+ writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);
495
+ writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);
496
+ writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, "ustar\x00");
497
+ writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);
498
+ writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);
499
+ writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);
500
+ writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);
501
+ writeChecksum(view);
502
+ return view;
503
+ }
504
+ function parseUstarHeader(block, strict) {
505
+ if (strict && !validateChecksum(block))
506
+ throw new Error("Invalid tar header checksum.");
507
+ const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);
508
+ const header = {
509
+ name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),
510
+ mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),
511
+ uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),
512
+ gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),
513
+ size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),
514
+ mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),
515
+ type: FLAGTYPE[typeflag] || FILE,
516
+ linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)
517
+ };
518
+ const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);
519
+ if (magic.trim() === "ustar") {
520
+ header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);
521
+ header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);
522
+ }
523
+ if (magic === "ustar")
524
+ header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
525
+ return header;
526
+ }
527
+ var PAX_MAPPING = {
528
+ path: ["name", (v) => v],
529
+ linkpath: ["linkname", (v) => v],
530
+ size: ["size", (v) => parseInt(v, 10)],
531
+ mtime: ["mtime", parseFloat],
532
+ uid: ["uid", (v) => parseInt(v, 10)],
533
+ gid: ["gid", (v) => parseInt(v, 10)],
534
+ uname: ["uname", (v) => v],
535
+ gname: ["gname", (v) => v]
536
+ };
537
+ function parsePax(buffer) {
538
+ const decoder$1 = new TextDecoder("utf-8");
539
+ const overrides = {};
540
+ const pax = {};
541
+ let offset = 0;
542
+ while (offset < buffer.length) {
543
+ const spaceIndex = buffer.indexOf(32, offset);
544
+ if (spaceIndex === -1)
545
+ break;
546
+ const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);
547
+ if (Number.isNaN(length) || length === 0)
548
+ break;
549
+ const recordEnd = offset + length;
550
+ const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
551
+ if (key && value !== undefined) {
552
+ pax[key] = value;
553
+ const mapping = PAX_MAPPING[key];
554
+ if (mapping) {
555
+ const [targetKey, parser] = mapping;
556
+ const parsedValue = parser(value);
557
+ if (typeof parsedValue === "string" || !Number.isNaN(parsedValue))
558
+ overrides[targetKey] = parsedValue;
559
+ }
560
+ }
561
+ offset = recordEnd;
562
+ }
563
+ if (Object.keys(pax).length > 0)
564
+ overrides.pax = pax;
565
+ return overrides;
566
+ }
567
+ function applyOverrides(header, overrides) {
568
+ if (overrides.name !== undefined)
569
+ header.name = overrides.name;
570
+ if (overrides.linkname !== undefined)
571
+ header.linkname = overrides.linkname;
572
+ if (overrides.size !== undefined)
573
+ header.size = overrides.size;
574
+ if (overrides.mtime !== undefined)
575
+ header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);
576
+ if (overrides.uid !== undefined)
577
+ header.uid = overrides.uid;
578
+ if (overrides.gid !== undefined)
579
+ header.gid = overrides.gid;
580
+ if (overrides.uname !== undefined)
581
+ header.uname = overrides.uname;
582
+ if (overrides.gname !== undefined)
583
+ header.gname = overrides.gname;
584
+ if (overrides.pax)
585
+ header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);
586
+ }
587
+ function getMetaParser(type) {
588
+ switch (type) {
589
+ case "pax-global-header":
590
+ case "pax-header":
591
+ return parsePax;
592
+ case "gnu-long-name":
593
+ return (data) => ({ name: readString(data, 0, data.length) });
594
+ case "gnu-long-link-name":
595
+ return (data) => ({ linkname: readString(data, 0, data.length) });
596
+ default:
597
+ return;
598
+ }
599
+ }
600
+ function getHeaderBlocks(header) {
601
+ const base = createTarHeader(header);
602
+ const pax = generatePax(header);
603
+ if (!pax)
604
+ return [base];
605
+ const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;
606
+ const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK2.subarray(0, paxPadding)] : [];
607
+ return [
608
+ pax.paxHeader,
609
+ pax.paxBody,
610
+ ...paddingBlocks,
611
+ base
612
+ ];
613
+ }
614
+ var EOF_BUFFER2 = new Uint8Array(BLOCK_SIZE2 * 2);
615
+ function createTarPacker(onData, onError, onFinalize) {
616
+ let currentHeader = null;
617
+ let bytesWritten = 0;
618
+ let finalized = false;
619
+ return {
620
+ add(header) {
621
+ if (finalized) {
622
+ const error = /* @__PURE__ */ new Error("No new tar entries after finalize.");
623
+ onError(error);
624
+ throw error;
625
+ }
626
+ if (currentHeader !== null) {
627
+ const error = /* @__PURE__ */ new Error("Previous entry must be completed before adding a new one");
628
+ onError(error);
629
+ throw error;
630
+ }
631
+ try {
632
+ const size = isBodyless(header) ? 0 : header.size ?? 0;
633
+ const headerBlocks = getHeaderBlocks({
634
+ ...header,
635
+ size
636
+ });
637
+ for (const block of headerBlocks)
638
+ onData(block);
639
+ currentHeader = {
640
+ ...header,
641
+ size
642
+ };
643
+ bytesWritten = 0;
644
+ } catch (error) {
645
+ onError(error);
646
+ }
647
+ },
648
+ write(chunk) {
649
+ if (!currentHeader) {
650
+ const error = /* @__PURE__ */ new Error("No active tar entry.");
651
+ onError(error);
652
+ throw error;
653
+ }
654
+ if (finalized) {
655
+ const error = /* @__PURE__ */ new Error("Cannot write data after finalize.");
656
+ onError(error);
657
+ throw error;
658
+ }
659
+ const newTotal = bytesWritten + chunk.length;
660
+ if (newTotal > currentHeader.size) {
661
+ const error = /* @__PURE__ */ new Error(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
662
+ onError(error);
663
+ throw error;
664
+ }
665
+ try {
666
+ bytesWritten = newTotal;
667
+ onData(chunk);
668
+ } catch (error) {
669
+ onError(error);
670
+ }
671
+ },
672
+ endEntry() {
673
+ if (!currentHeader) {
674
+ const error = /* @__PURE__ */ new Error("No active entry to end.");
675
+ onError(error);
676
+ throw error;
677
+ }
678
+ if (finalized) {
679
+ const error = /* @__PURE__ */ new Error("Cannot end entry after finalize.");
680
+ onError(error);
681
+ throw error;
682
+ }
683
+ try {
684
+ if (bytesWritten !== currentHeader.size) {
685
+ const error = /* @__PURE__ */ new Error(`Size mismatch for "${currentHeader.name}".`);
686
+ onError(error);
687
+ throw error;
688
+ }
689
+ const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;
690
+ if (paddingSize > 0)
691
+ onData(new Uint8Array(paddingSize));
692
+ currentHeader = null;
693
+ bytesWritten = 0;
694
+ } catch (error) {
695
+ onError(error);
696
+ throw error;
697
+ }
698
+ },
699
+ finalize() {
700
+ if (finalized) {
701
+ const error = /* @__PURE__ */ new Error("Archive has already been finalized");
702
+ onError(error);
703
+ throw error;
704
+ }
705
+ if (currentHeader !== null) {
706
+ const error = /* @__PURE__ */ new Error("Cannot finalize while an entry is still active");
707
+ onError(error);
708
+ throw error;
709
+ }
710
+ try {
711
+ onData(EOF_BUFFER2);
712
+ finalized = true;
713
+ if (onFinalize)
714
+ onFinalize();
715
+ } catch (error) {
716
+ onError(error);
717
+ }
718
+ }
719
+ };
720
+ }
721
+ var INITIAL_CAPACITY = 256;
722
+ function createChunkQueue() {
723
+ let chunks = new Array(INITIAL_CAPACITY);
724
+ let capacityMask = chunks.length - 1;
725
+ let head = 0;
726
+ let tail = 0;
727
+ let totalAvailable = 0;
728
+ const consumeFromHead = (count) => {
729
+ const chunk = chunks[head];
730
+ if (count === chunk.length) {
731
+ chunks[head] = EMPTY2;
732
+ head = head + 1 & capacityMask;
733
+ } else
734
+ chunks[head] = chunk.subarray(count);
735
+ totalAvailable -= count;
736
+ if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {
737
+ chunks = new Array(INITIAL_CAPACITY);
738
+ capacityMask = INITIAL_CAPACITY - 1;
739
+ head = 0;
740
+ tail = 0;
741
+ }
742
+ };
743
+ function pull(bytes, callback) {
744
+ if (callback) {
745
+ let fed = 0;
746
+ let remaining$1 = Math.min(bytes, totalAvailable);
747
+ while (remaining$1 > 0) {
748
+ const chunk = chunks[head];
749
+ const toFeed = Math.min(remaining$1, chunk.length);
750
+ const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);
751
+ consumeFromHead(toFeed);
752
+ remaining$1 -= toFeed;
753
+ fed += toFeed;
754
+ if (!callback(segment))
755
+ break;
756
+ }
757
+ return fed;
758
+ }
759
+ if (totalAvailable < bytes)
760
+ return null;
761
+ if (bytes === 0)
762
+ return EMPTY2;
763
+ const firstChunk = chunks[head];
764
+ if (firstChunk.length >= bytes) {
765
+ const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
766
+ consumeFromHead(bytes);
767
+ return view;
768
+ }
769
+ const result = new Uint8Array(bytes);
770
+ let copied = 0;
771
+ let remaining = bytes;
772
+ while (remaining > 0) {
773
+ const chunk = chunks[head];
774
+ const toCopy = Math.min(remaining, chunk.length);
775
+ result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);
776
+ copied += toCopy;
777
+ remaining -= toCopy;
778
+ consumeFromHead(toCopy);
779
+ }
780
+ return result;
781
+ }
782
+ return {
783
+ push: (chunk) => {
784
+ if (chunk.length === 0)
785
+ return;
786
+ let nextTail = tail + 1 & capacityMask;
787
+ if (nextTail === head) {
788
+ const oldLen = chunks.length;
789
+ const newLen = oldLen * 2;
790
+ const newChunks = new Array(newLen);
791
+ const count = tail - head + oldLen & oldLen - 1;
792
+ if (head < tail)
793
+ for (let i = 0;i < count; i++)
794
+ newChunks[i] = chunks[head + i];
795
+ else if (count > 0) {
796
+ const firstPart = oldLen - head;
797
+ for (let i = 0;i < firstPart; i++)
798
+ newChunks[i] = chunks[head + i];
799
+ for (let i = 0;i < tail; i++)
800
+ newChunks[firstPart + i] = chunks[i];
801
+ }
802
+ chunks = newChunks;
803
+ capacityMask = newLen - 1;
804
+ head = 0;
805
+ tail = count;
806
+ nextTail = tail + 1 & capacityMask;
807
+ }
808
+ chunks[tail] = chunk;
809
+ tail = nextTail;
810
+ totalAvailable += chunk.length;
811
+ },
812
+ available: () => totalAvailable,
813
+ peek: (bytes) => {
814
+ if (totalAvailable < bytes)
815
+ return null;
816
+ if (bytes === 0)
817
+ return EMPTY2;
818
+ const firstChunk = chunks[head];
819
+ if (firstChunk.length >= bytes)
820
+ return firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
821
+ const result = new Uint8Array(bytes);
822
+ let copied = 0;
823
+ let index = head;
824
+ while (copied < bytes) {
825
+ const chunk = chunks[index];
826
+ const toCopy = Math.min(bytes - copied, chunk.length);
827
+ if (toCopy === chunk.length)
828
+ result.set(chunk, copied);
829
+ else
830
+ result.set(chunk.subarray(0, toCopy), copied);
831
+ copied += toCopy;
832
+ index = index + 1 & capacityMask;
833
+ }
834
+ return result;
835
+ },
836
+ discard: (bytes) => {
837
+ if (bytes > totalAvailable)
838
+ throw new Error("Too many bytes consumed");
839
+ if (bytes === 0)
840
+ return;
841
+ let remaining = bytes;
842
+ while (remaining > 0) {
843
+ const chunk = chunks[head];
844
+ const toConsume = Math.min(remaining, chunk.length);
845
+ consumeFromHead(toConsume);
846
+ remaining -= toConsume;
847
+ }
848
+ },
849
+ pull
850
+ };
851
+ }
852
+ var STATE_HEADER = 0;
853
+ var STATE_BODY = 1;
854
+ var truncateErr = /* @__PURE__ */ new Error("Tar archive is truncated.");
855
+ function createUnpacker(options = {}) {
856
+ const strict = options.strict ?? false;
857
+ const { available, peek, push, discard, pull } = createChunkQueue();
858
+ let state = STATE_HEADER;
859
+ let ended = false;
860
+ let done = false;
861
+ let eof = false;
862
+ let currentEntry = null;
863
+ const paxGlobals = {};
864
+ let nextEntryOverrides = {};
865
+ const unpacker = {
866
+ isEntryActive: () => state === STATE_BODY,
867
+ isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,
868
+ write(chunk) {
869
+ if (ended)
870
+ throw new Error("Archive already ended.");
871
+ push(chunk);
872
+ },
873
+ end() {
874
+ ended = true;
875
+ },
876
+ readHeader() {
877
+ if (state !== STATE_HEADER)
878
+ throw new Error("Cannot read header while an entry is active");
879
+ if (done)
880
+ return;
881
+ while (!done) {
882
+ if (available() < BLOCK_SIZE2) {
883
+ if (ended) {
884
+ if (available() > 0 && strict)
885
+ throw truncateErr;
886
+ done = true;
887
+ return;
888
+ }
889
+ return null;
890
+ }
891
+ const headerBlock = peek(BLOCK_SIZE2);
892
+ if (isZeroBlock(headerBlock)) {
893
+ if (available() < BLOCK_SIZE2 * 2) {
894
+ if (ended) {
895
+ if (strict)
896
+ throw truncateErr;
897
+ done = true;
898
+ return;
899
+ }
900
+ return null;
901
+ }
902
+ if (isZeroBlock(peek(BLOCK_SIZE2 * 2).subarray(BLOCK_SIZE2))) {
903
+ discard(BLOCK_SIZE2 * 2);
904
+ done = true;
905
+ eof = true;
906
+ return;
907
+ }
908
+ if (strict)
909
+ throw new Error("Invalid tar header.");
910
+ discard(BLOCK_SIZE2);
911
+ continue;
912
+ }
913
+ let internalHeader;
914
+ try {
915
+ internalHeader = parseUstarHeader(headerBlock, strict);
916
+ } catch (err) {
917
+ if (strict)
918
+ throw err;
919
+ discard(BLOCK_SIZE2);
920
+ continue;
921
+ }
922
+ const metaParser = getMetaParser(internalHeader.type);
923
+ if (metaParser) {
924
+ const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;
925
+ if (available() < BLOCK_SIZE2 + paddedSize) {
926
+ if (ended && strict)
927
+ throw truncateErr;
928
+ return null;
929
+ }
930
+ discard(BLOCK_SIZE2);
931
+ const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));
932
+ const target = internalHeader.type === "pax-global-header" ? paxGlobals : nextEntryOverrides;
933
+ for (const key in overrides)
934
+ target[key] = overrides[key];
935
+ continue;
936
+ }
937
+ discard(BLOCK_SIZE2);
938
+ const header = internalHeader;
939
+ if (internalHeader.prefix)
940
+ header.name = `${internalHeader.prefix}/${header.name}`;
941
+ applyOverrides(header, paxGlobals);
942
+ applyOverrides(header, nextEntryOverrides);
943
+ nextEntryOverrides = {};
944
+ currentEntry = {
945
+ header,
946
+ remaining: header.size,
947
+ padding: -header.size & BLOCK_SIZE_MASK
948
+ };
949
+ state = STATE_BODY;
950
+ return header;
951
+ }
952
+ },
953
+ streamBody(callback) {
954
+ if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)
955
+ return 0;
956
+ const bytesToFeed = Math.min(currentEntry.remaining, available());
957
+ if (bytesToFeed === 0)
958
+ return 0;
959
+ const fed = pull(bytesToFeed, callback);
960
+ currentEntry.remaining -= fed;
961
+ return fed;
962
+ },
963
+ skipPadding() {
964
+ if (state !== STATE_BODY || !currentEntry)
965
+ return true;
966
+ if (currentEntry.remaining > 0)
967
+ throw new Error("Body not fully consumed");
968
+ if (available() < currentEntry.padding)
969
+ return false;
970
+ discard(currentEntry.padding);
971
+ currentEntry = null;
972
+ state = STATE_HEADER;
973
+ return true;
974
+ },
975
+ skipEntry() {
976
+ if (state !== STATE_BODY || !currentEntry)
977
+ return true;
978
+ const toDiscard = Math.min(currentEntry.remaining, available());
979
+ if (toDiscard > 0) {
980
+ discard(toDiscard);
981
+ currentEntry.remaining -= toDiscard;
982
+ }
983
+ if (currentEntry.remaining > 0)
984
+ return false;
985
+ return unpacker.skipPadding();
986
+ },
987
+ validateEOF() {
988
+ if (strict) {
989
+ if (!eof)
990
+ throw truncateErr;
991
+ if (available() > 0) {
992
+ if (pull(available()).some((byte) => byte !== 0))
993
+ throw new Error("Invalid EOF.");
994
+ }
995
+ }
255
996
  }
256
- this._consumed = true;
257
- const reader = this._stream.getReader();
258
- const chunks = [];
997
+ };
998
+ return unpacker;
999
+ }
1000
+ function isZeroBlock(block) {
1001
+ if (block.byteOffset % 8 === 0) {
1002
+ const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);
1003
+ for (let i = 0;i < view.length; i++)
1004
+ if (view[i] !== 0n)
1005
+ return false;
1006
+ return true;
1007
+ }
1008
+ for (let i = 0;i < block.length; i++)
1009
+ if (block[i] !== 0)
1010
+ return false;
1011
+ return true;
1012
+ }
1013
+ function createTarPacker2() {
1014
+ let streamController;
1015
+ let packer;
1016
+ return {
1017
+ readable: new ReadableStream({ start(controller) {
1018
+ streamController = controller;
1019
+ packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));
1020
+ } }),
1021
+ controller: {
1022
+ add(header) {
1023
+ const bodyless = isBodyless(header);
1024
+ const h = { ...header };
1025
+ if (bodyless)
1026
+ h.size = 0;
1027
+ packer.add(h);
1028
+ if (bodyless)
1029
+ packer.endEntry();
1030
+ return new WritableStream({
1031
+ write(chunk) {
1032
+ packer.write(chunk);
1033
+ },
1034
+ close() {
1035
+ if (!bodyless)
1036
+ packer.endEntry();
1037
+ },
1038
+ abort(reason) {
1039
+ streamController.error(reason);
1040
+ }
1041
+ });
1042
+ },
1043
+ finalize() {
1044
+ packer.finalize();
1045
+ },
1046
+ error(err) {
1047
+ streamController.error(err);
1048
+ }
1049
+ }
1050
+ };
1051
+ }
1052
+ async function streamToBuffer(stream) {
1053
+ const chunks = [];
1054
+ const reader = stream.getReader();
1055
+ let totalLength = 0;
1056
+ try {
259
1057
  while (true) {
260
1058
  const { done, value } = await reader.read();
261
1059
  if (done)
262
1060
  break;
263
1061
  chunks.push(value);
1062
+ totalLength += value.length;
1063
+ }
1064
+ const result = new Uint8Array(totalLength);
1065
+ let offset = 0;
1066
+ for (const chunk of chunks) {
1067
+ result.set(chunk, offset);
1068
+ offset += chunk.length;
264
1069
  }
265
- return Buffer.concat(chunks);
1070
+ return result;
1071
+ } finally {
1072
+ reader.releaseLock();
266
1073
  }
267
- async json() {
268
- const text = await this.text();
269
- return JSON.parse(text);
1074
+ }
1075
+ var drain = (stream) => stream.pipeTo(new WritableStream);
1076
+ function createTarDecoder(options = {}) {
1077
+ const unpacker = createUnpacker(options);
1078
+ let bodyController = null;
1079
+ let pumping = false;
1080
+ const pump = (controller) => {
1081
+ if (pumping)
1082
+ return;
1083
+ pumping = true;
1084
+ try {
1085
+ while (true)
1086
+ if (unpacker.isEntryActive()) {
1087
+ if (bodyController) {
1088
+ if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())
1089
+ break;
1090
+ } else if (!unpacker.skipEntry())
1091
+ break;
1092
+ if (unpacker.isBodyComplete()) {
1093
+ try {
1094
+ bodyController?.close();
1095
+ } catch {}
1096
+ bodyController = null;
1097
+ if (!unpacker.skipPadding())
1098
+ break;
1099
+ }
1100
+ } else {
1101
+ const header = unpacker.readHeader();
1102
+ if (header === null || header === undefined)
1103
+ break;
1104
+ controller.enqueue({
1105
+ header,
1106
+ body: new ReadableStream({
1107
+ start(c) {
1108
+ if (header.size === 0)
1109
+ c.close();
1110
+ else
1111
+ bodyController = c;
1112
+ },
1113
+ pull: () => pump(controller),
1114
+ cancel() {
1115
+ bodyController = null;
1116
+ pump(controller);
1117
+ }
1118
+ })
1119
+ });
1120
+ }
1121
+ } catch (error) {
1122
+ try {
1123
+ bodyController?.error(error);
1124
+ } catch {}
1125
+ bodyController = null;
1126
+ throw error;
1127
+ } finally {
1128
+ pumping = false;
1129
+ }
1130
+ };
1131
+ return new TransformStream({
1132
+ transform(chunk, controller) {
1133
+ try {
1134
+ unpacker.write(chunk);
1135
+ pump(controller);
1136
+ } catch (error) {
1137
+ try {
1138
+ bodyController?.error(error);
1139
+ } catch {}
1140
+ throw error;
1141
+ }
1142
+ },
1143
+ flush(controller) {
1144
+ try {
1145
+ unpacker.end();
1146
+ pump(controller);
1147
+ unpacker.validateEOF();
1148
+ if (unpacker.isEntryActive() && !unpacker.isBodyComplete())
1149
+ try {
1150
+ bodyController?.close();
1151
+ } catch {}
1152
+ } catch (error) {
1153
+ try {
1154
+ bodyController?.error(error);
1155
+ } catch {}
1156
+ throw error;
1157
+ }
1158
+ }
1159
+ }, undefined, { highWaterMark: 1 });
1160
+ }
1161
/**
 * Builds a complete tar archive in memory from a list of entries.
 *
 * @param {Array<{header: object, body?: unknown, data?: unknown}>} entries -
 *   Each entry carries a tar header plus optional content under `body` or
 *   `data` (`body` wins when the key is present). Content may be a
 *   ReadableStream, a Blob, or anything `normalizeBody` accepts.
 * @returns {Promise<Uint8Array>} the packed archive bytes.
 * @throws {TypeError} when an entry's content cannot be normalized to bytes.
 */
async function packTar(entries) {
  const { readable, controller } = createTarPacker2();
  const writeEntries = async () => {
    for (const entry of entries) {
      const entryStream = controller.add(entry.header);
      const body = "body" in entry ? entry.body : entry.data;
      if (!body) {
        // No content (e.g. directories, empty files): close the entry as-is.
        await entryStream.close();
        continue;
      }
      if (body instanceof ReadableStream) {
        await body.pipeTo(entryStream);
      } else if (body instanceof Blob) {
        await body.stream().pipeTo(entryStream);
      } else {
        // FIX: only normalizeBody() is guarded here. The previous code wrapped
        // writer.write()/close() in the same try/catch, so a genuine stream
        // failure was misreported as "Unsupported content type". The original
        // error is preserved via `cause`.
        let chunk;
        try {
          chunk = await normalizeBody(body);
        } catch (cause) {
          throw new TypeError(
            `Unsupported content type for entry "${entry.header.name}".`,
            { cause }
          );
        }
        if (chunk.length > 0) {
          const writer = entryStream.getWriter();
          await writer.write(chunk);
          await writer.close();
        } else {
          await entryStream.close();
        }
      }
    }
  };
  try {
    await writeEntries();
    await controller.finalize();
  } catch (err) {
    // Route failures into the output stream so streamToBuffer() rejects with
    // them — same semantics as the original .then(finalize).catch(error) chain.
    controller.error(err);
  }
  return new Uint8Array(await streamToBuffer(readable));
}
1191
/**
 * Unpacks a tar archive (bytes or stream) into an array of entries.
 * Headers pass through transformHeader(); entries it rejects (null) are
 * drained and dropped, body-less entries keep only their header, and file
 * entries are fully buffered into `data`.
 *
 * @param {Uint8Array|ArrayBuffer|ReadableStream} archive
 * @param {object} [options] forwarded to createTarDecoder / transformHeader.
 * @returns {Promise<Array<{header: object, data?: Uint8Array}>>}
 */
async function unpackTar(archive, options = {}) {
  let source;
  if (archive instanceof ReadableStream) {
    source = archive;
  } else {
    const bytes = archive instanceof Uint8Array ? archive : new Uint8Array(archive);
    source = new ReadableStream({
      start(controller) {
        controller.enqueue(bytes);
        controller.close();
      }
    });
  }
  const collected = [];
  for await (const entry of source.pipeThrough(createTarDecoder(options))) {
    let header;
    try {
      header = transformHeader(entry.header, options);
    } catch (error) {
      // Release the body stream before surfacing the header error.
      await entry.body.cancel();
      throw error;
    }
    if (header === null) {
      // Filtered out by the header transform: consume and skip.
      await drain(entry.body);
      continue;
    }
    if (isBodyless(header)) {
      await drain(entry.body);
      collected.push({ header });
      continue;
    }
    collected.push({ header, data: await streamToBuffer(entry.body) });
  }
  return collected;
}
272
- function createRXC(data) {
273
- let stream;
274
- if (typeof data === "string") {
275
- const encoded = new TextEncoder().encode(data);
276
- stream = new ReadableStream({
1221
var gzipAsync2 = promisify2(gzip2);
var gunzipAsync2 = promisify2(gunzip2);

class RXCImpl {
  // Raw resource content as stored: a gzip-compressed tar archive
  // (decompressed and unpacked on demand in files()).
  _buffer;
  // Lazy path -> Buffer map, populated on the first files() call.
  _filesCache = null;
  constructor(buffer) {
    this._buffer = buffer;
  }
  /** A fresh ReadableStream over a copy of the raw (compressed) bytes. */
  get stream() {
    const bytes = new Uint8Array(this._buffer);
    return new ReadableStream({
      start(controller) {
        controller.enqueue(bytes);
        controller.close();
      }
    });
  }
  /** The raw (compressed) bytes exactly as stored. */
  async buffer() {
    return this._buffer;
  }
  /**
   * Returns the contents of one archived file.
   * @throws {ContentError} when the path is not present in the archive.
   */
  async file(path) {
    const entry = (await this.files()).get(path);
    if (!entry) {
      throw new ContentError(`file not found: ${path}`);
    }
    return entry;
  }
  /** Gunzips and unpacks the archive once, caching the resulting map. */
  async files() {
    if (this._filesCache !== null) {
      return this._filesCache;
    }
    const tar = await gunzipAsync2(this._buffer);
    const unpacked = await unpackTar(tar);
    const byPath = new Map();
    for (const { header, data } of unpacked) {
      const isFile = header.type === "file" || header.type === undefined;
      if (isFile && data) {
        byPath.set(header.name, Buffer.from(data));
      }
    }
    this._filesCache = byPath;
    return byPath;
  }
}
1266
/**
 * True when the input object carries a prebuilt archive Buffer under the
 * `archive` key (as opposed to a `{ path: content }` file map).
 */
function isArchiveInput(input) {
  if (!("archive" in input)) {
    return false;
  }
  return Buffer.isBuffer(input.archive);
}
1269
/**
 * Creates an RXC either from an existing gzipped archive
 * (`{ archive: Buffer }`) or from a map of file contents
 * (`{ path: string | Uint8Array | ArrayBuffer }`), which is packed
 * into a tar and gzipped.
 *
 * @returns {Promise<RXCImpl>}
 */
async function createRXC(input) {
  if (isArchiveInput(input)) {
    // Already a packed archive: wrap it directly.
    return new RXCImpl(input.archive);
  }
  const entries = Object.entries(input).map(([name, content]) => {
    // Normalize to the body we will actually write BEFORE measuring size.
    // FIX: the previous code read `content.length` for every non-string
    // input, which is `undefined` for ArrayBuffers (they expose byteLength,
    // not length), yielding a corrupt `size` in the tar header.
    const body = typeof content === "string"
      ? content
      : content instanceof Uint8Array
        ? content
        : new Uint8Array(content);
    const size = typeof body === "string" ? Buffer.byteLength(body) : body.length;
    return {
      header: { name, size, type: "file" },
      body
    };
  });
  const tarBuffer = await packTar(entries);
  const gzipBuffer = await gzipAsync2(Buffer.from(tarBuffer));
  return new RXCImpl(gzipBuffer);
}
1285
 
295
1286
  class ResourceTypeError extends ResourceXError2 {
@@ -300,21 +1291,20 @@ class ResourceTypeError extends ResourceXError2 {
300
1291
  }
301
1292
var textSerializer = {
  /** The RXC already holds the packed archive, so serialization is pass-through. */
  async serialize(rxr) {
    const archived = await rxr.content.buffer();
    return archived;
  },
  /** Rehydrates a resource record from archive bytes plus its manifest. */
  async deserialize(data, manifest) {
    const locator = parseRXL2(manifest.toLocator());
    const content = await createRXC({ archive: data });
    return { locator, manifest, content };
  }
};
315
1304
var textResolver = {
  /** Reads the archive's "content" file and decodes it as UTF-8 text. */
  async resolve(rxr) {
    const raw = await rxr.content.file("content");
    return raw.toString("utf-8");
  }
};
320
1310
  var textType = {
@@ -326,21 +1316,20 @@ var textType = {
326
1316
  };
327
1317
var jsonSerializer = {
  /** The RXC already holds the packed archive, so serialization is pass-through. */
  async serialize(rxr) {
    const archived = await rxr.content.buffer();
    return archived;
  },
  /** Rehydrates a resource record from archive bytes plus its manifest. */
  async deserialize(data, manifest) {
    const locator = parseRXL2(manifest.toLocator());
    const content = await createRXC({ archive: data });
    return { locator, manifest, content };
  }
};
341
1329
var jsonResolver = {
  /** Reads the archive's "content" file and parses it as UTF-8 JSON. */
  async resolve(rxr) {
    const raw = await rxr.content.file("content");
    const text = raw.toString("utf-8");
    return JSON.parse(text);
  }
};
346
1335
  var jsonType = {
@@ -358,13 +1347,13 @@ var binarySerializer = {
358
1347
  return {
359
1348
  locator: parseRXL2(manifest.toLocator()),
360
1349
  manifest,
361
- content: createRXC(data)
1350
+ content: await createRXC({ archive: data })
362
1351
  };
363
1352
  }
364
1353
  };
365
1354
  var binaryResolver = {
366
1355
  async resolve(rxr) {
367
- return rxr.content.buffer();
1356
+ return rxr.content.file("content");
368
1357
  }
369
1358
  };
370
1359
  var binaryType = {
@@ -964,4 +1953,4 @@ export {
964
1953
  ARPRegistry
965
1954
  };
966
1955
 
967
- //# debugId=690671A1EE104C4464756E2164756E21
1956
+ //# debugId=6B85BCB12D2565AC64756E2164756E21