@resourcexjs/arp 2.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -29
- package/dist/index.d.ts +53 -66
- package/dist/index.js +47 -2095
- package/dist/index.js.map +6 -8
- package/package.json +2 -4
package/dist/index.js
CHANGED
|
@@ -88,6 +88,20 @@ class ARL {
|
|
|
88
88
|
}
|
|
89
89
|
await transport.delete(this.location);
|
|
90
90
|
}
|
|
91
|
+
async list(options) {
|
|
92
|
+
const transport = this.resolver.getTransportHandler(this.transport);
|
|
93
|
+
if (!transport.list) {
|
|
94
|
+
throw new TransportError(`Transport "${transport.name}" does not support list operation`, this.transport);
|
|
95
|
+
}
|
|
96
|
+
return transport.list(this.location, options);
|
|
97
|
+
}
|
|
98
|
+
async mkdir() {
|
|
99
|
+
const transport = this.resolver.getTransportHandler(this.transport);
|
|
100
|
+
if (!transport.mkdir) {
|
|
101
|
+
throw new TransportError(`Transport "${transport.name}" does not support mkdir operation`, this.transport);
|
|
102
|
+
}
|
|
103
|
+
await transport.mkdir(this.location);
|
|
104
|
+
}
|
|
91
105
|
toString() {
|
|
92
106
|
return `arp:${this.semantic}:${this.transport}://${this.location}`;
|
|
93
107
|
}
|
|
@@ -207,6 +221,37 @@ class FileTransportHandler {
|
|
|
207
221
|
});
|
|
208
222
|
}
|
|
209
223
|
}
|
|
224
|
+
async list(location, options) {
|
|
225
|
+
const dirPath = this.resolvePath(location);
|
|
226
|
+
try {
|
|
227
|
+
let entries;
|
|
228
|
+
if (options?.recursive) {
|
|
229
|
+
entries = await this.listRecursive(dirPath, dirPath);
|
|
230
|
+
} else {
|
|
231
|
+
entries = await readdir(dirPath);
|
|
232
|
+
}
|
|
233
|
+
if (options?.pattern) {
|
|
234
|
+
entries = this.filterByPattern(entries, options.pattern);
|
|
235
|
+
}
|
|
236
|
+
return entries;
|
|
237
|
+
} catch (error) {
|
|
238
|
+
const err = error;
|
|
239
|
+
throw new TransportError(`File list error: ${err.code} - ${dirPath}`, this.name, {
|
|
240
|
+
cause: err
|
|
241
|
+
});
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
async mkdir(location) {
|
|
245
|
+
const dirPath = this.resolvePath(location);
|
|
246
|
+
try {
|
|
247
|
+
await mkdir(dirPath, { recursive: true });
|
|
248
|
+
} catch (error) {
|
|
249
|
+
const err = error;
|
|
250
|
+
throw new TransportError(`File mkdir error: ${err.code} - ${dirPath}`, this.name, {
|
|
251
|
+
cause: err
|
|
252
|
+
});
|
|
253
|
+
}
|
|
254
|
+
}
|
|
210
255
|
}
|
|
211
256
|
var fileTransport = new FileTransportHandler;
|
|
212
257
|
// src/transport/http.ts
|
|
@@ -274,2097 +319,6 @@ class HttpTransportHandler {
|
|
|
274
319
|
}
|
|
275
320
|
var httpsTransport = new HttpTransportHandler("https");
|
|
276
321
|
var httpTransport = new HttpTransportHandler("http");
|
|
277
|
-
// ../registry/dist/index.js
|
|
278
|
-
import { gzip, gunzip } from "node:zlib";
|
|
279
|
-
import { promisify } from "node:util";
|
|
280
|
-
import { homedir } from "node:os";
|
|
281
|
-
import { join as join2 } from "node:path";
|
|
282
|
-
import { readFile as readFile2, writeFile as writeFile2, mkdir as mkdir2, rm as rm2, stat as stat2, readdir as readdir2 } from "node:fs/promises";
|
|
283
|
-
import { gzip as gzip2, gunzip as gunzip2 } from "node:zlib";
|
|
284
|
-
import { promisify as promisify2 } from "node:util";
|
|
285
|
-
import { homedir as homedir2 } from "node:os";
|
|
286
|
-
import { join as join22 } from "node:path";
|
|
287
|
-
import { readFile as readFile22, stat as stat22, readdir as readdir22, mkdir as mkdir22 } from "node:fs/promises";
|
|
288
|
-
import { execSync } from "node:child_process";
|
|
289
|
-
function isRemoteConfig(config) {
|
|
290
|
-
return config !== undefined && "endpoint" in config;
|
|
291
|
-
}
|
|
292
|
-
function isGitConfig(config) {
|
|
293
|
-
return config !== undefined && "type" in config && config.type === "git";
|
|
294
|
-
}
|
|
295
|
-
|
|
296
|
-
class ResourceXError extends Error {
|
|
297
|
-
constructor(message, options) {
|
|
298
|
-
super(message, options);
|
|
299
|
-
this.name = "ResourceXError";
|
|
300
|
-
}
|
|
301
|
-
}
|
|
302
|
-
|
|
303
|
-
class ManifestError extends ResourceXError {
|
|
304
|
-
constructor(message) {
|
|
305
|
-
super(message);
|
|
306
|
-
this.name = "ManifestError";
|
|
307
|
-
}
|
|
308
|
-
}
|
|
309
|
-
|
|
310
|
-
class RXLImpl {
|
|
311
|
-
domain;
|
|
312
|
-
path;
|
|
313
|
-
name;
|
|
314
|
-
type;
|
|
315
|
-
version;
|
|
316
|
-
constructor(parts) {
|
|
317
|
-
this.domain = parts.domain;
|
|
318
|
-
this.path = parts.path;
|
|
319
|
-
this.name = parts.name;
|
|
320
|
-
this.type = parts.type;
|
|
321
|
-
this.version = parts.version;
|
|
322
|
-
}
|
|
323
|
-
toString() {
|
|
324
|
-
let result = "";
|
|
325
|
-
if (this.domain) {
|
|
326
|
-
result += this.domain + "/";
|
|
327
|
-
if (this.path) {
|
|
328
|
-
result += this.path + "/";
|
|
329
|
-
}
|
|
330
|
-
}
|
|
331
|
-
result += this.name;
|
|
332
|
-
if (this.type) {
|
|
333
|
-
result += "." + this.type;
|
|
334
|
-
}
|
|
335
|
-
if (this.version) {
|
|
336
|
-
result += "@" + this.version;
|
|
337
|
-
}
|
|
338
|
-
return result;
|
|
339
|
-
}
|
|
340
|
-
}
|
|
341
|
-
function isDomain(str) {
|
|
342
|
-
if (str === "localhost")
|
|
343
|
-
return true;
|
|
344
|
-
return str.includes(".");
|
|
345
|
-
}
|
|
346
|
-
function parseRXL(locator) {
|
|
347
|
-
let remaining = locator;
|
|
348
|
-
let version;
|
|
349
|
-
let type;
|
|
350
|
-
let domain;
|
|
351
|
-
let path;
|
|
352
|
-
let name;
|
|
353
|
-
const atIndex = remaining.indexOf("@");
|
|
354
|
-
if (atIndex !== -1) {
|
|
355
|
-
version = remaining.slice(atIndex + 1);
|
|
356
|
-
remaining = remaining.slice(0, atIndex);
|
|
357
|
-
}
|
|
358
|
-
const segments = remaining.split("/");
|
|
359
|
-
if (segments.length > 1 && isDomain(segments[0])) {
|
|
360
|
-
domain = segments[0];
|
|
361
|
-
const lastSegment = segments[segments.length - 1];
|
|
362
|
-
if (segments.length > 2) {
|
|
363
|
-
path = segments.slice(1, -1).join("/");
|
|
364
|
-
}
|
|
365
|
-
remaining = lastSegment;
|
|
366
|
-
} else {
|
|
367
|
-
remaining = segments.join("/");
|
|
368
|
-
}
|
|
369
|
-
const dotIndex = remaining.lastIndexOf(".");
|
|
370
|
-
if (dotIndex !== -1) {
|
|
371
|
-
type = remaining.slice(dotIndex + 1);
|
|
372
|
-
name = remaining.slice(0, dotIndex);
|
|
373
|
-
} else {
|
|
374
|
-
name = remaining;
|
|
375
|
-
}
|
|
376
|
-
return new RXLImpl({ domain, path, name, type, version });
|
|
377
|
-
}
|
|
378
|
-
|
|
379
|
-
class RXMImpl {
|
|
380
|
-
domain;
|
|
381
|
-
path;
|
|
382
|
-
name;
|
|
383
|
-
type;
|
|
384
|
-
version;
|
|
385
|
-
constructor(data) {
|
|
386
|
-
this.domain = data.domain;
|
|
387
|
-
this.path = data.path;
|
|
388
|
-
this.name = data.name;
|
|
389
|
-
this.type = data.type;
|
|
390
|
-
this.version = data.version;
|
|
391
|
-
}
|
|
392
|
-
toLocator() {
|
|
393
|
-
let result = this.domain + "/";
|
|
394
|
-
if (this.path) {
|
|
395
|
-
result += this.path + "/";
|
|
396
|
-
}
|
|
397
|
-
result += this.name;
|
|
398
|
-
result += "." + this.type;
|
|
399
|
-
result += "@" + this.version;
|
|
400
|
-
return result;
|
|
401
|
-
}
|
|
402
|
-
toJSON() {
|
|
403
|
-
const json = {
|
|
404
|
-
domain: this.domain,
|
|
405
|
-
name: this.name,
|
|
406
|
-
type: this.type,
|
|
407
|
-
version: this.version
|
|
408
|
-
};
|
|
409
|
-
if (this.path !== undefined) {
|
|
410
|
-
json.path = this.path;
|
|
411
|
-
}
|
|
412
|
-
return json;
|
|
413
|
-
}
|
|
414
|
-
}
|
|
415
|
-
function createRXM(data) {
|
|
416
|
-
if (!data.domain) {
|
|
417
|
-
throw new ManifestError("domain is required");
|
|
418
|
-
}
|
|
419
|
-
if (!data.name) {
|
|
420
|
-
throw new ManifestError("name is required");
|
|
421
|
-
}
|
|
422
|
-
if (!data.type) {
|
|
423
|
-
throw new ManifestError("type is required");
|
|
424
|
-
}
|
|
425
|
-
if (!data.version) {
|
|
426
|
-
throw new ManifestError("version is required");
|
|
427
|
-
}
|
|
428
|
-
return new RXMImpl({
|
|
429
|
-
domain: data.domain,
|
|
430
|
-
path: data.path,
|
|
431
|
-
name: data.name,
|
|
432
|
-
type: data.type,
|
|
433
|
-
version: data.version
|
|
434
|
-
});
|
|
435
|
-
}
|
|
436
|
-
var BLOCK_SIZE = 512;
|
|
437
|
-
var ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);
|
|
438
|
-
var EMPTY = new Uint8Array(0);
|
|
439
|
-
var encoder = new TextEncoder;
|
|
440
|
-
var decoder = new TextDecoder;
|
|
441
|
-
var EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);
|
|
442
|
-
var gzipAsync = promisify(gzip);
|
|
443
|
-
var gunzipAsync = promisify(gunzip);
|
|
444
|
-
|
|
445
|
-
class RegistryError extends ResourceXError {
|
|
446
|
-
constructor(message) {
|
|
447
|
-
super(message);
|
|
448
|
-
this.name = "RegistryError";
|
|
449
|
-
}
|
|
450
|
-
}
|
|
451
|
-
|
|
452
|
-
class ResourceXError2 extends Error {
|
|
453
|
-
constructor(message, options) {
|
|
454
|
-
super(message, options);
|
|
455
|
-
this.name = "ResourceXError";
|
|
456
|
-
}
|
|
457
|
-
}
|
|
458
|
-
|
|
459
|
-
class ContentError extends ResourceXError2 {
|
|
460
|
-
constructor(message) {
|
|
461
|
-
super(message);
|
|
462
|
-
this.name = "ContentError";
|
|
463
|
-
}
|
|
464
|
-
}
|
|
465
|
-
|
|
466
|
-
class RXLImpl2 {
|
|
467
|
-
domain;
|
|
468
|
-
path;
|
|
469
|
-
name;
|
|
470
|
-
type;
|
|
471
|
-
version;
|
|
472
|
-
constructor(parts) {
|
|
473
|
-
this.domain = parts.domain;
|
|
474
|
-
this.path = parts.path;
|
|
475
|
-
this.name = parts.name;
|
|
476
|
-
this.type = parts.type;
|
|
477
|
-
this.version = parts.version;
|
|
478
|
-
}
|
|
479
|
-
toString() {
|
|
480
|
-
let result = "";
|
|
481
|
-
if (this.domain) {
|
|
482
|
-
result += this.domain + "/";
|
|
483
|
-
if (this.path) {
|
|
484
|
-
result += this.path + "/";
|
|
485
|
-
}
|
|
486
|
-
}
|
|
487
|
-
result += this.name;
|
|
488
|
-
if (this.type) {
|
|
489
|
-
result += "." + this.type;
|
|
490
|
-
}
|
|
491
|
-
if (this.version) {
|
|
492
|
-
result += "@" + this.version;
|
|
493
|
-
}
|
|
494
|
-
return result;
|
|
495
|
-
}
|
|
496
|
-
}
|
|
497
|
-
function isDomain2(str) {
|
|
498
|
-
if (str === "localhost")
|
|
499
|
-
return true;
|
|
500
|
-
return str.includes(".");
|
|
501
|
-
}
|
|
502
|
-
function parseRXL2(locator) {
|
|
503
|
-
let remaining = locator;
|
|
504
|
-
let version;
|
|
505
|
-
let type;
|
|
506
|
-
let domain;
|
|
507
|
-
let path;
|
|
508
|
-
let name;
|
|
509
|
-
const atIndex = remaining.indexOf("@");
|
|
510
|
-
if (atIndex !== -1) {
|
|
511
|
-
version = remaining.slice(atIndex + 1);
|
|
512
|
-
remaining = remaining.slice(0, atIndex);
|
|
513
|
-
}
|
|
514
|
-
const segments = remaining.split("/");
|
|
515
|
-
if (segments.length > 1 && isDomain2(segments[0])) {
|
|
516
|
-
domain = segments[0];
|
|
517
|
-
const lastSegment = segments[segments.length - 1];
|
|
518
|
-
if (segments.length > 2) {
|
|
519
|
-
path = segments.slice(1, -1).join("/");
|
|
520
|
-
}
|
|
521
|
-
remaining = lastSegment;
|
|
522
|
-
} else {
|
|
523
|
-
remaining = segments.join("/");
|
|
524
|
-
}
|
|
525
|
-
const dotIndex = remaining.lastIndexOf(".");
|
|
526
|
-
if (dotIndex !== -1) {
|
|
527
|
-
type = remaining.slice(dotIndex + 1);
|
|
528
|
-
name = remaining.slice(0, dotIndex);
|
|
529
|
-
} else {
|
|
530
|
-
name = remaining;
|
|
531
|
-
}
|
|
532
|
-
return new RXLImpl2({ domain, path, name, type, version });
|
|
533
|
-
}
|
|
534
|
-
var BLOCK_SIZE2 = 512;
|
|
535
|
-
var BLOCK_SIZE_MASK = 511;
|
|
536
|
-
var DEFAULT_FILE_MODE = 420;
|
|
537
|
-
var DEFAULT_DIR_MODE = 493;
|
|
538
|
-
var USTAR_NAME_OFFSET = 0;
|
|
539
|
-
var USTAR_NAME_SIZE = 100;
|
|
540
|
-
var USTAR_MODE_OFFSET = 100;
|
|
541
|
-
var USTAR_MODE_SIZE = 8;
|
|
542
|
-
var USTAR_UID_OFFSET = 108;
|
|
543
|
-
var USTAR_UID_SIZE = 8;
|
|
544
|
-
var USTAR_GID_OFFSET = 116;
|
|
545
|
-
var USTAR_GID_SIZE = 8;
|
|
546
|
-
var USTAR_SIZE_OFFSET = 124;
|
|
547
|
-
var USTAR_SIZE_SIZE = 12;
|
|
548
|
-
var USTAR_MTIME_OFFSET = 136;
|
|
549
|
-
var USTAR_MTIME_SIZE = 12;
|
|
550
|
-
var USTAR_CHECKSUM_OFFSET = 148;
|
|
551
|
-
var USTAR_CHECKSUM_SIZE = 8;
|
|
552
|
-
var USTAR_TYPEFLAG_OFFSET = 156;
|
|
553
|
-
var USTAR_TYPEFLAG_SIZE = 1;
|
|
554
|
-
var USTAR_LINKNAME_OFFSET = 157;
|
|
555
|
-
var USTAR_LINKNAME_SIZE = 100;
|
|
556
|
-
var USTAR_MAGIC_OFFSET = 257;
|
|
557
|
-
var USTAR_MAGIC_SIZE = 6;
|
|
558
|
-
var USTAR_VERSION_OFFSET = 263;
|
|
559
|
-
var USTAR_VERSION_SIZE = 2;
|
|
560
|
-
var USTAR_UNAME_OFFSET = 265;
|
|
561
|
-
var USTAR_UNAME_SIZE = 32;
|
|
562
|
-
var USTAR_GNAME_OFFSET = 297;
|
|
563
|
-
var USTAR_GNAME_SIZE = 32;
|
|
564
|
-
var USTAR_PREFIX_OFFSET = 345;
|
|
565
|
-
var USTAR_PREFIX_SIZE = 155;
|
|
566
|
-
var USTAR_VERSION = "00";
|
|
567
|
-
var USTAR_MAX_UID_GID = 2097151;
|
|
568
|
-
var USTAR_MAX_SIZE = 8589934591;
|
|
569
|
-
var FILE = "file";
|
|
570
|
-
var LINK = "link";
|
|
571
|
-
var SYMLINK = "symlink";
|
|
572
|
-
var DIRECTORY = "directory";
|
|
573
|
-
var TYPEFLAG = {
|
|
574
|
-
file: "0",
|
|
575
|
-
link: "1",
|
|
576
|
-
symlink: "2",
|
|
577
|
-
"character-device": "3",
|
|
578
|
-
"block-device": "4",
|
|
579
|
-
directory: "5",
|
|
580
|
-
fifo: "6",
|
|
581
|
-
"pax-header": "x",
|
|
582
|
-
"pax-global-header": "g",
|
|
583
|
-
"gnu-long-name": "L",
|
|
584
|
-
"gnu-long-link-name": "K"
|
|
585
|
-
};
|
|
586
|
-
var FLAGTYPE = {
|
|
587
|
-
"0": FILE,
|
|
588
|
-
"1": LINK,
|
|
589
|
-
"2": SYMLINK,
|
|
590
|
-
"3": "character-device",
|
|
591
|
-
"4": "block-device",
|
|
592
|
-
"5": DIRECTORY,
|
|
593
|
-
"6": "fifo",
|
|
594
|
-
x: "pax-header",
|
|
595
|
-
g: "pax-global-header",
|
|
596
|
-
L: "gnu-long-name",
|
|
597
|
-
K: "gnu-long-link-name"
|
|
598
|
-
};
|
|
599
|
-
var ZERO_BLOCK2 = new Uint8Array(BLOCK_SIZE2);
|
|
600
|
-
var EMPTY2 = new Uint8Array(0);
|
|
601
|
-
var encoder2 = new TextEncoder;
|
|
602
|
-
var decoder2 = new TextDecoder;
|
|
603
|
-
function writeString(view, offset, size, value) {
|
|
604
|
-
if (value)
|
|
605
|
-
encoder2.encodeInto(value, view.subarray(offset, offset + size));
|
|
606
|
-
}
|
|
607
|
-
function writeOctal(view, offset, size, value) {
|
|
608
|
-
if (value === undefined)
|
|
609
|
-
return;
|
|
610
|
-
const octalString = value.toString(8).padStart(size - 1, "0");
|
|
611
|
-
encoder2.encodeInto(octalString, view.subarray(offset, offset + size - 1));
|
|
612
|
-
}
|
|
613
|
-
function readString(view, offset, size) {
|
|
614
|
-
const end = view.indexOf(0, offset);
|
|
615
|
-
const sliceEnd = end === -1 || end > offset + size ? offset + size : end;
|
|
616
|
-
return decoder2.decode(view.subarray(offset, sliceEnd));
|
|
617
|
-
}
|
|
618
|
-
function readOctal(view, offset, size) {
|
|
619
|
-
let value = 0;
|
|
620
|
-
const end = offset + size;
|
|
621
|
-
for (let i = offset;i < end; i++) {
|
|
622
|
-
const charCode = view[i];
|
|
623
|
-
if (charCode === 0)
|
|
624
|
-
break;
|
|
625
|
-
if (charCode === 32)
|
|
626
|
-
continue;
|
|
627
|
-
value = value * 8 + (charCode - 48);
|
|
628
|
-
}
|
|
629
|
-
return value;
|
|
630
|
-
}
|
|
631
|
-
function readNumeric(view, offset, size) {
|
|
632
|
-
if (view[offset] & 128) {
|
|
633
|
-
let result = 0;
|
|
634
|
-
result = view[offset] & 127;
|
|
635
|
-
for (let i = 1;i < size; i++)
|
|
636
|
-
result = result * 256 + view[offset + i];
|
|
637
|
-
if (!Number.isSafeInteger(result))
|
|
638
|
-
throw new Error("TAR number too large");
|
|
639
|
-
return result;
|
|
640
|
-
}
|
|
641
|
-
return readOctal(view, offset, size);
|
|
642
|
-
}
|
|
643
|
-
var isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;
|
|
644
|
-
async function normalizeBody(body) {
|
|
645
|
-
if (body === null || body === undefined)
|
|
646
|
-
return EMPTY2;
|
|
647
|
-
if (body instanceof Uint8Array)
|
|
648
|
-
return body;
|
|
649
|
-
if (typeof body === "string")
|
|
650
|
-
return encoder2.encode(body);
|
|
651
|
-
if (body instanceof ArrayBuffer)
|
|
652
|
-
return new Uint8Array(body);
|
|
653
|
-
if (body instanceof Blob)
|
|
654
|
-
return new Uint8Array(await body.arrayBuffer());
|
|
655
|
-
throw new TypeError("Unsupported content type for entry body.");
|
|
656
|
-
}
|
|
657
|
-
function transformHeader(header, options) {
|
|
658
|
-
const { strip, filter, map } = options;
|
|
659
|
-
if (!strip && !filter && !map)
|
|
660
|
-
return header;
|
|
661
|
-
const h = { ...header };
|
|
662
|
-
if (strip && strip > 0) {
|
|
663
|
-
const components = h.name.split("/").filter(Boolean);
|
|
664
|
-
if (strip >= components.length)
|
|
665
|
-
return null;
|
|
666
|
-
const newName = components.slice(strip).join("/");
|
|
667
|
-
h.name = h.type === DIRECTORY && !newName.endsWith("/") ? `${newName}/` : newName;
|
|
668
|
-
if (h.linkname?.startsWith("/")) {
|
|
669
|
-
const linkComponents = h.linkname.split("/").filter(Boolean);
|
|
670
|
-
h.linkname = strip >= linkComponents.length ? "/" : `/${linkComponents.slice(strip).join("/")}`;
|
|
671
|
-
}
|
|
672
|
-
}
|
|
673
|
-
if (filter?.(h) === false)
|
|
674
|
-
return null;
|
|
675
|
-
const result = map ? map(h) : h;
|
|
676
|
-
if (result && (!result.name || !result.name.trim() || result.name === "." || result.name === "/"))
|
|
677
|
-
return null;
|
|
678
|
-
return result;
|
|
679
|
-
}
|
|
680
|
-
var CHECKSUM_SPACE = 32;
|
|
681
|
-
var ASCII_ZERO = 48;
|
|
682
|
-
function validateChecksum(block) {
|
|
683
|
-
const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);
|
|
684
|
-
let sum = 0;
|
|
685
|
-
for (let i = 0;i < block.length; i++)
|
|
686
|
-
if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)
|
|
687
|
-
sum += CHECKSUM_SPACE;
|
|
688
|
-
else
|
|
689
|
-
sum += block[i];
|
|
690
|
-
return stored === sum;
|
|
691
|
-
}
|
|
692
|
-
function writeChecksum(block) {
|
|
693
|
-
block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);
|
|
694
|
-
let checksum = 0;
|
|
695
|
-
for (const byte of block)
|
|
696
|
-
checksum += byte;
|
|
697
|
-
for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {
|
|
698
|
-
block[i] = (checksum & 7) + ASCII_ZERO;
|
|
699
|
-
checksum >>= 3;
|
|
700
|
-
}
|
|
701
|
-
block[USTAR_CHECKSUM_OFFSET + 6] = 0;
|
|
702
|
-
block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;
|
|
703
|
-
}
|
|
704
|
-
function generatePax(header) {
|
|
705
|
-
const paxRecords = {};
|
|
706
|
-
if (header.name.length > USTAR_NAME_SIZE) {
|
|
707
|
-
if (findUstarSplit(header.name) === null)
|
|
708
|
-
paxRecords.path = header.name;
|
|
709
|
-
}
|
|
710
|
-
if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)
|
|
711
|
-
paxRecords.linkpath = header.linkname;
|
|
712
|
-
if (header.uname && header.uname.length > USTAR_UNAME_SIZE)
|
|
713
|
-
paxRecords.uname = header.uname;
|
|
714
|
-
if (header.gname && header.gname.length > USTAR_GNAME_SIZE)
|
|
715
|
-
paxRecords.gname = header.gname;
|
|
716
|
-
if (header.uid != null && header.uid > USTAR_MAX_UID_GID)
|
|
717
|
-
paxRecords.uid = String(header.uid);
|
|
718
|
-
if (header.gid != null && header.gid > USTAR_MAX_UID_GID)
|
|
719
|
-
paxRecords.gid = String(header.gid);
|
|
720
|
-
if (header.size != null && header.size > USTAR_MAX_SIZE)
|
|
721
|
-
paxRecords.size = String(header.size);
|
|
722
|
-
if (header.pax)
|
|
723
|
-
Object.assign(paxRecords, header.pax);
|
|
724
|
-
const paxEntries = Object.entries(paxRecords);
|
|
725
|
-
if (paxEntries.length === 0)
|
|
726
|
-
return null;
|
|
727
|
-
const paxBody = encoder2.encode(paxEntries.map(([key, value]) => {
|
|
728
|
-
const record = `${key}=${value}
|
|
729
|
-
`;
|
|
730
|
-
const partLength = encoder2.encode(record).length + 1;
|
|
731
|
-
let totalLength = partLength + String(partLength).length;
|
|
732
|
-
totalLength = partLength + String(totalLength).length;
|
|
733
|
-
return `${totalLength} ${record}`;
|
|
734
|
-
}).join(""));
|
|
735
|
-
return {
|
|
736
|
-
paxHeader: createTarHeader({
|
|
737
|
-
name: decoder2.decode(encoder2.encode(`PaxHeader/${header.name}`).slice(0, 100)),
|
|
738
|
-
size: paxBody.length,
|
|
739
|
-
type: "pax-header",
|
|
740
|
-
mode: 420,
|
|
741
|
-
mtime: header.mtime,
|
|
742
|
-
uname: header.uname,
|
|
743
|
-
gname: header.gname,
|
|
744
|
-
uid: header.uid,
|
|
745
|
-
gid: header.gid
|
|
746
|
-
}),
|
|
747
|
-
paxBody
|
|
748
|
-
};
|
|
749
|
-
}
|
|
750
|
-
function findUstarSplit(path) {
|
|
751
|
-
if (path.length <= USTAR_NAME_SIZE)
|
|
752
|
-
return null;
|
|
753
|
-
const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;
|
|
754
|
-
const slashIndex = path.lastIndexOf("/", USTAR_PREFIX_SIZE);
|
|
755
|
-
if (slashIndex > 0 && slashIndex >= minSlashIndex)
|
|
756
|
-
return {
|
|
757
|
-
prefix: path.slice(0, slashIndex),
|
|
758
|
-
name: path.slice(slashIndex + 1)
|
|
759
|
-
};
|
|
760
|
-
return null;
|
|
761
|
-
}
|
|
762
|
-
function createTarHeader(header) {
|
|
763
|
-
const view = new Uint8Array(BLOCK_SIZE2);
|
|
764
|
-
const size = isBodyless(header) ? 0 : header.size ?? 0;
|
|
765
|
-
let name = header.name;
|
|
766
|
-
let prefix = "";
|
|
767
|
-
if (!header.pax?.path) {
|
|
768
|
-
const split = findUstarSplit(name);
|
|
769
|
-
if (split) {
|
|
770
|
-
name = split.name;
|
|
771
|
-
prefix = split.prefix;
|
|
772
|
-
}
|
|
773
|
-
}
|
|
774
|
-
writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);
|
|
775
|
-
writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));
|
|
776
|
-
writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 0);
|
|
777
|
-
writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);
|
|
778
|
-
writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);
|
|
779
|
-
writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));
|
|
780
|
-
writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);
|
|
781
|
-
writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);
|
|
782
|
-
writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, "ustar\x00");
|
|
783
|
-
writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);
|
|
784
|
-
writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);
|
|
785
|
-
writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);
|
|
786
|
-
writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);
|
|
787
|
-
writeChecksum(view);
|
|
788
|
-
return view;
|
|
789
|
-
}
|
|
790
|
-
function parseUstarHeader(block, strict) {
|
|
791
|
-
if (strict && !validateChecksum(block))
|
|
792
|
-
throw new Error("Invalid tar header checksum.");
|
|
793
|
-
const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);
|
|
794
|
-
const header = {
|
|
795
|
-
name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),
|
|
796
|
-
mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),
|
|
797
|
-
uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),
|
|
798
|
-
gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),
|
|
799
|
-
size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),
|
|
800
|
-
mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),
|
|
801
|
-
type: FLAGTYPE[typeflag] || FILE,
|
|
802
|
-
linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)
|
|
803
|
-
};
|
|
804
|
-
const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);
|
|
805
|
-
if (magic.trim() === "ustar") {
|
|
806
|
-
header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);
|
|
807
|
-
header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);
|
|
808
|
-
}
|
|
809
|
-
if (magic === "ustar")
|
|
810
|
-
header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);
|
|
811
|
-
return header;
|
|
812
|
-
}
|
|
813
|
-
var PAX_MAPPING = {
|
|
814
|
-
path: ["name", (v) => v],
|
|
815
|
-
linkpath: ["linkname", (v) => v],
|
|
816
|
-
size: ["size", (v) => parseInt(v, 10)],
|
|
817
|
-
mtime: ["mtime", parseFloat],
|
|
818
|
-
uid: ["uid", (v) => parseInt(v, 10)],
|
|
819
|
-
gid: ["gid", (v) => parseInt(v, 10)],
|
|
820
|
-
uname: ["uname", (v) => v],
|
|
821
|
-
gname: ["gname", (v) => v]
|
|
822
|
-
};
|
|
823
|
-
function parsePax(buffer) {
|
|
824
|
-
const decoder$1 = new TextDecoder("utf-8");
|
|
825
|
-
const overrides = {};
|
|
826
|
-
const pax = {};
|
|
827
|
-
let offset = 0;
|
|
828
|
-
while (offset < buffer.length) {
|
|
829
|
-
const spaceIndex = buffer.indexOf(32, offset);
|
|
830
|
-
if (spaceIndex === -1)
|
|
831
|
-
break;
|
|
832
|
-
const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);
|
|
833
|
-
if (Number.isNaN(length) || length === 0)
|
|
834
|
-
break;
|
|
835
|
-
const recordEnd = offset + length;
|
|
836
|
-
const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split("=", 2);
|
|
837
|
-
if (key && value !== undefined) {
|
|
838
|
-
pax[key] = value;
|
|
839
|
-
const mapping = PAX_MAPPING[key];
|
|
840
|
-
if (mapping) {
|
|
841
|
-
const [targetKey, parser] = mapping;
|
|
842
|
-
const parsedValue = parser(value);
|
|
843
|
-
if (typeof parsedValue === "string" || !Number.isNaN(parsedValue))
|
|
844
|
-
overrides[targetKey] = parsedValue;
|
|
845
|
-
}
|
|
846
|
-
}
|
|
847
|
-
offset = recordEnd;
|
|
848
|
-
}
|
|
849
|
-
if (Object.keys(pax).length > 0)
|
|
850
|
-
overrides.pax = pax;
|
|
851
|
-
return overrides;
|
|
852
|
-
}
|
|
853
|
-
function applyOverrides(header, overrides) {
|
|
854
|
-
if (overrides.name !== undefined)
|
|
855
|
-
header.name = overrides.name;
|
|
856
|
-
if (overrides.linkname !== undefined)
|
|
857
|
-
header.linkname = overrides.linkname;
|
|
858
|
-
if (overrides.size !== undefined)
|
|
859
|
-
header.size = overrides.size;
|
|
860
|
-
if (overrides.mtime !== undefined)
|
|
861
|
-
header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);
|
|
862
|
-
if (overrides.uid !== undefined)
|
|
863
|
-
header.uid = overrides.uid;
|
|
864
|
-
if (overrides.gid !== undefined)
|
|
865
|
-
header.gid = overrides.gid;
|
|
866
|
-
if (overrides.uname !== undefined)
|
|
867
|
-
header.uname = overrides.uname;
|
|
868
|
-
if (overrides.gname !== undefined)
|
|
869
|
-
header.gname = overrides.gname;
|
|
870
|
-
if (overrides.pax)
|
|
871
|
-
header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);
|
|
872
|
-
}
|
|
873
|
-
function getMetaParser(type) {
|
|
874
|
-
switch (type) {
|
|
875
|
-
case "pax-global-header":
|
|
876
|
-
case "pax-header":
|
|
877
|
-
return parsePax;
|
|
878
|
-
case "gnu-long-name":
|
|
879
|
-
return (data) => ({ name: readString(data, 0, data.length) });
|
|
880
|
-
case "gnu-long-link-name":
|
|
881
|
-
return (data) => ({ linkname: readString(data, 0, data.length) });
|
|
882
|
-
default:
|
|
883
|
-
return;
|
|
884
|
-
}
|
|
885
|
-
}
|
|
886
|
-
function getHeaderBlocks(header) {
|
|
887
|
-
const base = createTarHeader(header);
|
|
888
|
-
const pax = generatePax(header);
|
|
889
|
-
if (!pax)
|
|
890
|
-
return [base];
|
|
891
|
-
const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;
|
|
892
|
-
const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK2.subarray(0, paxPadding)] : [];
|
|
893
|
-
return [
|
|
894
|
-
pax.paxHeader,
|
|
895
|
-
pax.paxBody,
|
|
896
|
-
...paddingBlocks,
|
|
897
|
-
base
|
|
898
|
-
];
|
|
899
|
-
}
|
|
900
|
-
var EOF_BUFFER2 = new Uint8Array(BLOCK_SIZE2 * 2);
|
|
901
|
-
function createTarPacker(onData, onError, onFinalize) {
|
|
902
|
-
let currentHeader = null;
|
|
903
|
-
let bytesWritten = 0;
|
|
904
|
-
let finalized = false;
|
|
905
|
-
return {
|
|
906
|
-
add(header) {
|
|
907
|
-
if (finalized) {
|
|
908
|
-
const error = /* @__PURE__ */ new Error("No new tar entries after finalize.");
|
|
909
|
-
onError(error);
|
|
910
|
-
throw error;
|
|
911
|
-
}
|
|
912
|
-
if (currentHeader !== null) {
|
|
913
|
-
const error = /* @__PURE__ */ new Error("Previous entry must be completed before adding a new one");
|
|
914
|
-
onError(error);
|
|
915
|
-
throw error;
|
|
916
|
-
}
|
|
917
|
-
try {
|
|
918
|
-
const size = isBodyless(header) ? 0 : header.size ?? 0;
|
|
919
|
-
const headerBlocks = getHeaderBlocks({
|
|
920
|
-
...header,
|
|
921
|
-
size
|
|
922
|
-
});
|
|
923
|
-
for (const block of headerBlocks)
|
|
924
|
-
onData(block);
|
|
925
|
-
currentHeader = {
|
|
926
|
-
...header,
|
|
927
|
-
size
|
|
928
|
-
};
|
|
929
|
-
bytesWritten = 0;
|
|
930
|
-
} catch (error) {
|
|
931
|
-
onError(error);
|
|
932
|
-
}
|
|
933
|
-
},
|
|
934
|
-
write(chunk) {
|
|
935
|
-
if (!currentHeader) {
|
|
936
|
-
const error = /* @__PURE__ */ new Error("No active tar entry.");
|
|
937
|
-
onError(error);
|
|
938
|
-
throw error;
|
|
939
|
-
}
|
|
940
|
-
if (finalized) {
|
|
941
|
-
const error = /* @__PURE__ */ new Error("Cannot write data after finalize.");
|
|
942
|
-
onError(error);
|
|
943
|
-
throw error;
|
|
944
|
-
}
|
|
945
|
-
const newTotal = bytesWritten + chunk.length;
|
|
946
|
-
if (newTotal > currentHeader.size) {
|
|
947
|
-
const error = /* @__PURE__ */ new Error(`"${currentHeader.name}" exceeds given size of ${currentHeader.size} bytes.`);
|
|
948
|
-
onError(error);
|
|
949
|
-
throw error;
|
|
950
|
-
}
|
|
951
|
-
try {
|
|
952
|
-
bytesWritten = newTotal;
|
|
953
|
-
onData(chunk);
|
|
954
|
-
} catch (error) {
|
|
955
|
-
onError(error);
|
|
956
|
-
}
|
|
957
|
-
},
|
|
958
|
-
endEntry() {
|
|
959
|
-
if (!currentHeader) {
|
|
960
|
-
const error = /* @__PURE__ */ new Error("No active entry to end.");
|
|
961
|
-
onError(error);
|
|
962
|
-
throw error;
|
|
963
|
-
}
|
|
964
|
-
if (finalized) {
|
|
965
|
-
const error = /* @__PURE__ */ new Error("Cannot end entry after finalize.");
|
|
966
|
-
onError(error);
|
|
967
|
-
throw error;
|
|
968
|
-
}
|
|
969
|
-
try {
|
|
970
|
-
if (bytesWritten !== currentHeader.size) {
|
|
971
|
-
const error = /* @__PURE__ */ new Error(`Size mismatch for "${currentHeader.name}".`);
|
|
972
|
-
onError(error);
|
|
973
|
-
throw error;
|
|
974
|
-
}
|
|
975
|
-
const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;
|
|
976
|
-
if (paddingSize > 0)
|
|
977
|
-
onData(new Uint8Array(paddingSize));
|
|
978
|
-
currentHeader = null;
|
|
979
|
-
bytesWritten = 0;
|
|
980
|
-
} catch (error) {
|
|
981
|
-
onError(error);
|
|
982
|
-
throw error;
|
|
983
|
-
}
|
|
984
|
-
},
|
|
985
|
-
finalize() {
|
|
986
|
-
if (finalized) {
|
|
987
|
-
const error = /* @__PURE__ */ new Error("Archive has already been finalized");
|
|
988
|
-
onError(error);
|
|
989
|
-
throw error;
|
|
990
|
-
}
|
|
991
|
-
if (currentHeader !== null) {
|
|
992
|
-
const error = /* @__PURE__ */ new Error("Cannot finalize while an entry is still active");
|
|
993
|
-
onError(error);
|
|
994
|
-
throw error;
|
|
995
|
-
}
|
|
996
|
-
try {
|
|
997
|
-
onData(EOF_BUFFER2);
|
|
998
|
-
finalized = true;
|
|
999
|
-
if (onFinalize)
|
|
1000
|
-
onFinalize();
|
|
1001
|
-
} catch (error) {
|
|
1002
|
-
onError(error);
|
|
1003
|
-
}
|
|
1004
|
-
}
|
|
1005
|
-
};
|
|
1006
|
-
}
|
|
1007
|
-
var INITIAL_CAPACITY = 256;
|
|
1008
|
-
function createChunkQueue() {
|
|
1009
|
-
let chunks = new Array(INITIAL_CAPACITY);
|
|
1010
|
-
let capacityMask = chunks.length - 1;
|
|
1011
|
-
let head = 0;
|
|
1012
|
-
let tail = 0;
|
|
1013
|
-
let totalAvailable = 0;
|
|
1014
|
-
const consumeFromHead = (count) => {
|
|
1015
|
-
const chunk = chunks[head];
|
|
1016
|
-
if (count === chunk.length) {
|
|
1017
|
-
chunks[head] = EMPTY2;
|
|
1018
|
-
head = head + 1 & capacityMask;
|
|
1019
|
-
} else
|
|
1020
|
-
chunks[head] = chunk.subarray(count);
|
|
1021
|
-
totalAvailable -= count;
|
|
1022
|
-
if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {
|
|
1023
|
-
chunks = new Array(INITIAL_CAPACITY);
|
|
1024
|
-
capacityMask = INITIAL_CAPACITY - 1;
|
|
1025
|
-
head = 0;
|
|
1026
|
-
tail = 0;
|
|
1027
|
-
}
|
|
1028
|
-
};
|
|
1029
|
-
function pull(bytes, callback) {
|
|
1030
|
-
if (callback) {
|
|
1031
|
-
let fed = 0;
|
|
1032
|
-
let remaining$1 = Math.min(bytes, totalAvailable);
|
|
1033
|
-
while (remaining$1 > 0) {
|
|
1034
|
-
const chunk = chunks[head];
|
|
1035
|
-
const toFeed = Math.min(remaining$1, chunk.length);
|
|
1036
|
-
const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);
|
|
1037
|
-
consumeFromHead(toFeed);
|
|
1038
|
-
remaining$1 -= toFeed;
|
|
1039
|
-
fed += toFeed;
|
|
1040
|
-
if (!callback(segment))
|
|
1041
|
-
break;
|
|
1042
|
-
}
|
|
1043
|
-
return fed;
|
|
1044
|
-
}
|
|
1045
|
-
if (totalAvailable < bytes)
|
|
1046
|
-
return null;
|
|
1047
|
-
if (bytes === 0)
|
|
1048
|
-
return EMPTY2;
|
|
1049
|
-
const firstChunk = chunks[head];
|
|
1050
|
-
if (firstChunk.length >= bytes) {
|
|
1051
|
-
const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
|
|
1052
|
-
consumeFromHead(bytes);
|
|
1053
|
-
return view;
|
|
1054
|
-
}
|
|
1055
|
-
const result = new Uint8Array(bytes);
|
|
1056
|
-
let copied = 0;
|
|
1057
|
-
let remaining = bytes;
|
|
1058
|
-
while (remaining > 0) {
|
|
1059
|
-
const chunk = chunks[head];
|
|
1060
|
-
const toCopy = Math.min(remaining, chunk.length);
|
|
1061
|
-
result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);
|
|
1062
|
-
copied += toCopy;
|
|
1063
|
-
remaining -= toCopy;
|
|
1064
|
-
consumeFromHead(toCopy);
|
|
1065
|
-
}
|
|
1066
|
-
return result;
|
|
1067
|
-
}
|
|
1068
|
-
return {
|
|
1069
|
-
push: (chunk) => {
|
|
1070
|
-
if (chunk.length === 0)
|
|
1071
|
-
return;
|
|
1072
|
-
let nextTail = tail + 1 & capacityMask;
|
|
1073
|
-
if (nextTail === head) {
|
|
1074
|
-
const oldLen = chunks.length;
|
|
1075
|
-
const newLen = oldLen * 2;
|
|
1076
|
-
const newChunks = new Array(newLen);
|
|
1077
|
-
const count = tail - head + oldLen & oldLen - 1;
|
|
1078
|
-
if (head < tail)
|
|
1079
|
-
for (let i = 0;i < count; i++)
|
|
1080
|
-
newChunks[i] = chunks[head + i];
|
|
1081
|
-
else if (count > 0) {
|
|
1082
|
-
const firstPart = oldLen - head;
|
|
1083
|
-
for (let i = 0;i < firstPart; i++)
|
|
1084
|
-
newChunks[i] = chunks[head + i];
|
|
1085
|
-
for (let i = 0;i < tail; i++)
|
|
1086
|
-
newChunks[firstPart + i] = chunks[i];
|
|
1087
|
-
}
|
|
1088
|
-
chunks = newChunks;
|
|
1089
|
-
capacityMask = newLen - 1;
|
|
1090
|
-
head = 0;
|
|
1091
|
-
tail = count;
|
|
1092
|
-
nextTail = tail + 1 & capacityMask;
|
|
1093
|
-
}
|
|
1094
|
-
chunks[tail] = chunk;
|
|
1095
|
-
tail = nextTail;
|
|
1096
|
-
totalAvailable += chunk.length;
|
|
1097
|
-
},
|
|
1098
|
-
available: () => totalAvailable,
|
|
1099
|
-
peek: (bytes) => {
|
|
1100
|
-
if (totalAvailable < bytes)
|
|
1101
|
-
return null;
|
|
1102
|
-
if (bytes === 0)
|
|
1103
|
-
return EMPTY2;
|
|
1104
|
-
const firstChunk = chunks[head];
|
|
1105
|
-
if (firstChunk.length >= bytes)
|
|
1106
|
-
return firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);
|
|
1107
|
-
const result = new Uint8Array(bytes);
|
|
1108
|
-
let copied = 0;
|
|
1109
|
-
let index = head;
|
|
1110
|
-
while (copied < bytes) {
|
|
1111
|
-
const chunk = chunks[index];
|
|
1112
|
-
const toCopy = Math.min(bytes - copied, chunk.length);
|
|
1113
|
-
if (toCopy === chunk.length)
|
|
1114
|
-
result.set(chunk, copied);
|
|
1115
|
-
else
|
|
1116
|
-
result.set(chunk.subarray(0, toCopy), copied);
|
|
1117
|
-
copied += toCopy;
|
|
1118
|
-
index = index + 1 & capacityMask;
|
|
1119
|
-
}
|
|
1120
|
-
return result;
|
|
1121
|
-
},
|
|
1122
|
-
discard: (bytes) => {
|
|
1123
|
-
if (bytes > totalAvailable)
|
|
1124
|
-
throw new Error("Too many bytes consumed");
|
|
1125
|
-
if (bytes === 0)
|
|
1126
|
-
return;
|
|
1127
|
-
let remaining = bytes;
|
|
1128
|
-
while (remaining > 0) {
|
|
1129
|
-
const chunk = chunks[head];
|
|
1130
|
-
const toConsume = Math.min(remaining, chunk.length);
|
|
1131
|
-
consumeFromHead(toConsume);
|
|
1132
|
-
remaining -= toConsume;
|
|
1133
|
-
}
|
|
1134
|
-
},
|
|
1135
|
-
pull
|
|
1136
|
-
};
|
|
1137
|
-
}
|
|
1138
|
-
var STATE_HEADER = 0;
|
|
1139
|
-
var STATE_BODY = 1;
|
|
1140
|
-
var truncateErr = /* @__PURE__ */ new Error("Tar archive is truncated.");
|
|
1141
|
-
function createUnpacker(options = {}) {
|
|
1142
|
-
const strict = options.strict ?? false;
|
|
1143
|
-
const { available, peek, push, discard, pull } = createChunkQueue();
|
|
1144
|
-
let state = STATE_HEADER;
|
|
1145
|
-
let ended = false;
|
|
1146
|
-
let done = false;
|
|
1147
|
-
let eof = false;
|
|
1148
|
-
let currentEntry = null;
|
|
1149
|
-
const paxGlobals = {};
|
|
1150
|
-
let nextEntryOverrides = {};
|
|
1151
|
-
const unpacker = {
|
|
1152
|
-
isEntryActive: () => state === STATE_BODY,
|
|
1153
|
-
isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,
|
|
1154
|
-
write(chunk) {
|
|
1155
|
-
if (ended)
|
|
1156
|
-
throw new Error("Archive already ended.");
|
|
1157
|
-
push(chunk);
|
|
1158
|
-
},
|
|
1159
|
-
end() {
|
|
1160
|
-
ended = true;
|
|
1161
|
-
},
|
|
1162
|
-
readHeader() {
|
|
1163
|
-
if (state !== STATE_HEADER)
|
|
1164
|
-
throw new Error("Cannot read header while an entry is active");
|
|
1165
|
-
if (done)
|
|
1166
|
-
return;
|
|
1167
|
-
while (!done) {
|
|
1168
|
-
if (available() < BLOCK_SIZE2) {
|
|
1169
|
-
if (ended) {
|
|
1170
|
-
if (available() > 0 && strict)
|
|
1171
|
-
throw truncateErr;
|
|
1172
|
-
done = true;
|
|
1173
|
-
return;
|
|
1174
|
-
}
|
|
1175
|
-
return null;
|
|
1176
|
-
}
|
|
1177
|
-
const headerBlock = peek(BLOCK_SIZE2);
|
|
1178
|
-
if (isZeroBlock(headerBlock)) {
|
|
1179
|
-
if (available() < BLOCK_SIZE2 * 2) {
|
|
1180
|
-
if (ended) {
|
|
1181
|
-
if (strict)
|
|
1182
|
-
throw truncateErr;
|
|
1183
|
-
done = true;
|
|
1184
|
-
return;
|
|
1185
|
-
}
|
|
1186
|
-
return null;
|
|
1187
|
-
}
|
|
1188
|
-
if (isZeroBlock(peek(BLOCK_SIZE2 * 2).subarray(BLOCK_SIZE2))) {
|
|
1189
|
-
discard(BLOCK_SIZE2 * 2);
|
|
1190
|
-
done = true;
|
|
1191
|
-
eof = true;
|
|
1192
|
-
return;
|
|
1193
|
-
}
|
|
1194
|
-
if (strict)
|
|
1195
|
-
throw new Error("Invalid tar header.");
|
|
1196
|
-
discard(BLOCK_SIZE2);
|
|
1197
|
-
continue;
|
|
1198
|
-
}
|
|
1199
|
-
let internalHeader;
|
|
1200
|
-
try {
|
|
1201
|
-
internalHeader = parseUstarHeader(headerBlock, strict);
|
|
1202
|
-
} catch (err) {
|
|
1203
|
-
if (strict)
|
|
1204
|
-
throw err;
|
|
1205
|
-
discard(BLOCK_SIZE2);
|
|
1206
|
-
continue;
|
|
1207
|
-
}
|
|
1208
|
-
const metaParser = getMetaParser(internalHeader.type);
|
|
1209
|
-
if (metaParser) {
|
|
1210
|
-
const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;
|
|
1211
|
-
if (available() < BLOCK_SIZE2 + paddedSize) {
|
|
1212
|
-
if (ended && strict)
|
|
1213
|
-
throw truncateErr;
|
|
1214
|
-
return null;
|
|
1215
|
-
}
|
|
1216
|
-
discard(BLOCK_SIZE2);
|
|
1217
|
-
const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));
|
|
1218
|
-
const target = internalHeader.type === "pax-global-header" ? paxGlobals : nextEntryOverrides;
|
|
1219
|
-
for (const key in overrides)
|
|
1220
|
-
target[key] = overrides[key];
|
|
1221
|
-
continue;
|
|
1222
|
-
}
|
|
1223
|
-
discard(BLOCK_SIZE2);
|
|
1224
|
-
const header = internalHeader;
|
|
1225
|
-
if (internalHeader.prefix)
|
|
1226
|
-
header.name = `${internalHeader.prefix}/${header.name}`;
|
|
1227
|
-
applyOverrides(header, paxGlobals);
|
|
1228
|
-
applyOverrides(header, nextEntryOverrides);
|
|
1229
|
-
nextEntryOverrides = {};
|
|
1230
|
-
currentEntry = {
|
|
1231
|
-
header,
|
|
1232
|
-
remaining: header.size,
|
|
1233
|
-
padding: -header.size & BLOCK_SIZE_MASK
|
|
1234
|
-
};
|
|
1235
|
-
state = STATE_BODY;
|
|
1236
|
-
return header;
|
|
1237
|
-
}
|
|
1238
|
-
},
|
|
1239
|
-
streamBody(callback) {
|
|
1240
|
-
if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)
|
|
1241
|
-
return 0;
|
|
1242
|
-
const bytesToFeed = Math.min(currentEntry.remaining, available());
|
|
1243
|
-
if (bytesToFeed === 0)
|
|
1244
|
-
return 0;
|
|
1245
|
-
const fed = pull(bytesToFeed, callback);
|
|
1246
|
-
currentEntry.remaining -= fed;
|
|
1247
|
-
return fed;
|
|
1248
|
-
},
|
|
1249
|
-
skipPadding() {
|
|
1250
|
-
if (state !== STATE_BODY || !currentEntry)
|
|
1251
|
-
return true;
|
|
1252
|
-
if (currentEntry.remaining > 0)
|
|
1253
|
-
throw new Error("Body not fully consumed");
|
|
1254
|
-
if (available() < currentEntry.padding)
|
|
1255
|
-
return false;
|
|
1256
|
-
discard(currentEntry.padding);
|
|
1257
|
-
currentEntry = null;
|
|
1258
|
-
state = STATE_HEADER;
|
|
1259
|
-
return true;
|
|
1260
|
-
},
|
|
1261
|
-
skipEntry() {
|
|
1262
|
-
if (state !== STATE_BODY || !currentEntry)
|
|
1263
|
-
return true;
|
|
1264
|
-
const toDiscard = Math.min(currentEntry.remaining, available());
|
|
1265
|
-
if (toDiscard > 0) {
|
|
1266
|
-
discard(toDiscard);
|
|
1267
|
-
currentEntry.remaining -= toDiscard;
|
|
1268
|
-
}
|
|
1269
|
-
if (currentEntry.remaining > 0)
|
|
1270
|
-
return false;
|
|
1271
|
-
return unpacker.skipPadding();
|
|
1272
|
-
},
|
|
1273
|
-
validateEOF() {
|
|
1274
|
-
if (strict) {
|
|
1275
|
-
if (!eof)
|
|
1276
|
-
throw truncateErr;
|
|
1277
|
-
if (available() > 0) {
|
|
1278
|
-
if (pull(available()).some((byte) => byte !== 0))
|
|
1279
|
-
throw new Error("Invalid EOF.");
|
|
1280
|
-
}
|
|
1281
|
-
}
|
|
1282
|
-
}
|
|
1283
|
-
};
|
|
1284
|
-
return unpacker;
|
|
1285
|
-
}
|
|
1286
|
-
function isZeroBlock(block) {
|
|
1287
|
-
if (block.byteOffset % 8 === 0) {
|
|
1288
|
-
const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);
|
|
1289
|
-
for (let i = 0;i < view.length; i++)
|
|
1290
|
-
if (view[i] !== 0n)
|
|
1291
|
-
return false;
|
|
1292
|
-
return true;
|
|
1293
|
-
}
|
|
1294
|
-
for (let i = 0;i < block.length; i++)
|
|
1295
|
-
if (block[i] !== 0)
|
|
1296
|
-
return false;
|
|
1297
|
-
return true;
|
|
1298
|
-
}
|
|
1299
|
-
function createTarPacker2() {
|
|
1300
|
-
let streamController;
|
|
1301
|
-
let packer;
|
|
1302
|
-
return {
|
|
1303
|
-
readable: new ReadableStream({ start(controller) {
|
|
1304
|
-
streamController = controller;
|
|
1305
|
-
packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));
|
|
1306
|
-
} }),
|
|
1307
|
-
controller: {
|
|
1308
|
-
add(header) {
|
|
1309
|
-
const bodyless = isBodyless(header);
|
|
1310
|
-
const h = { ...header };
|
|
1311
|
-
if (bodyless)
|
|
1312
|
-
h.size = 0;
|
|
1313
|
-
packer.add(h);
|
|
1314
|
-
if (bodyless)
|
|
1315
|
-
packer.endEntry();
|
|
1316
|
-
return new WritableStream({
|
|
1317
|
-
write(chunk) {
|
|
1318
|
-
packer.write(chunk);
|
|
1319
|
-
},
|
|
1320
|
-
close() {
|
|
1321
|
-
if (!bodyless)
|
|
1322
|
-
packer.endEntry();
|
|
1323
|
-
},
|
|
1324
|
-
abort(reason) {
|
|
1325
|
-
streamController.error(reason);
|
|
1326
|
-
}
|
|
1327
|
-
});
|
|
1328
|
-
},
|
|
1329
|
-
finalize() {
|
|
1330
|
-
packer.finalize();
|
|
1331
|
-
},
|
|
1332
|
-
error(err) {
|
|
1333
|
-
streamController.error(err);
|
|
1334
|
-
}
|
|
1335
|
-
}
|
|
1336
|
-
};
|
|
1337
|
-
}
|
|
1338
|
-
async function streamToBuffer(stream) {
|
|
1339
|
-
const chunks = [];
|
|
1340
|
-
const reader = stream.getReader();
|
|
1341
|
-
let totalLength = 0;
|
|
1342
|
-
try {
|
|
1343
|
-
while (true) {
|
|
1344
|
-
const { done, value } = await reader.read();
|
|
1345
|
-
if (done)
|
|
1346
|
-
break;
|
|
1347
|
-
chunks.push(value);
|
|
1348
|
-
totalLength += value.length;
|
|
1349
|
-
}
|
|
1350
|
-
const result = new Uint8Array(totalLength);
|
|
1351
|
-
let offset = 0;
|
|
1352
|
-
for (const chunk of chunks) {
|
|
1353
|
-
result.set(chunk, offset);
|
|
1354
|
-
offset += chunk.length;
|
|
1355
|
-
}
|
|
1356
|
-
return result;
|
|
1357
|
-
} finally {
|
|
1358
|
-
reader.releaseLock();
|
|
1359
|
-
}
|
|
1360
|
-
}
|
|
1361
|
-
var drain = (stream) => stream.pipeTo(new WritableStream);
|
|
1362
|
-
function createTarDecoder(options = {}) {
|
|
1363
|
-
const unpacker = createUnpacker(options);
|
|
1364
|
-
let bodyController = null;
|
|
1365
|
-
let pumping = false;
|
|
1366
|
-
const pump = (controller) => {
|
|
1367
|
-
if (pumping)
|
|
1368
|
-
return;
|
|
1369
|
-
pumping = true;
|
|
1370
|
-
try {
|
|
1371
|
-
while (true)
|
|
1372
|
-
if (unpacker.isEntryActive()) {
|
|
1373
|
-
if (bodyController) {
|
|
1374
|
-
if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())
|
|
1375
|
-
break;
|
|
1376
|
-
} else if (!unpacker.skipEntry())
|
|
1377
|
-
break;
|
|
1378
|
-
if (unpacker.isBodyComplete()) {
|
|
1379
|
-
try {
|
|
1380
|
-
bodyController?.close();
|
|
1381
|
-
} catch {}
|
|
1382
|
-
bodyController = null;
|
|
1383
|
-
if (!unpacker.skipPadding())
|
|
1384
|
-
break;
|
|
1385
|
-
}
|
|
1386
|
-
} else {
|
|
1387
|
-
const header = unpacker.readHeader();
|
|
1388
|
-
if (header === null || header === undefined)
|
|
1389
|
-
break;
|
|
1390
|
-
controller.enqueue({
|
|
1391
|
-
header,
|
|
1392
|
-
body: new ReadableStream({
|
|
1393
|
-
start(c) {
|
|
1394
|
-
if (header.size === 0)
|
|
1395
|
-
c.close();
|
|
1396
|
-
else
|
|
1397
|
-
bodyController = c;
|
|
1398
|
-
},
|
|
1399
|
-
pull: () => pump(controller),
|
|
1400
|
-
cancel() {
|
|
1401
|
-
bodyController = null;
|
|
1402
|
-
pump(controller);
|
|
1403
|
-
}
|
|
1404
|
-
})
|
|
1405
|
-
});
|
|
1406
|
-
}
|
|
1407
|
-
} catch (error) {
|
|
1408
|
-
try {
|
|
1409
|
-
bodyController?.error(error);
|
|
1410
|
-
} catch {}
|
|
1411
|
-
bodyController = null;
|
|
1412
|
-
throw error;
|
|
1413
|
-
} finally {
|
|
1414
|
-
pumping = false;
|
|
1415
|
-
}
|
|
1416
|
-
};
|
|
1417
|
-
return new TransformStream({
|
|
1418
|
-
transform(chunk, controller) {
|
|
1419
|
-
try {
|
|
1420
|
-
unpacker.write(chunk);
|
|
1421
|
-
pump(controller);
|
|
1422
|
-
} catch (error) {
|
|
1423
|
-
try {
|
|
1424
|
-
bodyController?.error(error);
|
|
1425
|
-
} catch {}
|
|
1426
|
-
throw error;
|
|
1427
|
-
}
|
|
1428
|
-
},
|
|
1429
|
-
flush(controller) {
|
|
1430
|
-
try {
|
|
1431
|
-
unpacker.end();
|
|
1432
|
-
pump(controller);
|
|
1433
|
-
unpacker.validateEOF();
|
|
1434
|
-
if (unpacker.isEntryActive() && !unpacker.isBodyComplete())
|
|
1435
|
-
try {
|
|
1436
|
-
bodyController?.close();
|
|
1437
|
-
} catch {}
|
|
1438
|
-
} catch (error) {
|
|
1439
|
-
try {
|
|
1440
|
-
bodyController?.error(error);
|
|
1441
|
-
} catch {}
|
|
1442
|
-
throw error;
|
|
1443
|
-
}
|
|
1444
|
-
}
|
|
1445
|
-
}, undefined, { highWaterMark: 1 });
|
|
1446
|
-
}
|
|
1447
|
-
async function packTar(entries) {
|
|
1448
|
-
const { readable, controller } = createTarPacker2();
|
|
1449
|
-
await (async () => {
|
|
1450
|
-
for (const entry of entries) {
|
|
1451
|
-
const entryStream = controller.add(entry.header);
|
|
1452
|
-
const body = "body" in entry ? entry.body : entry.data;
|
|
1453
|
-
if (!body) {
|
|
1454
|
-
await entryStream.close();
|
|
1455
|
-
continue;
|
|
1456
|
-
}
|
|
1457
|
-
if (body instanceof ReadableStream)
|
|
1458
|
-
await body.pipeTo(entryStream);
|
|
1459
|
-
else if (body instanceof Blob)
|
|
1460
|
-
await body.stream().pipeTo(entryStream);
|
|
1461
|
-
else
|
|
1462
|
-
try {
|
|
1463
|
-
const chunk = await normalizeBody(body);
|
|
1464
|
-
if (chunk.length > 0) {
|
|
1465
|
-
const writer = entryStream.getWriter();
|
|
1466
|
-
await writer.write(chunk);
|
|
1467
|
-
await writer.close();
|
|
1468
|
-
} else
|
|
1469
|
-
await entryStream.close();
|
|
1470
|
-
} catch {
|
|
1471
|
-
throw new TypeError(`Unsupported content type for entry "${entry.header.name}".`);
|
|
1472
|
-
}
|
|
1473
|
-
}
|
|
1474
|
-
})().then(() => controller.finalize()).catch((err) => controller.error(err));
|
|
1475
|
-
return new Uint8Array(await streamToBuffer(readable));
|
|
1476
|
-
}
|
|
1477
|
-
async function unpackTar(archive, options = {}) {
|
|
1478
|
-
const sourceStream = archive instanceof ReadableStream ? archive : new ReadableStream({ start(controller) {
|
|
1479
|
-
controller.enqueue(archive instanceof Uint8Array ? archive : new Uint8Array(archive));
|
|
1480
|
-
controller.close();
|
|
1481
|
-
} });
|
|
1482
|
-
const results = [];
|
|
1483
|
-
const entryStream = sourceStream.pipeThrough(createTarDecoder(options));
|
|
1484
|
-
for await (const entry of entryStream) {
|
|
1485
|
-
let processedHeader;
|
|
1486
|
-
try {
|
|
1487
|
-
processedHeader = transformHeader(entry.header, options);
|
|
1488
|
-
} catch (error) {
|
|
1489
|
-
await entry.body.cancel();
|
|
1490
|
-
throw error;
|
|
1491
|
-
}
|
|
1492
|
-
if (processedHeader === null) {
|
|
1493
|
-
await drain(entry.body);
|
|
1494
|
-
continue;
|
|
1495
|
-
}
|
|
1496
|
-
if (isBodyless(processedHeader)) {
|
|
1497
|
-
await drain(entry.body);
|
|
1498
|
-
results.push({ header: processedHeader });
|
|
1499
|
-
} else
|
|
1500
|
-
results.push({
|
|
1501
|
-
header: processedHeader,
|
|
1502
|
-
data: await streamToBuffer(entry.body)
|
|
1503
|
-
});
|
|
1504
|
-
}
|
|
1505
|
-
return results;
|
|
1506
|
-
}
|
|
1507
|
-
var gzipAsync2 = promisify2(gzip2);
|
|
1508
|
-
var gunzipAsync2 = promisify2(gunzip2);
|
|
1509
|
-
|
|
1510
|
-
class RXCImpl {
|
|
1511
|
-
_buffer;
|
|
1512
|
-
_filesCache = null;
|
|
1513
|
-
constructor(buffer) {
|
|
1514
|
-
this._buffer = buffer;
|
|
1515
|
-
}
|
|
1516
|
-
get stream() {
|
|
1517
|
-
const buffer = this._buffer;
|
|
1518
|
-
return new ReadableStream({
|
|
1519
|
-
start(controller) {
|
|
1520
|
-
controller.enqueue(new Uint8Array(buffer));
|
|
1521
|
-
controller.close();
|
|
1522
|
-
}
|
|
1523
|
-
});
|
|
1524
|
-
}
|
|
1525
|
-
async buffer() {
|
|
1526
|
-
return this._buffer;
|
|
1527
|
-
}
|
|
1528
|
-
async file(path) {
|
|
1529
|
-
const filesMap = await this.files();
|
|
1530
|
-
const content = filesMap.get(path);
|
|
1531
|
-
if (!content) {
|
|
1532
|
-
throw new ContentError(`file not found: ${path}`);
|
|
1533
|
-
}
|
|
1534
|
-
return content;
|
|
1535
|
-
}
|
|
1536
|
-
async files() {
|
|
1537
|
-
if (this._filesCache) {
|
|
1538
|
-
return this._filesCache;
|
|
1539
|
-
}
|
|
1540
|
-
const tarBuffer = await gunzipAsync2(this._buffer);
|
|
1541
|
-
const entries = await unpackTar(tarBuffer);
|
|
1542
|
-
const filesMap = new Map;
|
|
1543
|
-
for (const entry of entries) {
|
|
1544
|
-
if ((entry.header.type === "file" || entry.header.type === undefined) && entry.data) {
|
|
1545
|
-
filesMap.set(entry.header.name, Buffer.from(entry.data));
|
|
1546
|
-
}
|
|
1547
|
-
}
|
|
1548
|
-
this._filesCache = filesMap;
|
|
1549
|
-
return filesMap;
|
|
1550
|
-
}
|
|
1551
|
-
}
|
|
1552
|
-
function isArchiveInput(input) {
|
|
1553
|
-
return "archive" in input && Buffer.isBuffer(input.archive);
|
|
1554
|
-
}
|
|
1555
|
-
async function createRXC(input) {
|
|
1556
|
-
if (isArchiveInput(input)) {
|
|
1557
|
-
return new RXCImpl(input.archive);
|
|
1558
|
-
}
|
|
1559
|
-
const entries = Object.entries(input).map(([name, content]) => {
|
|
1560
|
-
const body = typeof content === "string" ? content : content instanceof Uint8Array ? content : new Uint8Array(content);
|
|
1561
|
-
const size = typeof content === "string" ? Buffer.byteLength(content) : content.length;
|
|
1562
|
-
return {
|
|
1563
|
-
header: { name, size, type: "file" },
|
|
1564
|
-
body
|
|
1565
|
-
};
|
|
1566
|
-
});
|
|
1567
|
-
const tarBuffer = await packTar(entries);
|
|
1568
|
-
const gzipBuffer = await gzipAsync2(Buffer.from(tarBuffer));
|
|
1569
|
-
return new RXCImpl(gzipBuffer);
|
|
1570
|
-
}
|
|
1571
|
-
|
|
1572
|
-
class ResourceTypeError extends ResourceXError2 {
|
|
1573
|
-
constructor(message) {
|
|
1574
|
-
super(message);
|
|
1575
|
-
this.name = "ResourceTypeError";
|
|
1576
|
-
}
|
|
1577
|
-
}
|
|
1578
|
-
var textSerializer = {
|
|
1579
|
-
async serialize(rxr) {
|
|
1580
|
-
return rxr.content.buffer();
|
|
1581
|
-
},
|
|
1582
|
-
async deserialize(data, manifest) {
|
|
1583
|
-
return {
|
|
1584
|
-
locator: parseRXL2(manifest.toLocator()),
|
|
1585
|
-
manifest,
|
|
1586
|
-
content: await createRXC({ archive: data })
|
|
1587
|
-
};
|
|
1588
|
-
}
|
|
1589
|
-
};
|
|
1590
|
-
var textResolver = {
|
|
1591
|
-
schema: undefined,
|
|
1592
|
-
async resolve(rxr) {
|
|
1593
|
-
return {
|
|
1594
|
-
resource: rxr,
|
|
1595
|
-
schema: undefined,
|
|
1596
|
-
execute: async () => {
|
|
1597
|
-
const buffer = await rxr.content.file("content");
|
|
1598
|
-
return buffer.toString("utf-8");
|
|
1599
|
-
}
|
|
1600
|
-
};
|
|
1601
|
-
}
|
|
1602
|
-
};
|
|
1603
|
-
var textType = {
|
|
1604
|
-
name: "text",
|
|
1605
|
-
aliases: ["txt", "plaintext"],
|
|
1606
|
-
description: "Plain text content",
|
|
1607
|
-
serializer: textSerializer,
|
|
1608
|
-
resolver: textResolver
|
|
1609
|
-
};
|
|
1610
|
-
var jsonSerializer = {
|
|
1611
|
-
async serialize(rxr) {
|
|
1612
|
-
return rxr.content.buffer();
|
|
1613
|
-
},
|
|
1614
|
-
async deserialize(data, manifest) {
|
|
1615
|
-
return {
|
|
1616
|
-
locator: parseRXL2(manifest.toLocator()),
|
|
1617
|
-
manifest,
|
|
1618
|
-
content: await createRXC({ archive: data })
|
|
1619
|
-
};
|
|
1620
|
-
}
|
|
1621
|
-
};
|
|
1622
|
-
var jsonResolver = {
|
|
1623
|
-
schema: undefined,
|
|
1624
|
-
async resolve(rxr) {
|
|
1625
|
-
return {
|
|
1626
|
-
resource: rxr,
|
|
1627
|
-
schema: undefined,
|
|
1628
|
-
execute: async () => {
|
|
1629
|
-
const buffer = await rxr.content.file("content");
|
|
1630
|
-
return JSON.parse(buffer.toString("utf-8"));
|
|
1631
|
-
}
|
|
1632
|
-
};
|
|
1633
|
-
}
|
|
1634
|
-
};
|
|
1635
|
-
var jsonType = {
|
|
1636
|
-
name: "json",
|
|
1637
|
-
aliases: ["config", "manifest"],
|
|
1638
|
-
description: "JSON content",
|
|
1639
|
-
serializer: jsonSerializer,
|
|
1640
|
-
resolver: jsonResolver
|
|
1641
|
-
};
|
|
1642
|
-
var binarySerializer = {
|
|
1643
|
-
async serialize(rxr) {
|
|
1644
|
-
return rxr.content.buffer();
|
|
1645
|
-
},
|
|
1646
|
-
async deserialize(data, manifest) {
|
|
1647
|
-
return {
|
|
1648
|
-
locator: parseRXL2(manifest.toLocator()),
|
|
1649
|
-
manifest,
|
|
1650
|
-
content: await createRXC({ archive: data })
|
|
1651
|
-
};
|
|
1652
|
-
}
|
|
1653
|
-
};
|
|
1654
|
-
var binaryResolver = {
|
|
1655
|
-
schema: undefined,
|
|
1656
|
-
async resolve(rxr) {
|
|
1657
|
-
return {
|
|
1658
|
-
resource: rxr,
|
|
1659
|
-
schema: undefined,
|
|
1660
|
-
execute: async () => {
|
|
1661
|
-
return rxr.content.file("content");
|
|
1662
|
-
}
|
|
1663
|
-
};
|
|
1664
|
-
}
|
|
1665
|
-
};
|
|
1666
|
-
var binaryType = {
|
|
1667
|
-
name: "binary",
|
|
1668
|
-
aliases: ["bin", "blob", "raw"],
|
|
1669
|
-
description: "Binary content",
|
|
1670
|
-
serializer: binarySerializer,
|
|
1671
|
-
resolver: binaryResolver
|
|
1672
|
-
};
|
|
1673
|
-
var builtinTypes = [textType, jsonType, binaryType];
|
|
1674
|
-
|
|
1675
|
-
class TypeHandlerChain {
|
|
1676
|
-
handlers = new Map;
|
|
1677
|
-
constructor() {
|
|
1678
|
-
for (const type of builtinTypes) {
|
|
1679
|
-
this.registerBuiltin(type);
|
|
1680
|
-
}
|
|
1681
|
-
}
|
|
1682
|
-
static create() {
|
|
1683
|
-
return new TypeHandlerChain;
|
|
1684
|
-
}
|
|
1685
|
-
registerBuiltin(type) {
|
|
1686
|
-
this.handlers.set(type.name, type);
|
|
1687
|
-
if (type.aliases) {
|
|
1688
|
-
for (const alias of type.aliases) {
|
|
1689
|
-
this.handlers.set(alias, type);
|
|
1690
|
-
}
|
|
1691
|
-
}
|
|
1692
|
-
}
|
|
1693
|
-
register(type) {
|
|
1694
|
-
if (this.handlers.has(type.name)) {
|
|
1695
|
-
throw new ResourceTypeError(`Type '${type.name}' is already registered`);
|
|
1696
|
-
}
|
|
1697
|
-
this.handlers.set(type.name, type);
|
|
1698
|
-
if (type.aliases) {
|
|
1699
|
-
for (const alias of type.aliases) {
|
|
1700
|
-
if (this.handlers.has(alias)) {
|
|
1701
|
-
throw new ResourceTypeError(`Alias '${alias}' conflicts with existing type or alias`);
|
|
1702
|
-
}
|
|
1703
|
-
this.handlers.set(alias, type);
|
|
1704
|
-
}
|
|
1705
|
-
}
|
|
1706
|
-
}
|
|
1707
|
-
canHandle(typeName) {
|
|
1708
|
-
return this.handlers.has(typeName);
|
|
1709
|
-
}
|
|
1710
|
-
getHandler(typeName) {
|
|
1711
|
-
return this.handlers.get(typeName);
|
|
1712
|
-
}
|
|
1713
|
-
getSupportedTypes() {
|
|
1714
|
-
return Array.from(this.handlers.keys());
|
|
1715
|
-
}
|
|
1716
|
-
async serialize(rxr) {
|
|
1717
|
-
const typeName = rxr.manifest.type;
|
|
1718
|
-
const handler = this.handlers.get(typeName);
|
|
1719
|
-
if (!handler) {
|
|
1720
|
-
throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
|
|
1721
|
-
}
|
|
1722
|
-
return handler.serializer.serialize(rxr);
|
|
1723
|
-
}
|
|
1724
|
-
async deserialize(data, manifest) {
|
|
1725
|
-
const typeName = manifest.type;
|
|
1726
|
-
const handler = this.handlers.get(typeName);
|
|
1727
|
-
if (!handler) {
|
|
1728
|
-
throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
|
|
1729
|
-
}
|
|
1730
|
-
return handler.serializer.deserialize(data, manifest);
|
|
1731
|
-
}
|
|
1732
|
-
async resolve(rxr) {
|
|
1733
|
-
const typeName = rxr.manifest.type;
|
|
1734
|
-
const handler = this.handlers.get(typeName);
|
|
1735
|
-
if (!handler) {
|
|
1736
|
-
throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);
|
|
1737
|
-
}
|
|
1738
|
-
return handler.resolver.resolve(rxr);
|
|
1739
|
-
}
|
|
1740
|
-
clearExtensions() {
|
|
1741
|
-
this.handlers.clear();
|
|
1742
|
-
for (const type of builtinTypes) {
|
|
1743
|
-
this.registerBuiltin(type);
|
|
1744
|
-
}
|
|
1745
|
-
}
|
|
1746
|
-
}
|
|
1747
|
-
var DEFAULT_PATH = `${homedir()}/.resourcex`;
|
|
1748
|
-
|
|
1749
|
-
class LocalRegistry {
|
|
1750
|
-
basePath;
|
|
1751
|
-
typeHandler;
|
|
1752
|
-
constructor(config) {
|
|
1753
|
-
this.basePath = config?.path ?? DEFAULT_PATH;
|
|
1754
|
-
this.typeHandler = TypeHandlerChain.create();
|
|
1755
|
-
if (config?.types) {
|
|
1756
|
-
for (const type of config.types) {
|
|
1757
|
-
this.typeHandler.register(type);
|
|
1758
|
-
}
|
|
1759
|
-
}
|
|
1760
|
-
}
|
|
1761
|
-
supportType(type) {
|
|
1762
|
-
this.typeHandler.register(type);
|
|
1763
|
-
}
|
|
1764
|
-
buildPath(locator, area) {
|
|
1765
|
-
const rxl = typeof locator === "string" ? parseRXL(locator) : locator;
|
|
1766
|
-
const resourceName = rxl.type ? `${rxl.name}.${rxl.type}` : rxl.name;
|
|
1767
|
-
const version = rxl.version ?? "latest";
|
|
1768
|
-
if (area === "local") {
|
|
1769
|
-
return join2(this.basePath, "local", resourceName, version);
|
|
1770
|
-
} else {
|
|
1771
|
-
const domain = rxl.domain ?? "localhost";
|
|
1772
|
-
let path = join2(this.basePath, "cache", domain);
|
|
1773
|
-
if (rxl.path) {
|
|
1774
|
-
path = join2(path, rxl.path);
|
|
1775
|
-
}
|
|
1776
|
-
return join2(path, resourceName, version);
|
|
1777
|
-
}
|
|
1778
|
-
}
|
|
1779
|
-
isLocalOnlyLocator(locator) {
|
|
1780
|
-
const rxl = typeof locator === "string" ? parseRXL(locator) : locator;
|
|
1781
|
-
return !rxl.domain || rxl.domain === "localhost";
|
|
1782
|
-
}
|
|
1783
|
-
async existsAt(resourcePath) {
|
|
1784
|
-
const manifestPath = join2(resourcePath, "manifest.json");
|
|
1785
|
-
try {
|
|
1786
|
-
await stat2(manifestPath);
|
|
1787
|
-
return true;
|
|
1788
|
-
} catch {
|
|
1789
|
-
return false;
|
|
1790
|
-
}
|
|
1791
|
-
}
|
|
1792
|
-
async findArea(locator) {
|
|
1793
|
-
const localPath = this.buildPath(locator, "local");
|
|
1794
|
-
if (await this.existsAt(localPath)) {
|
|
1795
|
-
return "local";
|
|
1796
|
-
}
|
|
1797
|
-
const cachePath = this.buildPath(locator, "cache");
|
|
1798
|
-
if (await this.existsAt(cachePath)) {
|
|
1799
|
-
return "cache";
|
|
1800
|
-
}
|
|
1801
|
-
return null;
|
|
1802
|
-
}
|
|
1803
|
-
async loadFrom(resourcePath) {
|
|
1804
|
-
const manifestPath = join2(resourcePath, "manifest.json");
|
|
1805
|
-
const manifestContent = await readFile2(manifestPath, "utf-8");
|
|
1806
|
-
const manifestData = JSON.parse(manifestContent);
|
|
1807
|
-
const manifest = createRXM(manifestData);
|
|
1808
|
-
const contentPath = join2(resourcePath, "content.tar.gz");
|
|
1809
|
-
const data = await readFile2(contentPath);
|
|
1810
|
-
return this.typeHandler.deserialize(data, manifest);
|
|
1811
|
-
}
|
|
1812
|
-
async pull(_locator, _options) {
|
|
1813
|
-
throw new RegistryError("Pull not implemented yet - see issue #018");
|
|
1814
|
-
}
|
|
1815
|
-
async publish(_resource, _options) {
|
|
1816
|
-
throw new RegistryError("Publish not implemented yet - see issue #018");
|
|
1817
|
-
}
|
|
1818
|
-
async link(resource) {
|
|
1819
|
-
const locator = resource.manifest.toLocator();
|
|
1820
|
-
const resourcePath = this.buildPath(locator, "local");
|
|
1821
|
-
await mkdir2(resourcePath, { recursive: true });
|
|
1822
|
-
const manifestPath = join2(resourcePath, "manifest.json");
|
|
1823
|
-
await writeFile2(manifestPath, JSON.stringify(resource.manifest.toJSON(), null, 2), "utf-8");
|
|
1824
|
-
const contentPath = join2(resourcePath, "content.tar.gz");
|
|
1825
|
-
const serialized = await this.typeHandler.serialize(resource);
|
|
1826
|
-
await writeFile2(contentPath, serialized);
|
|
1827
|
-
}
|
|
1828
|
-
async get(locator) {
|
|
1829
|
-
const area = await this.findArea(locator);
|
|
1830
|
-
if (!area) {
|
|
1831
|
-
throw new RegistryError(`Resource not found: ${locator}`);
|
|
1832
|
-
}
|
|
1833
|
-
const resourcePath = this.buildPath(locator, area);
|
|
1834
|
-
return this.loadFrom(resourcePath);
|
|
1835
|
-
}
|
|
1836
|
-
async resolve(locator) {
|
|
1837
|
-
const rxr = await this.get(locator);
|
|
1838
|
-
return this.typeHandler.resolve(rxr);
|
|
1839
|
-
}
|
|
1840
|
-
async exists(locator) {
|
|
1841
|
-
const area = await this.findArea(locator);
|
|
1842
|
-
return area !== null;
|
|
1843
|
-
}
|
|
1844
|
-
async delete(locator) {
|
|
1845
|
-
const isLocal = this.isLocalOnlyLocator(locator);
|
|
1846
|
-
if (isLocal) {
|
|
1847
|
-
const localPath = this.buildPath(locator, "local");
|
|
1848
|
-
if (await this.existsAt(localPath)) {
|
|
1849
|
-
await rm2(localPath, { recursive: true, force: true });
|
|
1850
|
-
}
|
|
1851
|
-
} else {
|
|
1852
|
-
const cachePath = this.buildPath(locator, "cache");
|
|
1853
|
-
if (await this.existsAt(cachePath)) {
|
|
1854
|
-
await rm2(cachePath, { recursive: true, force: true });
|
|
1855
|
-
}
|
|
1856
|
-
}
|
|
1857
|
-
}
|
|
1858
|
-
async search(options) {
|
|
1859
|
-
const { query, limit, offset = 0 } = options ?? {};
|
|
1860
|
-
const locators = [];
|
|
1861
|
-
const localDir = join2(this.basePath, "local");
|
|
1862
|
-
try {
|
|
1863
|
-
const localEntries = await this.listRecursive(localDir);
|
|
1864
|
-
for (const entry of localEntries) {
|
|
1865
|
-
if (!entry.endsWith("manifest.json"))
|
|
1866
|
-
continue;
|
|
1867
|
-
const relativePath = entry.slice(localDir.length + 1);
|
|
1868
|
-
const rxl = this.parseLocalEntry(relativePath);
|
|
1869
|
-
if (rxl)
|
|
1870
|
-
locators.push(rxl);
|
|
1871
|
-
}
|
|
1872
|
-
} catch {}
|
|
1873
|
-
const cacheDir = join2(this.basePath, "cache");
|
|
1874
|
-
try {
|
|
1875
|
-
const cacheEntries = await this.listRecursive(cacheDir);
|
|
1876
|
-
for (const entry of cacheEntries) {
|
|
1877
|
-
if (!entry.endsWith("manifest.json"))
|
|
1878
|
-
continue;
|
|
1879
|
-
const relativePath = entry.slice(cacheDir.length + 1);
|
|
1880
|
-
const rxl = this.parseCacheEntry(relativePath);
|
|
1881
|
-
if (rxl)
|
|
1882
|
-
locators.push(rxl);
|
|
1883
|
-
}
|
|
1884
|
-
} catch {}
|
|
1885
|
-
let filtered = locators;
|
|
1886
|
-
if (query) {
|
|
1887
|
-
const lowerQuery = query.toLowerCase();
|
|
1888
|
-
filtered = locators.filter((rxl) => {
|
|
1889
|
-
const searchText = `${rxl.domain ?? ""} ${rxl.path ?? ""} ${rxl.name} ${rxl.type ?? ""}`.toLowerCase();
|
|
1890
|
-
return searchText.includes(lowerQuery);
|
|
1891
|
-
});
|
|
1892
|
-
}
|
|
1893
|
-
let result = filtered.slice(offset);
|
|
1894
|
-
if (limit !== undefined) {
|
|
1895
|
-
result = result.slice(0, limit);
|
|
1896
|
-
}
|
|
1897
|
-
return result;
|
|
1898
|
-
}
|
|
1899
|
-
async listRecursive(dir) {
|
|
1900
|
-
const results = [];
|
|
1901
|
-
try {
|
|
1902
|
-
const entries = await readdir2(dir, { withFileTypes: true });
|
|
1903
|
-
for (const entry of entries) {
|
|
1904
|
-
const fullPath = join2(dir, entry.name);
|
|
1905
|
-
if (entry.isDirectory()) {
|
|
1906
|
-
const subEntries = await this.listRecursive(fullPath);
|
|
1907
|
-
results.push(...subEntries);
|
|
1908
|
-
} else {
|
|
1909
|
-
results.push(fullPath);
|
|
1910
|
-
}
|
|
1911
|
-
}
|
|
1912
|
-
} catch {}
|
|
1913
|
-
return results;
|
|
1914
|
-
}
|
|
1915
|
-
parseLocalEntry(entry) {
|
|
1916
|
-
const dirPath = entry.replace(/[/\\]manifest\.json$/, "");
|
|
1917
|
-
const parts = dirPath.split(/[/\\]/);
|
|
1918
|
-
if (parts.length < 2) {
|
|
1919
|
-
return null;
|
|
1920
|
-
}
|
|
1921
|
-
const version = parts.pop();
|
|
1922
|
-
const nameTypePart = parts.shift();
|
|
1923
|
-
const { name, type } = this.parseNameType(nameTypePart);
|
|
1924
|
-
let locatorStr = name;
|
|
1925
|
-
if (type) {
|
|
1926
|
-
locatorStr += `.${type}`;
|
|
1927
|
-
}
|
|
1928
|
-
locatorStr += `@${version}`;
|
|
1929
|
-
try {
|
|
1930
|
-
return parseRXL(locatorStr);
|
|
1931
|
-
} catch {
|
|
1932
|
-
return null;
|
|
1933
|
-
}
|
|
1934
|
-
}
|
|
1935
|
-
parseCacheEntry(entry) {
|
|
1936
|
-
const dirPath = entry.replace(/[/\\]manifest\.json$/, "");
|
|
1937
|
-
const parts = dirPath.split(/[/\\]/);
|
|
1938
|
-
if (parts.length < 3) {
|
|
1939
|
-
return null;
|
|
1940
|
-
}
|
|
1941
|
-
const version = parts.pop();
|
|
1942
|
-
const nameTypePart = parts.pop();
|
|
1943
|
-
const domain = parts.shift();
|
|
1944
|
-
const path = parts.length > 0 ? parts.join("/") : undefined;
|
|
1945
|
-
const { name, type } = this.parseNameType(nameTypePart);
|
|
1946
|
-
let locatorStr = domain;
|
|
1947
|
-
if (path) {
|
|
1948
|
-
locatorStr += `/${path}`;
|
|
1949
|
-
}
|
|
1950
|
-
locatorStr += `/${name}`;
|
|
1951
|
-
if (type) {
|
|
1952
|
-
locatorStr += `.${type}`;
|
|
1953
|
-
}
|
|
1954
|
-
locatorStr += `@${version}`;
|
|
1955
|
-
try {
|
|
1956
|
-
return parseRXL(locatorStr);
|
|
1957
|
-
} catch {
|
|
1958
|
-
return null;
|
|
1959
|
-
}
|
|
1960
|
-
}
|
|
1961
|
-
parseNameType(nameTypePart) {
|
|
1962
|
-
const dotIndex = nameTypePart.lastIndexOf(".");
|
|
1963
|
-
if (dotIndex !== -1) {
|
|
1964
|
-
return {
|
|
1965
|
-
name: nameTypePart.substring(0, dotIndex),
|
|
1966
|
-
type: nameTypePart.substring(dotIndex + 1)
|
|
1967
|
-
};
|
|
1968
|
-
} else {
|
|
1969
|
-
return { name: nameTypePart, type: undefined };
|
|
1970
|
-
}
|
|
1971
|
-
}
|
|
1972
|
-
}
|
|
1973
|
-
|
|
1974
|
-
// Registry client talking to a remote HTTP endpoint. Read-only: pull,
// publish, link and delete all throw; reads go through fetch().
class RemoteRegistry {
  // Base URL of the registry API (trailing slash stripped in constructor).
  endpoint;
  // Chain used to deserialize/resolve fetched resources.
  typeHandler;

  constructor(config) {
    // Normalize so the template literals below can append "/..." safely.
    this.endpoint = config.endpoint.replace(/\/$/, "");
    this.typeHandler = TypeHandlerChain.create();
  }

  /** Register an additional resource type with the handler chain. */
  supportType(type) {
    this.typeHandler.register(type);
  }

  // --- Mutating operations are unsupported over this transport. ---

  async pull(_locator, _options) {
    throw new RegistryError("Cannot pull to remote registry - use local registry for pulling");
  }

  async publish(_resource, _options) {
    throw new RegistryError("Remote registry publish not implemented yet");
  }

  async link(_resource) {
    throw new RegistryError("Cannot link to remote registry - use local registry for linking");
  }

  async delete(_locator) {
    throw new RegistryError("Cannot delete from remote registry - use local registry for deletion");
  }

  /**
   * Fetch the manifest and content for a locator and deserialize them into
   * a resource. Throws RegistryError on 404 or any other non-OK response.
   */
  async get(locator) {
    const encoded = encodeURIComponent(locator);
    const manifestResponse = await fetch(`${this.endpoint}/resource?locator=${encoded}`);
    if (!manifestResponse.ok) {
      const message = manifestResponse.status === 404
        ? `Resource not found: ${locator}`
        : `Failed to fetch resource: ${manifestResponse.statusText}`;
      throw new RegistryError(message);
    }
    const manifest = createRXM(await manifestResponse.json());
    const contentResponse = await fetch(`${this.endpoint}/content?locator=${encoded}`);
    if (!contentResponse.ok) {
      throw new RegistryError(`Failed to fetch content: ${contentResponse.statusText}`);
    }
    const contentBuffer = Buffer.from(await contentResponse.arrayBuffer());
    return this.typeHandler.deserialize(contentBuffer, manifest);
  }

  /** Fetch a resource and resolve it through the type-handler chain. */
  async resolve(locator) {
    return this.typeHandler.resolve(await this.get(locator));
  }

  /** True only when the server answers OK with `{ exists: true }`. */
  async exists(locator) {
    const response = await fetch(`${this.endpoint}/exists?locator=${encodeURIComponent(locator)}`);
    if (!response.ok) {
      return false;
    }
    const data = await response.json();
    return data.exists === true;
  }

  /** Server-side search; returns parsed RXL locators. */
  async search(options) {
    const params = new URLSearchParams();
    if (options?.query) {
      params.set("query", options.query);
    }
    if (options?.limit !== undefined) {
      params.set("limit", String(options.limit));
    }
    if (options?.offset !== undefined) {
      params.set("offset", String(options.offset));
    }
    const response = await fetch(`${this.endpoint}/search?${params.toString()}`);
    if (!response.ok) {
      throw new RegistryError(`Search failed: ${response.statusText}`);
    }
    const data = await response.json();
    return (data.results || []).map((locator) => parseRXL(locator));
  }
}
|
|
2045
|
-
/**
 * Discover the registries advertised by a domain via its well-known URI
 * (`https://<domain>/.well-known/resourcex`).
 *
 * @param domain - domain to probe (used verbatim in the HTTPS URL).
 * @returns `{ domain, registries }` where `registries` is a non-empty array
 *   of registry URLs, exactly as served by the endpoint.
 * @throws RegistryError on a non-OK response, a malformed/empty `registries`
 *   field, or any network/parse failure (wrapped with the original reason).
 */
async function discoverRegistry(domain) {
  const wellKnownUrl = `https://${domain}/.well-known/resourcex`;
  try {
    const response = await fetch(wellKnownUrl);
    if (!response.ok) {
      throw new RegistryError(`Well-known discovery failed for ${domain}: ${response.statusText}`);
    }
    const data = await response.json();
    if (!data.registries || !Array.isArray(data.registries) || data.registries.length === 0) {
      throw new RegistryError(`Invalid well-known response for ${domain}: missing or empty registries`);
    }
    return {
      domain,
      registries: data.registries
    };
  } catch (error) {
    if (error instanceof RegistryError) {
      throw error;
    }
    // Fix: anything can be thrown — reading `.message` off a non-Error value
    // yields "undefined" (or throws on null); stringify defensively instead.
    const reason = error instanceof Error ? error.message : String(error);
    throw new RegistryError(`Failed to discover registry for ${domain}: ${reason}`);
  }
}
|
|
2067
|
-
// Root directory for cached git-registry clones; each repo gets its own
// subdirectory beneath this (see GitRegistry.buildCacheDir).
var DEFAULT_GIT_CACHE = `${homedir2()}/.resourcex/.git-cache`;
|
|
2068
|
-
|
|
2069
|
-
// Read-only registry backed by a git repository. The repo is shallow-cloned
// into a per-URL directory under DEFAULT_GIT_CACHE and refreshed on access;
// pull/publish/link/delete all throw.
//
// NOTE(review): git commands below are assembled by string interpolation of
// config-supplied values (url, ref) and run through execSync — a hostile
// config could inject shell syntax. Confirm inputs are trusted, or consider
// execFileSync with an argument array.
class GitRegistry {
  url;           // git remote URL or local path, as given in config
  ref;           // branch/ref to track (defaults to "main")
  basePath;      // directory inside the repo holding resources (default ".resourcex")
  cacheDir;      // local clone location, derived from the URL
  typeHandler;   // chain used to deserialize/resolve resources
  trustedDomain; // when set, manifests claiming a different domain are rejected
  constructor(config) {
    this.url = config.url;
    this.ref = config.ref ?? "main";
    this.basePath = config.basePath ?? ".resourcex";
    this.typeHandler = TypeHandlerChain.create();
    this.trustedDomain = config.domain;
    // Remote repos MUST be pinned to a trusted domain so fetched manifests
    // cannot impersonate another domain (enforced again in get()).
    if (this.isRemoteUrl(config.url) && !config.domain) {
      throw new RegistryError(`Remote git registry requires a trusted domain.

` + `Either:
` + `1. Use discoverRegistry("your-domain.com") to auto-bind domain
` + `2. Explicitly set domain: createRegistry({ type: "git", url: "...", domain: "your-domain.com" })

` + `This ensures resources from untrusted sources cannot impersonate your domain.`);
    }
    this.cacheDir = this.buildCacheDir(config.url);
  }
  // SSH (git@...) and HTTP(S) URLs count as remote; anything else is local.
  isRemoteUrl(url) {
    return url.startsWith("git@") || url.startsWith("https://") || url.startsWith("http://");
  }
  // Derive a flat, filesystem-safe cache directory name from the URL:
  // scp-style "git@host:owner/repo" becomes "host/owner/repo", a trailing
  // ".git" is stripped, then slashes collapse to dashes.
  buildCacheDir(url) {
    let normalized = url;
    if (url.startsWith("git@")) {
      normalized = url.slice(4).replace(":", "/");
    }
    if (normalized.endsWith(".git")) {
      normalized = normalized.slice(0, -4);
    }
    const dirName = normalized.replace(/\//g, "-");
    return join22(DEFAULT_GIT_CACHE, dirName);
  }
  // Register an additional resource type with the handler chain.
  supportType(type) {
    this.typeHandler.register(type);
  }
  // Make sure the cache clone exists and is up to date: if a .git dir is
  // present, fetch the tracked ref and check out FETCH_HEAD; otherwise
  // shallow-clone the ref fresh. Any failure on the update path falls
  // through to the clone branch via the catch.
  async ensureCloned() {
    const gitDir = join22(this.cacheDir, ".git");
    try {
      await stat22(gitDir);
      this.gitExec(`fetch origin ${this.ref}`);
      this.gitExec(`checkout FETCH_HEAD`);
    } catch {
      await mkdir22(DEFAULT_GIT_CACHE, { recursive: true });
      execSync(`git clone --depth 1 --branch ${this.ref} ${this.url} ${this.cacheDir}`, {
        stdio: "pipe"
      });
    }
  }
  // Run a git subcommand inside the cache clone (stdout/stderr suppressed).
  // See the class-level NOTE about interpolation into the shell command.
  gitExec(command) {
    execSync(`git -C ${this.cacheDir} ${command}`, { stdio: "pipe" });
  }
  // Map a locator to its on-disk directory inside the clone:
  // <cacheDir>/<basePath>/<domain>[/<path>]/<name>[.<type>]/<version>
  buildResourcePath(locator) {
    const rxl = parseRXL(locator);
    const domain = rxl.domain ?? "localhost";
    const version = rxl.version ?? "latest";
    let path = join22(this.cacheDir, this.basePath, domain);
    if (rxl.path) {
      path = join22(path, rxl.path);
    }
    const resourceName = rxl.type ? `${rxl.name}.${rxl.type}` : rxl.name;
    return join22(path, resourceName, version);
  }
  // Load manifest.json + content.tar.gz for a locator and deserialize them.
  // Throws RegistryError if the manifest is missing or claims a domain other
  // than the trusted one.
  async get(locator) {
    await this.ensureCloned();
    const resourcePath = this.buildResourcePath(locator);
    const manifestPath = join22(resourcePath, "manifest.json");
    try {
      await stat22(manifestPath);
    } catch {
      throw new RegistryError(`Resource not found: ${locator}`);
    }
    const manifestContent = await readFile22(manifestPath, "utf-8");
    const manifestData = JSON.parse(manifestContent);
    const manifest = createRXM(manifestData);
    if (this.trustedDomain && manifest.domain !== this.trustedDomain) {
      throw new RegistryError(`Untrusted domain: resource claims "${manifest.domain}" but registry only trusts "${this.trustedDomain}"`);
    }
    const contentPath = join22(resourcePath, "content.tar.gz");
    const data = await readFile22(contentPath);
    return this.typeHandler.deserialize(data, manifest);
  }
  // Fetch a resource and resolve it through the type-handler chain.
  async resolve(locator) {
    const rxr = await this.get(locator);
    return this.typeHandler.resolve(rxr);
  }
  // True iff the locator's manifest.json exists in the (refreshed) clone;
  // any error (clone failure, bad locator, missing file) reads as false.
  async exists(locator) {
    try {
      await this.ensureCloned();
      const resourcePath = this.buildResourcePath(locator);
      const manifestPath = join22(resourcePath, "manifest.json");
      await stat22(manifestPath);
      return true;
    } catch {
      return false;
    }
  }
  // Walk the clone's basePath for manifests, parse each into an RXL, then
  // filter by case-insensitive substring query and apply offset/limit.
  async search(options) {
    await this.ensureCloned();
    const { query, limit, offset = 0 } = options ?? {};
    const locators = [];
    const baseDir = join22(this.cacheDir, this.basePath);
    try {
      const entries = await this.listRecursive(baseDir);
      for (const entry of entries) {
        if (!entry.endsWith("manifest.json"))
          continue;
        const relativePath = entry.slice(baseDir.length + 1);
        const rxl = this.parseEntryToRXL(relativePath);
        if (rxl)
          locators.push(rxl);
      }
    } catch {
      return [];
    }
    let filtered = locators;
    if (query) {
      const lowerQuery = query.toLowerCase();
      filtered = locators.filter((rxl) => {
        const searchText = `${rxl.domain ?? ""} ${rxl.path ?? ""} ${rxl.name} ${rxl.type ?? ""}`.toLowerCase();
        return searchText.includes(lowerQuery);
      });
    }
    let result = filtered.slice(offset);
    if (limit !== undefined) {
      result = result.slice(0, limit);
    }
    return result;
  }
  // Depth-first listing of all files beneath dir; unreadable directories
  // are skipped silently (best-effort).
  async listRecursive(dir) {
    const results = [];
    try {
      const entries = await readdir22(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = join22(dir, entry.name);
        if (entry.isDirectory()) {
          const subEntries = await this.listRecursive(fullPath);
          results.push(...subEntries);
        } else {
          results.push(fullPath);
        }
      }
    } catch {}
    return results;
  }
  // Invert buildResourcePath: turn a manifest path relative to basePath
  // ("<domain>/[<path>/]<name>[.<type>]/<version>/manifest.json") back into
  // a parsed RXL, or null when the shape or locator is invalid.
  parseEntryToRXL(entry) {
    const dirPath = entry.replace(/[/\\]manifest\.json$/, "");
    const parts = dirPath.split(/[/\\]/);
    if (parts.length < 3)
      return null;
    const version = parts.pop();
    const nameTypePart = parts.pop();
    const domain = parts.shift();
    const path = parts.length > 0 ? parts.join("/") : undefined;
    // Split name from type on the LAST dot so dotted names survive.
    const dotIndex = nameTypePart.lastIndexOf(".");
    let name;
    let type;
    if (dotIndex !== -1) {
      name = nameTypePart.substring(0, dotIndex);
      type = nameTypePart.substring(dotIndex + 1);
    } else {
      name = nameTypePart;
      type = undefined;
    }
    let locatorStr = domain;
    if (path)
      locatorStr += `/${path}`;
    locatorStr += `/${name}`;
    if (type)
      locatorStr += `.${type}`;
    locatorStr += `@${version}`;
    try {
      return parseRXL(locatorStr);
    } catch {
      return null;
    }
  }
  // --- Mutating operations are unsupported: this registry is read-only. ---
  async pull(_locator, _options) {
    throw new RegistryError("GitRegistry is read-only - use LocalRegistry.pull()");
  }
  async publish(_resource, _options) {
    throw new RegistryError("GitRegistry is read-only - use LocalRegistry.publish()");
  }
  async link(_resource) {
    throw new RegistryError("GitRegistry is read-only - use LocalRegistry.link()");
  }
  async delete(_locator) {
    throw new RegistryError("GitRegistry is read-only - use LocalRegistry.delete()");
  }
}
|
|
2264
|
-
// Factory: pick the registry implementation from the config shape —
// remote endpoint, git URL, or local storage (the default).
function createRegistry(config) {
  let RegistryCtor;
  if (isRemoteConfig(config)) {
    RegistryCtor = RemoteRegistry;
  } else if (isGitConfig(config)) {
    RegistryCtor = GitRegistry;
  } else {
    RegistryCtor = LocalRegistry;
  }
  return new RegistryCtor(config);
}
|
|
2273
|
-
|
|
2274
|
-
// src/transport/rxr.ts
// Module-level cache of Registry instances keyed by domain, shared by every
// RxrTransport constructed without an explicit registry (see getRegistry /
// clearRegistryCache). Idiom fix: call the Map constructor with parentheses.
var registryCache = new Map();
|
|
2276
|
-
|
|
2277
|
-
// Read-only transport that serves individual files out of published
// resources. Locations look like
//   "[domain/][path/]name[.type]@version/internal/path".
class RxrTransport {
  registry;
  name = "rxr";
  /**
   * @param registry - optional explicit registry; when omitted, a registry
   *   is discovered per domain and cached module-wide in registryCache.
   */
  constructor(registry) {
    this.registry = registry;
  }
  /**
   * Fetch one file from inside a resource.
   * @throws TransportError when the internal path is absent.
   */
  async get(location, _params) {
    const { domain, rxl, internalPath } = this.parseLocation(location);
    const registry = await this.getRegistry(domain);
    const rxr = await registry.get(rxl);
    const files = await rxr.content.files();
    const file = files.get(internalPath);
    if (!file) {
      throw new TransportError(`File not found in resource: ${internalPath}`, this.name);
    }
    return {
      content: file,
      metadata: { type: "file", size: file.length }
    };
  }
  async set(_location, _content, _params) {
    throw new TransportError("RXR transport is read-only", this.name);
  }
  /** True iff the location parses, resolves, and contains the internal path. */
  async exists(location) {
    try {
      const { domain, rxl, internalPath } = this.parseLocation(location);
      const registry = await this.getRegistry(domain);
      const rxr = await registry.get(rxl);
      const files = await rxr.content.files();
      return files.has(internalPath);
    } catch {
      return false;
    }
  }
  async delete(_location) {
    throw new TransportError("RXR transport is read-only", this.name);
  }
  /**
   * Resolve the registry for a domain: the explicit instance wins, then the
   * module-level cache, then a local registry for "localhost", otherwise
   * well-known discovery (result cached per domain).
   */
  async getRegistry(domain) {
    if (this.registry) {
      return this.registry;
    }
    if (registryCache.has(domain)) {
      return registryCache.get(domain);
    }
    let registry;
    if (domain === "localhost") {
      registry = createRegistry();
    } else {
      try {
        const discovery = await discoverRegistry(domain);
        const registryUrl = discovery.registries[0];
        if (this.isGitUrl(registryUrl)) {
          registry = createRegistry({
            type: "git",
            url: registryUrl,
            domain: discovery.domain
          });
        } else {
          registry = createRegistry({ endpoint: registryUrl });
        }
      } catch (error) {
        throw new TransportError(`Failed to discover registry for domain ${domain}: ${error.message}`, this.name);
      }
    }
    registryCache.set(domain, registry);
    return registry;
  }
  /** Heuristic: SSH-style or ".git"-suffixed URLs are git registries. */
  isGitUrl(url) {
    return url.startsWith("git@") || url.endsWith(".git");
  }
  /**
   * Split "domain/path/name@version/internal..." into its three parts.
   *
   * Bug fix: the leading segment is only a domain when the first "/" comes
   * BEFORE the "@"; otherwise the location has no domain (e.g.
   * "name@1.0.0/file") and must fall back to "localhost". The previous check
   * (`firstSlash > 0`) made that fallback unreachable — once
   * slashAfterVersion exists, a "/" always exists — so domain-less locations
   * mis-parsed their domain as "name@version".
   */
  parseLocation(location) {
    const atIndex = location.indexOf("@");
    if (atIndex === -1) {
      throw new TransportError(`Invalid RXR location (missing @version): ${location}`, this.name);
    }
    const slashAfterVersion = location.indexOf("/", atIndex);
    if (slashAfterVersion === -1) {
      throw new TransportError(`Invalid RXR location (missing internal path): ${location}`, this.name);
    }
    const firstSlash = location.indexOf("/");
    const domain = firstSlash > 0 && firstSlash < atIndex ? location.slice(0, firstSlash) : "localhost";
    return {
      domain,
      rxl: location.slice(0, slashAfterVersion),
      internalPath: location.slice(slashAfterVersion + 1)
    };
  }
}
|
|
2365
|
-
// Drop every cached per-domain registry so the next RxrTransport lookup
// re-resolves (and, for remote domains, re-discovers) its registry.
function clearRegistryCache() {
  registryCache.clear();
}
|
|
2368
322
|
// src/semantic/text.ts
|
|
2369
323
|
class TextSemanticHandler {
|
|
2370
324
|
name = "text";
|
|
@@ -2564,20 +518,18 @@ function createARP(config) {
|
|
|
2564
518
|
}
|
|
2565
519
|
|
|
2566
520
|
// src/index.ts
|
|
2567
|
-
var VERSION = "2.
|
|
521
|
+
var VERSION = "2.1.0";
|
|
2568
522
|
export {
|
|
2569
523
|
textSemantic,
|
|
2570
524
|
httpsTransport,
|
|
2571
525
|
httpTransport,
|
|
2572
526
|
fileTransport,
|
|
2573
527
|
createARP,
|
|
2574
|
-
clearRegistryCache,
|
|
2575
528
|
binarySemantic,
|
|
2576
529
|
VERSION,
|
|
2577
530
|
TransportError,
|
|
2578
531
|
TextSemanticHandler,
|
|
2579
532
|
SemanticError,
|
|
2580
|
-
RxrTransport,
|
|
2581
533
|
ParseError,
|
|
2582
534
|
HttpTransportHandler,
|
|
2583
535
|
FileTransportHandler,
|
|
@@ -2586,4 +538,4 @@ export {
|
|
|
2586
538
|
ARP
|
|
2587
539
|
};
|
|
2588
540
|
|
|
2589
|
-
//# debugId=
|
|
541
|
+
//# debugId=C0084E960EA7C5F164756E2164756E21
|