@resourcexjs/registry 2.4.0 → 2.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js.map +1 -1
- package/package.json +5 -5
package/dist/index.js.map
CHANGED
@@ -7,7 +7,7 @@
7 7 |
"// src/bundler.ts\nimport { readFile } from \"node:fs/promises\";\nimport { resolve, isAbsolute } from \"node:path\";\nasync function bundleResourceType(sourcePath, basePath) {\n const fullPath = isAbsolute(sourcePath) ? sourcePath : resolve(basePath ?? process.cwd(), sourcePath);\n const source = await readFile(fullPath, \"utf-8\");\n const result = await Bun.build({\n stdin: {\n contents: source,\n resolveDir: resolve(fullPath, \"..\"),\n loader: \"ts\"\n },\n target: \"bun\",\n format: \"esm\",\n minify: false\n });\n if (!result.success) {\n const errors = result.logs.map((log) => log.message).join(`\n`);\n throw new Error(`Failed to bundle ${sourcePath}: ${errors}`);\n }\n const bundledCode = await result.outputs[0].text();\n const tempModule = await import(fullPath);\n const typeSource = tempModule.default;\n if (!typeSource.name) {\n throw new Error(`Resource type at ${sourcePath} must have a name`);\n }\n if (typeof typeSource.resolve !== \"function\") {\n throw new Error(`Resource type at ${sourcePath} must have a resolve function`);\n }\n return {\n name: typeSource.name,\n aliases: typeSource.aliases,\n description: typeSource.description ?? \"\",\n schema: typeSource.schema,\n code: bundledCode\n };\n}\n// ../core/dist/index.js\nimport { gzip, gunzip } from \"node:zlib\";\nimport { promisify } from \"node:util\";\n\nclass ResourceXError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ResourceXError\";\n }\n}\nvar BLOCK_SIZE = 512;\nvar ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);\nvar EMPTY = new Uint8Array(0);\nvar encoder = new TextEncoder;\nvar decoder = new TextDecoder;\nvar EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);\nvar gzipAsync = promisify(gzip);\nvar gunzipAsync = promisify(gunzip);\n\n// src/errors.ts\nclass ResourceTypeError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ResourceTypeError\";\n }\n}\n// src/builtinTypes.ts\nvar textType = {\n name: \"text\",\n aliases: [\"txt\", \"plaintext\"],\n description: \"Plain text content\",\n code: `// @resolver: text_type_default\n// src/builtins/text.type.ts\nvar text_type_default = {\n name: \"text\",\n aliases: [\"txt\", \"plaintext\"],\n description: \"Plain text content\",\n async resolve(ctx) {\n const content = ctx.files[\"content\"];\n return new TextDecoder().decode(content);\n }\n};`\n};\nvar jsonType = {\n name: \"json\",\n aliases: [\"config\", \"manifest\"],\n description: \"JSON content\",\n code: `// @resolver: json_type_default\n// src/builtins/json.type.ts\nvar json_type_default = {\n name: \"json\",\n aliases: [\"config\", \"manifest\"],\n description: \"JSON content\",\n async resolve(ctx) {\n const content = ctx.files[\"content\"];\n return JSON.parse(new TextDecoder().decode(content));\n }\n};`\n};\nvar binaryType = {\n name: \"binary\",\n aliases: [\"bin\", \"blob\", \"raw\"],\n description: \"Binary content\",\n code: `// @resolver: binary_type_default\n// src/builtins/binary.type.ts\nvar binary_type_default = {\n name: \"binary\",\n aliases: [\"bin\", \"blob\", \"raw\"],\n description: \"Binary content\",\n async resolve(ctx) {\n return ctx.files[\"content\"];\n }\n};`\n};\nvar builtinTypes = [textType, jsonType, binaryType];\n\n// src/TypeHandlerChain.ts\nclass TypeHandlerChain {\n handlers = new Map;\n constructor() {\n for (const type of builtinTypes) {\n this.registerInternal(type);\n }\n }\n static create() {\n return new TypeHandlerChain;\n }\n registerInternal(type) {\n this.handlers.set(type.name, type);\n if 
(type.aliases) {\n for (const alias of type.aliases) {\n this.handlers.set(alias, type);\n }\n }\n }\n register(type) {\n if (this.handlers.has(type.name)) {\n throw new ResourceTypeError(`Type '${type.name}' is already registered`);\n }\n this.handlers.set(type.name, type);\n if (type.aliases) {\n for (const alias of type.aliases) {\n if (this.handlers.has(alias)) {\n throw new ResourceTypeError(`Alias '${alias}' conflicts with existing type or alias`);\n }\n this.handlers.set(alias, type);\n }\n }\n }\n canHandle(typeName) {\n return this.handlers.has(typeName);\n }\n getHandler(typeName) {\n const handler = this.handlers.get(typeName);\n if (!handler) {\n throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);\n }\n return handler;\n }\n getHandlerOrUndefined(typeName) {\n return this.handlers.get(typeName);\n }\n getSupportedTypes() {\n return Array.from(this.handlers.keys());\n }\n clear() {\n this.handlers.clear();\n }\n}\nexport {\n textType,\n jsonType,\n bundleResourceType,\n builtinTypes,\n binaryType,\n TypeHandlerChain,\n ResourceTypeError\n};\n\n//# debugId=9BB0B47B5E1F216964756E2164756E21\n",
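For reference, a minimal usage sketch of the TypeHandlerChain API visible in this map entry. The import specifier is an assumption (the bundle above shows only the export list), and the "markdown" type is a hypothetical example:

import { TypeHandlerChain, ResourceTypeError } from "@resourcexjs/registry"; // specifier assumed

const chain = TypeHandlerChain.create(); // built-ins (text, json, binary) are pre-registered
console.log(chain.getSupportedTypes());  // type names plus aliases: "text", "txt", "json", ...

try {
  chain.register({
    name: "markdown",                 // hypothetical custom type
    aliases: ["md"],
    description: "Markdown content",
    code: "/* bundled resolver source */",
  });
} catch (err) {
  if (err instanceof ResourceTypeError) {
    // register() throws when the name or any alias is already taken
  }
}

const handler = chain.getHandler("md"); // alias lookup resolves to the markdown type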
8 8 |
"// src/FolderLoader.ts\nimport { join, relative } from \"node:path\";\nimport { stat, readFile, readdir } from \"node:fs/promises\";\n\n// ../core/dist/index.js\nimport { gzip, gunzip } from \"node:zlib\";\nimport { promisify } from \"node:util\";\n\nclass ResourceXError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ResourceXError\";\n }\n}\nclass ManifestError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ManifestError\";\n }\n}\n\nclass ContentError extends ResourceXError {\n constructor(message) {\n super(message);\n this.name = \"ContentError\";\n }\n}\n\nclass RXLImpl {\n domain;\n path;\n name;\n type;\n version;\n constructor(parts) {\n this.domain = parts.domain;\n this.path = parts.path;\n this.name = parts.name;\n this.type = parts.type;\n this.version = parts.version;\n }\n toString() {\n let result = \"\";\n if (this.domain) {\n result += this.domain + \"/\";\n if (this.path) {\n result += this.path + \"/\";\n }\n }\n result += this.name;\n if (this.type) {\n result += \".\" + this.type;\n }\n if (this.version) {\n result += \"@\" + this.version;\n }\n return result;\n }\n}\nfunction isDomain(str) {\n if (str === \"localhost\")\n return true;\n return str.includes(\".\");\n}\nfunction parseRXL(locator) {\n let remaining = locator;\n let version;\n let type;\n let domain;\n let path;\n let name;\n const atIndex = remaining.indexOf(\"@\");\n if (atIndex !== -1) {\n version = remaining.slice(atIndex + 1);\n remaining = remaining.slice(0, atIndex);\n }\n const segments = remaining.split(\"/\");\n if (segments.length > 1 && isDomain(segments[0])) {\n domain = segments[0];\n const lastSegment = segments[segments.length - 1];\n if (segments.length > 2) {\n path = segments.slice(1, -1).join(\"/\");\n }\n remaining = lastSegment;\n } else {\n remaining = segments.join(\"/\");\n }\n const dotIndex = remaining.lastIndexOf(\".\");\n if (dotIndex !== -1) {\n type = remaining.slice(dotIndex + 1);\n name = remaining.slice(0, dotIndex);\n } else {\n name = remaining;\n }\n return new RXLImpl({ domain, path, name, type, version });\n}\n\nclass RXMImpl {\n domain;\n path;\n name;\n type;\n version;\n constructor(data) {\n this.domain = data.domain;\n this.path = data.path;\n this.name = data.name;\n this.type = data.type;\n this.version = data.version;\n }\n toLocator() {\n let result = this.domain + \"/\";\n if (this.path) {\n result += this.path + \"/\";\n }\n result += this.name;\n result += \".\" + this.type;\n result += \"@\" + this.version;\n return result;\n }\n toJSON() {\n const json = {\n domain: this.domain,\n name: this.name,\n type: this.type,\n version: this.version\n };\n if (this.path !== undefined) {\n json.path = this.path;\n }\n return json;\n }\n}\nfunction createRXM(data) {\n if (!data.domain) {\n throw new ManifestError(\"domain is required\");\n }\n if (!data.name) {\n throw new ManifestError(\"name is required\");\n }\n if (!data.type) {\n throw new ManifestError(\"type is required\");\n }\n if (!data.version) {\n throw new ManifestError(\"version is required\");\n }\n return new RXMImpl({\n domain: data.domain,\n path: data.path,\n name: data.name,\n type: data.type,\n version: data.version\n });\n}\nvar BLOCK_SIZE = 512;\nvar BLOCK_SIZE_MASK = 511;\nvar DEFAULT_FILE_MODE = 420;\nvar DEFAULT_DIR_MODE = 493;\nvar USTAR_NAME_OFFSET = 0;\nvar USTAR_NAME_SIZE = 100;\nvar USTAR_MODE_OFFSET = 100;\nvar USTAR_MODE_SIZE = 8;\nvar USTAR_UID_OFFSET = 108;\nvar USTAR_UID_SIZE = 8;\nvar 
USTAR_GID_OFFSET = 116;\nvar USTAR_GID_SIZE = 8;\nvar USTAR_SIZE_OFFSET = 124;\nvar USTAR_SIZE_SIZE = 12;\nvar USTAR_MTIME_OFFSET = 136;\nvar USTAR_MTIME_SIZE = 12;\nvar USTAR_CHECKSUM_OFFSET = 148;\nvar USTAR_CHECKSUM_SIZE = 8;\nvar USTAR_TYPEFLAG_OFFSET = 156;\nvar USTAR_TYPEFLAG_SIZE = 1;\nvar USTAR_LINKNAME_OFFSET = 157;\nvar USTAR_LINKNAME_SIZE = 100;\nvar USTAR_MAGIC_OFFSET = 257;\nvar USTAR_MAGIC_SIZE = 6;\nvar USTAR_VERSION_OFFSET = 263;\nvar USTAR_VERSION_SIZE = 2;\nvar USTAR_UNAME_OFFSET = 265;\nvar USTAR_UNAME_SIZE = 32;\nvar USTAR_GNAME_OFFSET = 297;\nvar USTAR_GNAME_SIZE = 32;\nvar USTAR_PREFIX_OFFSET = 345;\nvar USTAR_PREFIX_SIZE = 155;\nvar USTAR_VERSION = \"00\";\nvar USTAR_MAX_UID_GID = 2097151;\nvar USTAR_MAX_SIZE = 8589934591;\nvar FILE = \"file\";\nvar LINK = \"link\";\nvar SYMLINK = \"symlink\";\nvar DIRECTORY = \"directory\";\nvar TYPEFLAG = {\n file: \"0\",\n link: \"1\",\n symlink: \"2\",\n \"character-device\": \"3\",\n \"block-device\": \"4\",\n directory: \"5\",\n fifo: \"6\",\n \"pax-header\": \"x\",\n \"pax-global-header\": \"g\",\n \"gnu-long-name\": \"L\",\n \"gnu-long-link-name\": \"K\"\n};\nvar FLAGTYPE = {\n \"0\": FILE,\n \"1\": LINK,\n \"2\": SYMLINK,\n \"3\": \"character-device\",\n \"4\": \"block-device\",\n \"5\": DIRECTORY,\n \"6\": \"fifo\",\n x: \"pax-header\",\n g: \"pax-global-header\",\n L: \"gnu-long-name\",\n K: \"gnu-long-link-name\"\n};\nvar ZERO_BLOCK = new Uint8Array(BLOCK_SIZE);\nvar EMPTY = new Uint8Array(0);\nvar encoder = new TextEncoder;\nvar decoder = new TextDecoder;\nfunction writeString(view, offset, size, value) {\n if (value)\n encoder.encodeInto(value, view.subarray(offset, offset + size));\n}\nfunction writeOctal(view, offset, size, value) {\n if (value === undefined)\n return;\n const octalString = value.toString(8).padStart(size - 1, \"0\");\n encoder.encodeInto(octalString, view.subarray(offset, offset + size - 1));\n}\nfunction readString(view, offset, size) {\n const end = view.indexOf(0, offset);\n const sliceEnd = end === -1 || end > offset + size ? 
offset + size : end;\n return decoder.decode(view.subarray(offset, sliceEnd));\n}\nfunction readOctal(view, offset, size) {\n let value = 0;\n const end = offset + size;\n for (let i = offset;i < end; i++) {\n const charCode = view[i];\n if (charCode === 0)\n break;\n if (charCode === 32)\n continue;\n value = value * 8 + (charCode - 48);\n }\n return value;\n}\nfunction readNumeric(view, offset, size) {\n if (view[offset] & 128) {\n let result = 0;\n result = view[offset] & 127;\n for (let i = 1;i < size; i++)\n result = result * 256 + view[offset + i];\n if (!Number.isSafeInteger(result))\n throw new Error(\"TAR number too large\");\n return result;\n }\n return readOctal(view, offset, size);\n}\nvar isBodyless = (header) => header.type === DIRECTORY || header.type === SYMLINK || header.type === LINK;\nasync function normalizeBody(body) {\n if (body === null || body === undefined)\n return EMPTY;\n if (body instanceof Uint8Array)\n return body;\n if (typeof body === \"string\")\n return encoder.encode(body);\n if (body instanceof ArrayBuffer)\n return new Uint8Array(body);\n if (body instanceof Blob)\n return new Uint8Array(await body.arrayBuffer());\n throw new TypeError(\"Unsupported content type for entry body.\");\n}\nfunction transformHeader(header, options) {\n const { strip, filter, map } = options;\n if (!strip && !filter && !map)\n return header;\n const h = { ...header };\n if (strip && strip > 0) {\n const components = h.name.split(\"/\").filter(Boolean);\n if (strip >= components.length)\n return null;\n const newName = components.slice(strip).join(\"/\");\n h.name = h.type === DIRECTORY && !newName.endsWith(\"/\") ? `${newName}/` : newName;\n if (h.linkname?.startsWith(\"/\")) {\n const linkComponents = h.linkname.split(\"/\").filter(Boolean);\n h.linkname = strip >= linkComponents.length ? \"/\" : `/${linkComponents.slice(strip).join(\"/\")}`;\n }\n }\n if (filter?.(h) === false)\n return null;\n const result = map ? 
map(h) : h;\n if (result && (!result.name || !result.name.trim() || result.name === \".\" || result.name === \"/\"))\n return null;\n return result;\n}\nvar CHECKSUM_SPACE = 32;\nvar ASCII_ZERO = 48;\nfunction validateChecksum(block) {\n const stored = readOctal(block, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_SIZE);\n let sum = 0;\n for (let i = 0;i < block.length; i++)\n if (i >= USTAR_CHECKSUM_OFFSET && i < USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE)\n sum += CHECKSUM_SPACE;\n else\n sum += block[i];\n return stored === sum;\n}\nfunction writeChecksum(block) {\n block.fill(CHECKSUM_SPACE, USTAR_CHECKSUM_OFFSET, USTAR_CHECKSUM_OFFSET + USTAR_CHECKSUM_SIZE);\n let checksum = 0;\n for (const byte of block)\n checksum += byte;\n for (let i = USTAR_CHECKSUM_OFFSET + 6 - 1;i >= USTAR_CHECKSUM_OFFSET; i--) {\n block[i] = (checksum & 7) + ASCII_ZERO;\n checksum >>= 3;\n }\n block[USTAR_CHECKSUM_OFFSET + 6] = 0;\n block[USTAR_CHECKSUM_OFFSET + 7] = CHECKSUM_SPACE;\n}\nfunction generatePax(header) {\n const paxRecords = {};\n if (header.name.length > USTAR_NAME_SIZE) {\n if (findUstarSplit(header.name) === null)\n paxRecords.path = header.name;\n }\n if (header.linkname && header.linkname.length > USTAR_NAME_SIZE)\n paxRecords.linkpath = header.linkname;\n if (header.uname && header.uname.length > USTAR_UNAME_SIZE)\n paxRecords.uname = header.uname;\n if (header.gname && header.gname.length > USTAR_GNAME_SIZE)\n paxRecords.gname = header.gname;\n if (header.uid != null && header.uid > USTAR_MAX_UID_GID)\n paxRecords.uid = String(header.uid);\n if (header.gid != null && header.gid > USTAR_MAX_UID_GID)\n paxRecords.gid = String(header.gid);\n if (header.size != null && header.size > USTAR_MAX_SIZE)\n paxRecords.size = String(header.size);\n if (header.pax)\n Object.assign(paxRecords, header.pax);\n const paxEntries = Object.entries(paxRecords);\n if (paxEntries.length === 0)\n return null;\n const paxBody = encoder.encode(paxEntries.map(([key, value]) => {\n const record = `${key}=${value}\n`;\n const partLength = encoder.encode(record).length + 1;\n let totalLength = partLength + String(partLength).length;\n totalLength = partLength + String(totalLength).length;\n return `${totalLength} ${record}`;\n }).join(\"\"));\n return {\n paxHeader: createTarHeader({\n name: decoder.decode(encoder.encode(`PaxHeader/${header.name}`).slice(0, 100)),\n size: paxBody.length,\n type: \"pax-header\",\n mode: 420,\n mtime: header.mtime,\n uname: header.uname,\n gname: header.gname,\n uid: header.uid,\n gid: header.gid\n }),\n paxBody\n };\n}\nfunction findUstarSplit(path) {\n if (path.length <= USTAR_NAME_SIZE)\n return null;\n const minSlashIndex = path.length - USTAR_NAME_SIZE - 1;\n const slashIndex = path.lastIndexOf(\"/\", USTAR_PREFIX_SIZE);\n if (slashIndex > 0 && slashIndex >= minSlashIndex)\n return {\n prefix: path.slice(0, slashIndex),\n name: path.slice(slashIndex + 1)\n };\n return null;\n}\nfunction createTarHeader(header) {\n const view = new Uint8Array(BLOCK_SIZE);\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n let name = header.name;\n let prefix = \"\";\n if (!header.pax?.path) {\n const split = findUstarSplit(name);\n if (split) {\n name = split.name;\n prefix = split.prefix;\n }\n }\n writeString(view, USTAR_NAME_OFFSET, USTAR_NAME_SIZE, name);\n writeOctal(view, USTAR_MODE_OFFSET, USTAR_MODE_SIZE, header.mode ?? (header.type === DIRECTORY ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE));\n writeOctal(view, USTAR_UID_OFFSET, USTAR_UID_SIZE, header.uid ?? 
0);\n writeOctal(view, USTAR_GID_OFFSET, USTAR_GID_SIZE, header.gid ?? 0);\n writeOctal(view, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE, size);\n writeOctal(view, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE, Math.floor((header.mtime?.getTime() ?? Date.now()) / 1000));\n writeString(view, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE, TYPEFLAG[header.type ?? FILE]);\n writeString(view, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE, header.linkname);\n writeString(view, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE, \"ustar\\x00\");\n writeString(view, USTAR_VERSION_OFFSET, USTAR_VERSION_SIZE, USTAR_VERSION);\n writeString(view, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE, header.uname);\n writeString(view, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE, header.gname);\n writeString(view, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE, prefix);\n writeChecksum(view);\n return view;\n}\nfunction parseUstarHeader(block, strict) {\n if (strict && !validateChecksum(block))\n throw new Error(\"Invalid tar header checksum.\");\n const typeflag = readString(block, USTAR_TYPEFLAG_OFFSET, USTAR_TYPEFLAG_SIZE);\n const header = {\n name: readString(block, USTAR_NAME_OFFSET, USTAR_NAME_SIZE),\n mode: readOctal(block, USTAR_MODE_OFFSET, USTAR_MODE_SIZE),\n uid: readNumeric(block, USTAR_UID_OFFSET, USTAR_UID_SIZE),\n gid: readNumeric(block, USTAR_GID_OFFSET, USTAR_GID_SIZE),\n size: readNumeric(block, USTAR_SIZE_OFFSET, USTAR_SIZE_SIZE),\n mtime: /* @__PURE__ */ new Date(readNumeric(block, USTAR_MTIME_OFFSET, USTAR_MTIME_SIZE) * 1000),\n type: FLAGTYPE[typeflag] || FILE,\n linkname: readString(block, USTAR_LINKNAME_OFFSET, USTAR_LINKNAME_SIZE)\n };\n const magic = readString(block, USTAR_MAGIC_OFFSET, USTAR_MAGIC_SIZE);\n if (magic.trim() === \"ustar\") {\n header.uname = readString(block, USTAR_UNAME_OFFSET, USTAR_UNAME_SIZE);\n header.gname = readString(block, USTAR_GNAME_OFFSET, USTAR_GNAME_SIZE);\n }\n if (magic === \"ustar\")\n header.prefix = readString(block, USTAR_PREFIX_OFFSET, USTAR_PREFIX_SIZE);\n return header;\n}\nvar PAX_MAPPING = {\n path: [\"name\", (v) => v],\n linkpath: [\"linkname\", (v) => v],\n size: [\"size\", (v) => parseInt(v, 10)],\n mtime: [\"mtime\", parseFloat],\n uid: [\"uid\", (v) => parseInt(v, 10)],\n gid: [\"gid\", (v) => parseInt(v, 10)],\n uname: [\"uname\", (v) => v],\n gname: [\"gname\", (v) => v]\n};\nfunction parsePax(buffer) {\n const decoder$1 = new TextDecoder(\"utf-8\");\n const overrides = {};\n const pax = {};\n let offset = 0;\n while (offset < buffer.length) {\n const spaceIndex = buffer.indexOf(32, offset);\n if (spaceIndex === -1)\n break;\n const length = parseInt(decoder$1.decode(buffer.subarray(offset, spaceIndex)), 10);\n if (Number.isNaN(length) || length === 0)\n break;\n const recordEnd = offset + length;\n const [key, value] = decoder$1.decode(buffer.subarray(spaceIndex + 1, recordEnd - 1)).split(\"=\", 2);\n if (key && value !== undefined) {\n pax[key] = value;\n const mapping = PAX_MAPPING[key];\n if (mapping) {\n const [targetKey, parser] = mapping;\n const parsedValue = parser(value);\n if (typeof parsedValue === \"string\" || !Number.isNaN(parsedValue))\n overrides[targetKey] = parsedValue;\n }\n }\n offset = recordEnd;\n }\n if (Object.keys(pax).length > 0)\n overrides.pax = pax;\n return overrides;\n}\nfunction applyOverrides(header, overrides) {\n if (overrides.name !== undefined)\n header.name = overrides.name;\n if (overrides.linkname !== undefined)\n header.linkname = overrides.linkname;\n if (overrides.size !== undefined)\n header.size = overrides.size;\n if (overrides.mtime !== 
undefined)\n header.mtime = /* @__PURE__ */ new Date(overrides.mtime * 1000);\n if (overrides.uid !== undefined)\n header.uid = overrides.uid;\n if (overrides.gid !== undefined)\n header.gid = overrides.gid;\n if (overrides.uname !== undefined)\n header.uname = overrides.uname;\n if (overrides.gname !== undefined)\n header.gname = overrides.gname;\n if (overrides.pax)\n header.pax = Object.assign({}, header.pax ?? {}, overrides.pax);\n}\nfunction getMetaParser(type) {\n switch (type) {\n case \"pax-global-header\":\n case \"pax-header\":\n return parsePax;\n case \"gnu-long-name\":\n return (data) => ({ name: readString(data, 0, data.length) });\n case \"gnu-long-link-name\":\n return (data) => ({ linkname: readString(data, 0, data.length) });\n default:\n return;\n }\n}\nfunction getHeaderBlocks(header) {\n const base = createTarHeader(header);\n const pax = generatePax(header);\n if (!pax)\n return [base];\n const paxPadding = -pax.paxBody.length & BLOCK_SIZE_MASK;\n const paddingBlocks = paxPadding > 0 ? [ZERO_BLOCK.subarray(0, paxPadding)] : [];\n return [\n pax.paxHeader,\n pax.paxBody,\n ...paddingBlocks,\n base\n ];\n}\nvar EOF_BUFFER = new Uint8Array(BLOCK_SIZE * 2);\nfunction createTarPacker(onData, onError, onFinalize) {\n let currentHeader = null;\n let bytesWritten = 0;\n let finalized = false;\n return {\n add(header) {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"No new tar entries after finalize.\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Previous entry must be completed before adding a new one\");\n onError(error);\n throw error;\n }\n try {\n const size = isBodyless(header) ? 0 : header.size ?? 0;\n const headerBlocks = getHeaderBlocks({\n ...header,\n size\n });\n for (const block of headerBlocks)\n onData(block);\n currentHeader = {\n ...header,\n size\n };\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n }\n },\n write(chunk) {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active tar entry.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot write data after finalize.\");\n onError(error);\n throw error;\n }\n const newTotal = bytesWritten + chunk.length;\n if (newTotal > currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`\"${currentHeader.name}\" exceeds given size of ${currentHeader.size} bytes.`);\n onError(error);\n throw error;\n }\n try {\n bytesWritten = newTotal;\n onData(chunk);\n } catch (error) {\n onError(error);\n }\n },\n endEntry() {\n if (!currentHeader) {\n const error = /* @__PURE__ */ new Error(\"No active entry to end.\");\n onError(error);\n throw error;\n }\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Cannot end entry after finalize.\");\n onError(error);\n throw error;\n }\n try {\n if (bytesWritten !== currentHeader.size) {\n const error = /* @__PURE__ */ new Error(`Size mismatch for \"${currentHeader.name}\".`);\n onError(error);\n throw error;\n }\n const paddingSize = -currentHeader.size & BLOCK_SIZE_MASK;\n if (paddingSize > 0)\n onData(new Uint8Array(paddingSize));\n currentHeader = null;\n bytesWritten = 0;\n } catch (error) {\n onError(error);\n throw error;\n }\n },\n finalize() {\n if (finalized) {\n const error = /* @__PURE__ */ new Error(\"Archive has already been finalized\");\n onError(error);\n throw error;\n }\n if (currentHeader !== null) {\n const error = /* @__PURE__ */ new Error(\"Cannot finalize while an entry 
is still active\");\n onError(error);\n throw error;\n }\n try {\n onData(EOF_BUFFER);\n finalized = true;\n if (onFinalize)\n onFinalize();\n } catch (error) {\n onError(error);\n }\n }\n };\n}\nvar INITIAL_CAPACITY = 256;\nfunction createChunkQueue() {\n let chunks = new Array(INITIAL_CAPACITY);\n let capacityMask = chunks.length - 1;\n let head = 0;\n let tail = 0;\n let totalAvailable = 0;\n const consumeFromHead = (count) => {\n const chunk = chunks[head];\n if (count === chunk.length) {\n chunks[head] = EMPTY;\n head = head + 1 & capacityMask;\n } else\n chunks[head] = chunk.subarray(count);\n totalAvailable -= count;\n if (totalAvailable === 0 && chunks.length > INITIAL_CAPACITY) {\n chunks = new Array(INITIAL_CAPACITY);\n capacityMask = INITIAL_CAPACITY - 1;\n head = 0;\n tail = 0;\n }\n };\n function pull(bytes, callback) {\n if (callback) {\n let fed = 0;\n let remaining$1 = Math.min(bytes, totalAvailable);\n while (remaining$1 > 0) {\n const chunk = chunks[head];\n const toFeed = Math.min(remaining$1, chunk.length);\n const segment = toFeed === chunk.length ? chunk : chunk.subarray(0, toFeed);\n consumeFromHead(toFeed);\n remaining$1 -= toFeed;\n fed += toFeed;\n if (!callback(segment))\n break;\n }\n return fed;\n }\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes) {\n const view = firstChunk.length === bytes ? firstChunk : firstChunk.subarray(0, bytes);\n consumeFromHead(bytes);\n return view;\n }\n const result = new Uint8Array(bytes);\n let copied = 0;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toCopy = Math.min(remaining, chunk.length);\n result.set(toCopy === chunk.length ? chunk : chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n remaining -= toCopy;\n consumeFromHead(toCopy);\n }\n return result;\n }\n return {\n push: (chunk) => {\n if (chunk.length === 0)\n return;\n let nextTail = tail + 1 & capacityMask;\n if (nextTail === head) {\n const oldLen = chunks.length;\n const newLen = oldLen * 2;\n const newChunks = new Array(newLen);\n const count = tail - head + oldLen & oldLen - 1;\n if (head < tail)\n for (let i = 0;i < count; i++)\n newChunks[i] = chunks[head + i];\n else if (count > 0) {\n const firstPart = oldLen - head;\n for (let i = 0;i < firstPart; i++)\n newChunks[i] = chunks[head + i];\n for (let i = 0;i < tail; i++)\n newChunks[firstPart + i] = chunks[i];\n }\n chunks = newChunks;\n capacityMask = newLen - 1;\n head = 0;\n tail = count;\n nextTail = tail + 1 & capacityMask;\n }\n chunks[tail] = chunk;\n tail = nextTail;\n totalAvailable += chunk.length;\n },\n available: () => totalAvailable,\n peek: (bytes) => {\n if (totalAvailable < bytes)\n return null;\n if (bytes === 0)\n return EMPTY;\n const firstChunk = chunks[head];\n if (firstChunk.length >= bytes)\n return firstChunk.length === bytes ? 
firstChunk : firstChunk.subarray(0, bytes);\n const result = new Uint8Array(bytes);\n let copied = 0;\n let index = head;\n while (copied < bytes) {\n const chunk = chunks[index];\n const toCopy = Math.min(bytes - copied, chunk.length);\n if (toCopy === chunk.length)\n result.set(chunk, copied);\n else\n result.set(chunk.subarray(0, toCopy), copied);\n copied += toCopy;\n index = index + 1 & capacityMask;\n }\n return result;\n },\n discard: (bytes) => {\n if (bytes > totalAvailable)\n throw new Error(\"Too many bytes consumed\");\n if (bytes === 0)\n return;\n let remaining = bytes;\n while (remaining > 0) {\n const chunk = chunks[head];\n const toConsume = Math.min(remaining, chunk.length);\n consumeFromHead(toConsume);\n remaining -= toConsume;\n }\n },\n pull\n };\n}\nvar STATE_HEADER = 0;\nvar STATE_BODY = 1;\nvar truncateErr = /* @__PURE__ */ new Error(\"Tar archive is truncated.\");\nfunction createUnpacker(options = {}) {\n const strict = options.strict ?? false;\n const { available, peek, push, discard, pull } = createChunkQueue();\n let state = STATE_HEADER;\n let ended = false;\n let done = false;\n let eof = false;\n let currentEntry = null;\n const paxGlobals = {};\n let nextEntryOverrides = {};\n const unpacker = {\n isEntryActive: () => state === STATE_BODY,\n isBodyComplete: () => !currentEntry || currentEntry.remaining === 0,\n write(chunk) {\n if (ended)\n throw new Error(\"Archive already ended.\");\n push(chunk);\n },\n end() {\n ended = true;\n },\n readHeader() {\n if (state !== STATE_HEADER)\n throw new Error(\"Cannot read header while an entry is active\");\n if (done)\n return;\n while (!done) {\n if (available() < BLOCK_SIZE) {\n if (ended) {\n if (available() > 0 && strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n const headerBlock = peek(BLOCK_SIZE);\n if (isZeroBlock(headerBlock)) {\n if (available() < BLOCK_SIZE * 2) {\n if (ended) {\n if (strict)\n throw truncateErr;\n done = true;\n return;\n }\n return null;\n }\n if (isZeroBlock(peek(BLOCK_SIZE * 2).subarray(BLOCK_SIZE))) {\n discard(BLOCK_SIZE * 2);\n done = true;\n eof = true;\n return;\n }\n if (strict)\n throw new Error(\"Invalid tar header.\");\n discard(BLOCK_SIZE);\n continue;\n }\n let internalHeader;\n try {\n internalHeader = parseUstarHeader(headerBlock, strict);\n } catch (err) {\n if (strict)\n throw err;\n discard(BLOCK_SIZE);\n continue;\n }\n const metaParser = getMetaParser(internalHeader.type);\n if (metaParser) {\n const paddedSize = internalHeader.size + BLOCK_SIZE_MASK & ~BLOCK_SIZE_MASK;\n if (available() < BLOCK_SIZE + paddedSize) {\n if (ended && strict)\n throw truncateErr;\n return null;\n }\n discard(BLOCK_SIZE);\n const overrides = metaParser(pull(paddedSize).subarray(0, internalHeader.size));\n const target = internalHeader.type === \"pax-global-header\" ? 
paxGlobals : nextEntryOverrides;\n for (const key in overrides)\n target[key] = overrides[key];\n continue;\n }\n discard(BLOCK_SIZE);\n const header = internalHeader;\n if (internalHeader.prefix)\n header.name = `${internalHeader.prefix}/${header.name}`;\n applyOverrides(header, paxGlobals);\n applyOverrides(header, nextEntryOverrides);\n nextEntryOverrides = {};\n currentEntry = {\n header,\n remaining: header.size,\n padding: -header.size & BLOCK_SIZE_MASK\n };\n state = STATE_BODY;\n return header;\n }\n },\n streamBody(callback) {\n if (state !== STATE_BODY || !currentEntry || currentEntry.remaining === 0)\n return 0;\n const bytesToFeed = Math.min(currentEntry.remaining, available());\n if (bytesToFeed === 0)\n return 0;\n const fed = pull(bytesToFeed, callback);\n currentEntry.remaining -= fed;\n return fed;\n },\n skipPadding() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n if (currentEntry.remaining > 0)\n throw new Error(\"Body not fully consumed\");\n if (available() < currentEntry.padding)\n return false;\n discard(currentEntry.padding);\n currentEntry = null;\n state = STATE_HEADER;\n return true;\n },\n skipEntry() {\n if (state !== STATE_BODY || !currentEntry)\n return true;\n const toDiscard = Math.min(currentEntry.remaining, available());\n if (toDiscard > 0) {\n discard(toDiscard);\n currentEntry.remaining -= toDiscard;\n }\n if (currentEntry.remaining > 0)\n return false;\n return unpacker.skipPadding();\n },\n validateEOF() {\n if (strict) {\n if (!eof)\n throw truncateErr;\n if (available() > 0) {\n if (pull(available()).some((byte) => byte !== 0))\n throw new Error(\"Invalid EOF.\");\n }\n }\n }\n };\n return unpacker;\n}\nfunction isZeroBlock(block) {\n if (block.byteOffset % 8 === 0) {\n const view = new BigUint64Array(block.buffer, block.byteOffset, block.length / 8);\n for (let i = 0;i < view.length; i++)\n if (view[i] !== 0n)\n return false;\n return true;\n }\n for (let i = 0;i < block.length; i++)\n if (block[i] !== 0)\n return false;\n return true;\n}\nfunction createTarPacker2() {\n let streamController;\n let packer;\n return {\n readable: new ReadableStream({ start(controller) {\n streamController = controller;\n packer = createTarPacker(controller.enqueue.bind(controller), controller.error.bind(controller), controller.close.bind(controller));\n } }),\n controller: {\n add(header) {\n const bodyless = isBodyless(header);\n const h = { ...header };\n if (bodyless)\n h.size = 0;\n packer.add(h);\n if (bodyless)\n packer.endEntry();\n return new WritableStream({\n write(chunk) {\n packer.write(chunk);\n },\n close() {\n if (!bodyless)\n packer.endEntry();\n },\n abort(reason) {\n streamController.error(reason);\n }\n });\n },\n finalize() {\n packer.finalize();\n },\n error(err) {\n streamController.error(err);\n }\n }\n };\n}\nasync function streamToBuffer(stream) {\n const chunks = [];\n const reader = stream.getReader();\n let totalLength = 0;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done)\n break;\n chunks.push(value);\n totalLength += value.length;\n }\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n } finally {\n reader.releaseLock();\n }\n}\nvar drain = (stream) => stream.pipeTo(new WritableStream);\nfunction createTarDecoder(options = {}) {\n const unpacker = createUnpacker(options);\n let bodyController = null;\n let pumping = false;\n const pump = (controller) => {\n if 
(pumping)\n return;\n pumping = true;\n try {\n while (true)\n if (unpacker.isEntryActive()) {\n if (bodyController) {\n if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete())\n break;\n } else if (!unpacker.skipEntry())\n break;\n if (unpacker.isBodyComplete()) {\n try {\n bodyController?.close();\n } catch {}\n bodyController = null;\n if (!unpacker.skipPadding())\n break;\n }\n } else {\n const header = unpacker.readHeader();\n if (header === null || header === undefined)\n break;\n controller.enqueue({\n header,\n body: new ReadableStream({\n start(c) {\n if (header.size === 0)\n c.close();\n else\n bodyController = c;\n },\n pull: () => pump(controller),\n cancel() {\n bodyController = null;\n pump(controller);\n }\n })\n });\n }\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n bodyController = null;\n throw error;\n } finally {\n pumping = false;\n }\n };\n return new TransformStream({\n transform(chunk, controller) {\n try {\n unpacker.write(chunk);\n pump(controller);\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n },\n flush(controller) {\n try {\n unpacker.end();\n pump(controller);\n unpacker.validateEOF();\n if (unpacker.isEntryActive() && !unpacker.isBodyComplete())\n try {\n bodyController?.close();\n } catch {}\n } catch (error) {\n try {\n bodyController?.error(error);\n } catch {}\n throw error;\n }\n }\n }, undefined, { highWaterMark: 1 });\n}\nasync function packTar(entries) {\n const { readable, controller } = createTarPacker2();\n await (async () => {\n for (const entry of entries) {\n const entryStream = controller.add(entry.header);\n const body = \"body\" in entry ? entry.body : entry.data;\n if (!body) {\n await entryStream.close();\n continue;\n }\n if (body instanceof ReadableStream)\n await body.pipeTo(entryStream);\n else if (body instanceof Blob)\n await body.stream().pipeTo(entryStream);\n else\n try {\n const chunk = await normalizeBody(body);\n if (chunk.length > 0) {\n const writer = entryStream.getWriter();\n await writer.write(chunk);\n await writer.close();\n } else\n await entryStream.close();\n } catch {\n throw new TypeError(`Unsupported content type for entry \"${entry.header.name}\".`);\n }\n }\n })().then(() => controller.finalize()).catch((err) => controller.error(err));\n return new Uint8Array(await streamToBuffer(readable));\n}\nasync function unpackTar(archive, options = {}) {\n const sourceStream = archive instanceof ReadableStream ? archive : new ReadableStream({ start(controller) {\n controller.enqueue(archive instanceof Uint8Array ? 
archive : new Uint8Array(archive));\n controller.close();\n } });\n const results = [];\n const entryStream = sourceStream.pipeThrough(createTarDecoder(options));\n for await (const entry of entryStream) {\n let processedHeader;\n try {\n processedHeader = transformHeader(entry.header, options);\n } catch (error) {\n await entry.body.cancel();\n throw error;\n }\n if (processedHeader === null) {\n await drain(entry.body);\n continue;\n }\n if (isBodyless(processedHeader)) {\n await drain(entry.body);\n results.push({ header: processedHeader });\n } else\n results.push({\n header: processedHeader,\n data: await streamToBuffer(entry.body)\n });\n }\n return results;\n}\nvar gzipAsync = promisify(gzip);\nvar gunzipAsync = promisify(gunzip);\n\nclass RXPImpl {\n _files;\n _pathsCache = null;\n _treeCache = null;\n constructor(files) {\n this._files = files;\n }\n paths() {\n if (this._pathsCache) {\n return this._pathsCache;\n }\n this._pathsCache = Array.from(this._files.keys()).sort();\n return this._pathsCache;\n }\n tree() {\n if (this._treeCache) {\n return this._treeCache;\n }\n const root = new Map;\n for (const path of this._files.keys()) {\n const parts = path.split(\"/\");\n let currentLevel = root;\n for (let i = 0;i < parts.length; i++) {\n const part = parts[i];\n const isFile = i === parts.length - 1;\n if (!currentLevel.has(part)) {\n const treeNode2 = {\n node: {\n name: part,\n type: isFile ? \"file\" : \"directory\",\n children: isFile ? undefined : []\n },\n children: new Map\n };\n currentLevel.set(part, treeNode2);\n }\n const treeNode = currentLevel.get(part);\n if (!isFile) {\n currentLevel = treeNode.children;\n }\n }\n }\n const convertToPathNodes = (level) => {\n return Array.from(level.values()).map((treeNode) => {\n if (treeNode.node.type === \"directory\" && treeNode.children.size > 0) {\n treeNode.node.children = convertToPathNodes(treeNode.children);\n }\n return treeNode.node;\n });\n };\n this._treeCache = convertToPathNodes(root);\n return this._treeCache;\n }\n async file(path) {\n const content = this._files.get(path);\n if (!content) {\n throw new ContentError(`file not found: ${path}`);\n }\n return content;\n }\n async files() {\n return new Map(this._files);\n }\n async pack() {\n const filesRecord = {};\n for (const [path, content] of this._files) {\n filesRecord[path] = content;\n }\n return createRXA(filesRecord);\n }\n}\n\nclass RXAImpl {\n _buffer;\n _rxpCache = null;\n constructor(buffer) {\n this._buffer = buffer;\n }\n get stream() {\n const buffer = this._buffer;\n return new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n }\n });\n }\n async buffer() {\n return this._buffer;\n }\n async extract() {\n if (this._rxpCache) {\n return this._rxpCache;\n }\n const tarBuffer = await gunzipAsync(this._buffer);\n const entries = await unpackTar(tarBuffer);\n const filesMap = new Map;\n for (const entry of entries) {\n if ((entry.header.type === \"file\" || entry.header.type === undefined) && entry.data) {\n filesMap.set(entry.header.name, Buffer.from(entry.data));\n }\n }\n this._rxpCache = new RXPImpl(filesMap);\n return this._rxpCache;\n }\n}\nfunction isBufferInput(input) {\n return \"buffer\" in input && Buffer.isBuffer(input.buffer);\n}\nasync function createRXA(input) {\n if (isBufferInput(input)) {\n return new RXAImpl(input.buffer);\n }\n const entries = Object.entries(input).map(([name, content]) => {\n const body = typeof content === \"string\" ? 
content : content instanceof Uint8Array ? content : new Uint8Array(content);\n const size = typeof content === \"string\" ? Buffer.byteLength(content) : content.length;\n return {\n header: { name, size, type: \"file\" },\n body\n };\n });\n const tarBuffer = await packTar(entries);\n const gzipBuffer = await gzipAsync(Buffer.from(tarBuffer));\n return new RXAImpl(gzipBuffer);\n}\n\n// src/FolderLoader.ts\nclass FolderLoader {\n async canLoad(source) {\n try {\n const stats = await stat(source);\n if (!stats.isDirectory()) {\n return false;\n }\n const manifestPath = join(source, \"resource.json\");\n const manifestStats = await stat(manifestPath);\n return manifestStats.isFile();\n } catch {\n return false;\n }\n }\n async load(folderPath) {\n const manifestPath = join(folderPath, \"resource.json\");\n let manifestJson;\n try {\n manifestJson = await readFile(manifestPath, \"utf-8\");\n } catch (error) {\n throw new ResourceXError(`Failed to read resource.json: ${error instanceof Error ? error.message : String(error)}`);\n }\n let manifestData;\n try {\n manifestData = JSON.parse(manifestJson);\n } catch (error) {\n throw new ResourceXError(`Invalid JSON in resource.json: ${error instanceof Error ? error.message : String(error)}`);\n }\n if (!manifestData.name) {\n throw new ResourceXError(\"Invalid resource.json: missing required field 'name'\");\n }\n if (!manifestData.type) {\n throw new ResourceXError(\"Invalid resource.json: missing required field 'type'\");\n }\n if (!manifestData.version) {\n throw new ResourceXError(\"Invalid resource.json: missing required field 'version'\");\n }\n const manifest = createRXM({\n domain: manifestData.domain ?? \"localhost\",\n path: manifestData.path,\n name: manifestData.name,\n type: manifestData.type,\n version: manifestData.version\n });\n const files = await this.readFolderFiles(folderPath);\n if (Object.keys(files).length === 0) {\n throw new ResourceXError(\"No content files found in resource folder\");\n }\n const archive = await createRXA(files);\n const locator = parseRXL(manifest.toLocator());\n return {\n locator,\n manifest,\n archive\n };\n }\n async readFolderFiles(folderPath, basePath = folderPath) {\n const files = {};\n const entries = await readdir(folderPath, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(folderPath, entry.name);\n const relativePath = relative(basePath, fullPath);\n if (relativePath === \"resource.json\") {\n continue;\n }\n if (entry.isFile()) {\n files[relativePath] = await readFile(fullPath);\n } else if (entry.isDirectory()) {\n const subFiles = await this.readFolderFiles(fullPath, basePath);\n Object.assign(files, subFiles);\n }\n }\n return files;\n }\n}\n// src/loadResource.ts\nasync function loadResource(source, config) {\n const loader = config?.loader ?? new FolderLoader;\n const canLoad = await loader.canLoad(source);\n if (!canLoad) {\n throw new ResourceXError(`Cannot load resource from: ${source}`);\n }\n return loader.load(source);\n}\nexport {\n loadResource,\n FolderLoader\n};\n\n//# debugId=3088D1AACA1476C364756E2164756E21\n",
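For reference, a minimal sketch of the loadResource flow implemented above. The folder name is hypothetical; per FolderLoader, the directory must contain a resource.json with at least name, type, and version (domain defaults to "localhost"):

import { loadResource } from "@resourcexjs/loader";

// ./my-resource/resource.json: { "name": "notes", "type": "text", "version": "1.0.0" }
const rxr = await loadResource("./my-resource");

console.log(rxr.locator.toString());     // "localhost/notes.text@1.0.0"
const rxp = await rxr.archive.extract(); // gunzip + untar into an RXP
console.log(rxp.paths());                // content file paths (resource.json is excluded)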
9 9 |
"import { homedir } from \"node:os\";\nimport { join, resolve as resolvePath } from \"node:path\";\nimport { symlink, lstat, readlink } from \"node:fs/promises\";\nimport type { RXR, RXL } from \"@resourcexjs/core\";\nimport { parseRXL, createRXM, createRXA } from \"@resourcexjs/core\";\nimport { createARP, type ARP } from \"@resourcexjs/arp\";\nimport { loadResource } from \"@resourcexjs/loader\";\nimport { RegistryError } from \"../errors.js\";\nimport type { Storage, SearchOptions } from \"./Storage.js\";\n\nconst DEFAULT_PATH = `${homedir()}/.resourcex`;\n\n/**\n * LocalStorage configuration.\n */\nexport interface LocalStorageConfig {\n /**\n * Base path for storage. Defaults to ~/.resourcex\n */\n path?: string;\n}\n\n/**\n * Local filesystem storage implementation.\n * Uses ARP file transport for I/O operations.\n *\n * Storage structure:\n * - {basePath}/{domain}/{path}/{name}.{type}/{version}/\n * - manifest.json\n * - archive.tar.gz\n *\n * For localhost/no-domain resources:\n * - {basePath}/localhost/{name}.{type}/{version}/\n */\nexport class LocalStorage implements Storage {\n readonly type = \"local\";\n private readonly basePath: string;\n private readonly arp: ARP;\n\n constructor(config?: LocalStorageConfig) {\n this.basePath = config?.path ?? DEFAULT_PATH;\n this.arp = createARP();\n }\n\n /**\n * Create ARP URL for a file path.\n */\n private toArpUrl(filePath: string): string {\n return `arp:binary:file://${filePath}`;\n }\n\n /**\n * Build filesystem path for a resource.\n * Path: {basePath}/{domain}/{path}/{name}.{type}/{version}\n */\n private buildPath(locator: string | RXL): string {\n const rxl = typeof locator === \"string\" ? parseRXL(locator) : locator;\n const domain = rxl.domain ?? \"localhost\";\n const resourceName = rxl.type ? `${rxl.name}.${rxl.type}` : rxl.name;\n const version = rxl.version ?? 
\"latest\";\n\n let path = join(this.basePath, domain);\n if (rxl.path) {\n path = join(path, rxl.path);\n }\n return join(path, resourceName, version);\n }\n\n /**\n * Check if a resource exists at a specific path.\n * Handles both regular storage (manifest.json) and symlinked dev directories (resource.json).\n */\n private async existsAt(resourcePath: string): Promise<boolean> {\n // Check if path is a symlink (created by link())\n if (await this.isSymlink(resourcePath)) {\n // For symlinks, check if resource.json exists in the linked directory\n const targetPath = await readlink(resourcePath);\n const resourceJsonPath = join(targetPath, \"resource.json\");\n const arl = this.arp.parse(this.toArpUrl(resourceJsonPath));\n return arl.exists();\n }\n\n // Regular storage: check for manifest.json\n const manifestPath = join(resourcePath, \"manifest.json\");\n const arl = this.arp.parse(this.toArpUrl(manifestPath));\n return arl.exists();\n }\n\n /**\n * Check if a path is a symlink.\n */\n private async isSymlink(path: string): Promise<boolean> {\n try {\n const stats = await lstat(path);\n return stats.isSymbolicLink();\n } catch {\n return false;\n }\n }\n\n /**\n * Load resource from a specific path.\n * If path is a symlink, loads from the linked directory.\n */\n private async loadFrom(resourcePath: string): Promise<RXR> {\n // Check if this is a symlink (created by link())\n if (await this.isSymlink(resourcePath)) {\n const targetPath = await readlink(resourcePath);\n return loadResource(targetPath);\n }\n\n // Regular resource: read manifest and archive\n const manifestPath = join(resourcePath, \"manifest.json\");\n const manifestArl = this.arp.parse(this.toArpUrl(manifestPath));\n const manifestResource = await manifestArl.resolve();\n const manifestContent = (manifestResource.content as Buffer).toString(\"utf-8\");\n const manifestData = JSON.parse(manifestContent);\n const manifest = createRXM(manifestData);\n\n // Read archive\n const archivePath = join(resourcePath, \"archive.tar.gz\");\n const archiveArl = this.arp.parse(this.toArpUrl(archivePath));\n const archiveResource = await archiveArl.resolve();\n const data = archiveResource.content as Buffer;\n\n return {\n locator: parseRXL(manifest.toLocator()),\n manifest,\n archive: await createRXA({ buffer: data }),\n };\n }\n\n async get(locator: string): Promise<RXR> {\n const resourcePath = this.buildPath(locator);\n\n if (!(await this.existsAt(resourcePath))) {\n throw new RegistryError(`Resource not found: ${locator}`);\n }\n\n return this.loadFrom(resourcePath);\n }\n\n async put(rxr: RXR): Promise<void> {\n const locator = rxr.manifest.toLocator();\n const resourcePath = this.buildPath(locator);\n\n // Remove existing symlink if any\n if (await this.isSymlink(resourcePath)) {\n const arl = this.arp.parse(this.toArpUrl(resourcePath));\n await arl.delete();\n }\n\n // Ensure directory exists\n const dirArl = this.arp.parse(this.toArpUrl(resourcePath));\n await dirArl.mkdir();\n\n // Write manifest\n const manifestPath = join(resourcePath, \"manifest.json\");\n const manifestArl = this.arp.parse(this.toArpUrl(manifestPath));\n const manifestContent = Buffer.from(JSON.stringify(rxr.manifest.toJSON(), null, 2), \"utf-8\");\n await manifestArl.deposit(manifestContent);\n\n // Write archive\n const archivePath = join(resourcePath, \"archive.tar.gz\");\n const archiveArl = this.arp.parse(this.toArpUrl(archivePath));\n const archiveBuffer = await rxr.archive.buffer();\n await archiveArl.deposit(archiveBuffer);\n }\n\n async 
exists(locator: string): Promise<boolean> {\n const resourcePath = this.buildPath(locator);\n return this.existsAt(resourcePath);\n }\n\n async delete(locator: string): Promise<void> {\n const resourcePath = this.buildPath(locator);\n\n if (await this.existsAt(resourcePath)) {\n const arl = this.arp.parse(this.toArpUrl(resourcePath));\n await arl.delete();\n }\n }\n\n async search(options?: SearchOptions): Promise<RXL[]> {\n const { query, limit, offset = 0 } = options ?? {};\n const locators: RXL[] = [];\n\n // Scan base directory recursively for manifest.json files\n try {\n const baseArl = this.arp.parse(this.toArpUrl(this.basePath));\n const entries = await baseArl.list({ recursive: true, pattern: \"*.json\" });\n\n for (const entry of entries) {\n if (!entry.endsWith(\"manifest.json\")) continue;\n const rxl = this.parseEntryToRXL(entry);\n if (rxl) locators.push(rxl);\n }\n } catch {\n // Directory doesn't exist\n return [];\n }\n\n // Filter by query\n let filtered = locators;\n if (query) {\n const lowerQuery = query.toLowerCase();\n filtered = locators.filter((rxl) => {\n const searchText =\n `${rxl.domain ?? \"\"} ${rxl.path ?? \"\"} ${rxl.name} ${rxl.type ?? \"\"}`.toLowerCase();\n return searchText.includes(lowerQuery);\n });\n }\n\n // Apply pagination\n let result = filtered.slice(offset);\n if (limit !== undefined) {\n result = result.slice(0, limit);\n }\n\n return result;\n }\n\n /**\n * Link a development directory.\n * Creates a symlink so changes are reflected immediately.\n */\n async link(path: string): Promise<void> {\n // Load resource from directory to get locator info\n const rxr = await loadResource(path);\n const locator = rxr.manifest.toLocator();\n const resourcePath = this.buildPath(locator);\n\n // Remove existing if any\n try {\n const arl = this.arp.parse(this.toArpUrl(resourcePath));\n if (await arl.exists()) {\n await arl.delete();\n }\n } catch {\n // Ignore\n }\n\n // Ensure parent directory exists\n const parentPath = join(resourcePath, \"..\");\n const parentArl = this.arp.parse(this.toArpUrl(parentPath));\n await parentArl.mkdir();\n\n // Create symlink to absolute path\n const absolutePath = resolvePath(path);\n await symlink(absolutePath, resourcePath);\n }\n\n /**\n * Parse entry path to RXL.\n * Entry format: {domain}/{path}/{name}.{type}/{version}/manifest.json\n */\n private parseEntryToRXL(entry: string): RXL | null {\n // Remove /manifest.json suffix\n const dirPath = entry.replace(/[/\\\\]manifest\\.json$/, \"\");\n const parts = dirPath.split(/[/\\\\]/);\n\n if (parts.length < 3) {\n // Need at least: domain, name.type, version\n return null;\n }\n\n // Last part is version\n const version = parts.pop()!;\n // Second to last is {name}.{type}\n const nameTypePart = parts.pop()!;\n // First part is domain\n const domain = parts.shift()!;\n // Remaining parts are path (if any)\n const path = parts.length > 0 ? parts.join(\"/\") : undefined;\n\n // Split name and type\n const dotIndex = nameTypePart.lastIndexOf(\".\");\n let name: string;\n let type: string | undefined;\n\n if (dotIndex !== -1) {\n name = nameTypePart.substring(0, dotIndex);\n type = nameTypePart.substring(dotIndex + 1);\n } else {\n name = nameTypePart;\n type = undefined;\n }\n\n // Construct locator string\n let locatorStr = domain;\n if (path) locatorStr += `/${path}`;\n locatorStr += `/${name}`;\n if (type) locatorStr += `.${type}`;\n locatorStr += `@${version}`;\n\n try {\n return parseRXL(locatorStr);\n } catch {\n return null;\n }\n }\n}\n",
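For reference, a sketch of how the LocalStorage class above might be driven. The LocalStorage import specifier is an assumption; the method calls follow the source shown here:

import { loadResource } from "@resourcexjs/loader";
import { LocalStorage } from "@resourcexjs/registry"; // specifier assumed

const storage = new LocalStorage({ path: "/tmp/resourcex" }); // default is ~/.resourcex

const rxr = await loadResource("./my-resource");  // hypothetical dev folder
await storage.put(rxr);                           // writes manifest.json + archive.tar.gz
console.log(await storage.exists(rxr.manifest.toLocator())); // true

// Alternatively, symlink the folder so edits are picked up without re-publishing:
await storage.link("./my-resource");

const hits = await storage.search({ query: "notes", limit: 10 }); // RXL[]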
10 - |
"// src/errors.ts\nclass ARPError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ARPError\";\n }\n}\n\nclass ParseError extends ARPError {\n url;\n constructor(message, url) {\n super(message);\n this.url = url;\n this.name = \"ParseError\";\n }\n}\n\nclass TransportError extends ARPError {\n transport;\n constructor(message, transport, options) {\n super(message, options);\n this.transport = transport;\n this.name = \"TransportError\";\n }\n}\n\nclass SemanticError extends ARPError {\n semantic;\n constructor(message, semantic, options) {\n super(message, options);\n this.semantic = semantic;\n this.name = \"SemanticError\";\n }\n}\n\n// src/ARL.ts\nclass ARL {\n semantic;\n transport;\n location;\n resolver;\n constructor(semantic, transport, location, resolver) {\n this.semantic = semantic;\n this.transport = transport;\n this.location = location;\n this.resolver = resolver;\n }\n createContext(params) {\n return {\n url: this.toString(),\n semantic: this.semantic,\n transport: this.transport,\n location: this.location,\n timestamp: new Date,\n params\n };\n }\n async resolve(params) {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext(params);\n return semantic.resolve(transport, this.location, context);\n }\n async deposit(data, params) {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext(params);\n if (!semantic.deposit) {\n throw new SemanticError(`Semantic \"${semantic.name}\" does not support deposit operation`, this.semantic);\n }\n await semantic.deposit(transport, this.location, data, context);\n }\n async exists() {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext();\n if (semantic.exists) {\n return semantic.exists(transport, this.location, context);\n }\n return transport.exists(this.location);\n }\n async delete() {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext();\n if (semantic.delete) {\n return semantic.delete(transport, this.location, context);\n }\n await transport.delete(this.location);\n }\n async list(options) {\n const transport = this.resolver.getTransportHandler(this.transport);\n if (!transport.list) {\n throw new TransportError(`Transport \"${transport.name}\" does not support list operation`, this.transport);\n }\n return transport.list(this.location, options);\n }\n async mkdir() {\n const transport = this.resolver.getTransportHandler(this.transport);\n if (!transport.mkdir) {\n throw new TransportError(`Transport \"${transport.name}\" does not support mkdir operation`, this.transport);\n }\n await transport.mkdir(this.location);\n }\n toString() {\n return `arp:${this.semantic}:${this.transport}://${this.location}`;\n }\n}\n\n// src/transport/file.ts\nimport { readFile, writeFile, readdir, mkdir, rm, access, stat } from \"node:fs/promises\";\nimport { resolve, dirname, join } from \"node:path\";\nclass FileTransportHandler {\n name = \"file\";\n resolvePath(location) {\n return resolve(process.cwd(), location);\n }\n async get(location, params) {\n const filePath = this.resolvePath(location);\n try {\n const stats = 
await stat(filePath);\n if (stats.isDirectory()) {\n return this.getDirectory(filePath, stats, params);\n } else {\n return this.getFile(filePath, stats);\n }\n } catch (error) {\n const err = error;\n throw new TransportError(`File get error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async getFile(filePath, stats) {\n const content = await readFile(filePath);\n return {\n content,\n metadata: {\n type: \"file\",\n size: Number(stats.size),\n modifiedAt: stats.mtime\n }\n };\n }\n async getDirectory(dirPath, stats, params) {\n const recursive = params?.recursive === \"true\";\n const pattern = params?.pattern;\n let entries;\n if (recursive) {\n entries = await this.listRecursive(dirPath, dirPath);\n } else {\n entries = await readdir(dirPath);\n }\n if (pattern) {\n entries = this.filterByPattern(entries, pattern);\n }\n const content = Buffer.from(JSON.stringify(entries));\n return {\n content,\n metadata: {\n type: \"directory\",\n modifiedAt: stats.mtime\n }\n };\n }\n async listRecursive(basePath, currentPath) {\n const entries = await readdir(currentPath, { withFileTypes: true });\n const results = [];\n for (const entry of entries) {\n const fullPath = join(currentPath, entry.name);\n const relativePath = fullPath.substring(basePath.length + 1);\n if (entry.isDirectory()) {\n const subEntries = await this.listRecursive(basePath, fullPath);\n results.push(...subEntries);\n } else {\n results.push(relativePath);\n }\n }\n return results;\n }\n filterByPattern(entries, pattern) {\n const regexPattern = pattern.replace(/\\./g, \"\\\\.\").replace(/\\*/g, \".*\").replace(/\\?/g, \".\");\n const regex = new RegExp(`^${regexPattern}$`);\n return entries.filter((entry) => {\n const filename = entry.split(\"/\").pop() || entry;\n return regex.test(filename);\n });\n }\n async set(location, content, _params) {\n const filePath = this.resolvePath(location);\n try {\n await mkdir(dirname(filePath), { recursive: true });\n await writeFile(filePath, content);\n } catch (error) {\n const err = error;\n throw new TransportError(`File set error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async exists(location) {\n const filePath = this.resolvePath(location);\n try {\n await access(filePath);\n return true;\n } catch {\n return false;\n }\n }\n async delete(location) {\n const filePath = this.resolvePath(location);\n try {\n await rm(filePath, { recursive: true });\n } catch (error) {\n const err = error;\n if (err.code === \"ENOENT\") {\n return;\n }\n throw new TransportError(`File delete error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async list(location, options) {\n const dirPath = this.resolvePath(location);\n try {\n let entries;\n if (options?.recursive) {\n entries = await this.listRecursive(dirPath, dirPath);\n } else {\n entries = await readdir(dirPath);\n }\n if (options?.pattern) {\n entries = this.filterByPattern(entries, options.pattern);\n }\n return entries;\n } catch (error) {\n const err = error;\n throw new TransportError(`File list error: ${err.code} - ${dirPath}`, this.name, {\n cause: err\n });\n }\n }\n async mkdir(location) {\n const dirPath = this.resolvePath(location);\n try {\n await mkdir(dirPath, { recursive: true });\n } catch (error) {\n const err = error;\n throw new TransportError(`File mkdir error: ${err.code} - ${dirPath}`, this.name, {\n cause: err\n });\n }\n }\n}\nvar fileTransport = new FileTransportHandler;\n// src/transport/http.ts\nclass HttpTransportHandler {\n name;\n 
protocol;\n constructor(protocol = \"https\") {\n this.protocol = protocol;\n this.name = protocol;\n }\n async get(location, params) {\n const url = this.buildUrl(location, params);\n try {\n const response = await fetch(url);\n if (!response.ok) {\n throw new TransportError(`HTTP ${response.status}: ${response.statusText} - ${url}`, this.name);\n }\n const arrayBuffer = await response.arrayBuffer();\n const content = Buffer.from(arrayBuffer);\n const contentType = response.headers.get(\"content-type\");\n const contentLength = response.headers.get(\"content-length\");\n const lastModified = response.headers.get(\"last-modified\");\n return {\n content,\n metadata: {\n type: \"file\",\n size: contentLength ? parseInt(contentLength, 10) : content.length,\n modifiedAt: lastModified ? new Date(lastModified) : undefined,\n contentType\n }\n };\n } catch (error) {\n if (error instanceof TransportError) {\n throw error;\n }\n throw new TransportError(`Network error: ${url}`, this.name, {\n cause: error\n });\n }\n }\n buildUrl(location, params) {\n const url = new URL(`${this.protocol}://${location}`);\n if (params) {\n for (const [key, value] of Object.entries(params)) {\n url.searchParams.set(key, value);\n }\n }\n return url.toString();\n }\n async set(_location, _content, _params) {\n throw new TransportError(\"HTTP transport is read-only, set not supported\", this.name);\n }\n async exists(location) {\n const url = `${this.protocol}://${location}`;\n try {\n const response = await fetch(url, { method: \"HEAD\" });\n return response.ok;\n } catch {\n return false;\n }\n }\n async delete(_location) {\n throw new TransportError(\"HTTP transport is read-only, delete not supported\", this.name);\n }\n}\nvar httpsTransport = new HttpTransportHandler(\"https\");\nvar httpTransport = new HttpTransportHandler(\"http\");\n// src/semantic/text.ts\nclass TextSemanticHandler {\n name = \"text\";\n async resolve(transport, location, context) {\n const result = await transport.get(location, context.params);\n if (result.metadata?.type === \"directory\") {\n const meta2 = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.content.length,\n encoding: \"utf-8\",\n mimeType: \"application/json\",\n resolvedAt: context.timestamp.toISOString(),\n type: \"directory\"\n };\n return {\n type: \"text\",\n content: result.content.toString(\"utf-8\"),\n meta: meta2\n };\n }\n const text = result.content.toString(\"utf-8\");\n const meta = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.metadata?.size ?? 
result.content.length,\n encoding: \"utf-8\",\n mimeType: \"text/plain\",\n resolvedAt: context.timestamp.toISOString(),\n type: \"file\"\n };\n return {\n type: \"text\",\n content: text,\n meta\n };\n }\n async deposit(transport, location, data, context) {\n const buffer = Buffer.from(data, \"utf-8\");\n try {\n await transport.set(location, buffer, context.params);\n } catch (error) {\n throw new SemanticError(`Failed to deposit text to \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n async exists(transport, location, _context) {\n return transport.exists(location);\n }\n async delete(transport, location, _context) {\n try {\n await transport.delete(location);\n } catch (error) {\n throw new SemanticError(`Failed to delete \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n}\nvar textSemantic = new TextSemanticHandler;\n// src/semantic/binary.ts\nfunction toBuffer(data) {\n if (Buffer.isBuffer(data)) {\n return data;\n }\n if (data instanceof Uint8Array) {\n return Buffer.from(data);\n }\n if (data instanceof ArrayBuffer) {\n return Buffer.from(data);\n }\n if (Array.isArray(data)) {\n return Buffer.from(data);\n }\n throw new SemanticError(`Unsupported binary input type`, \"binary\");\n}\n\nclass BinarySemanticHandler {\n name = \"binary\";\n async resolve(transport, location, context) {\n const result = await transport.get(location, context.params);\n const meta = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.metadata?.size ?? result.content.length,\n resolvedAt: context.timestamp.toISOString(),\n type: result.metadata?.type\n };\n return {\n type: \"binary\",\n content: result.content,\n meta\n };\n }\n async deposit(transport, location, data, context) {\n const buffer = toBuffer(data);\n try {\n await transport.set(location, buffer, context.params);\n } catch (error) {\n throw new SemanticError(`Failed to deposit binary to \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n async exists(transport, location, _context) {\n return transport.exists(location);\n }\n async delete(transport, location, _context) {\n try {\n await transport.delete(location);\n } catch (error) {\n throw new SemanticError(`Failed to delete \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n}\nvar binarySemantic = new BinarySemanticHandler;\n// src/ARP.ts\nclass ARP {\n transports;\n semantics;\n constructor(config = {}) {\n this.transports = new Map;\n this.semantics = new Map;\n const defaultTransports = [fileTransport, httpTransport, httpsTransport];\n const defaultSemantics = [textSemantic, binarySemantic];\n for (const handler of defaultTransports) {\n this.transports.set(handler.name, handler);\n }\n for (const handler of defaultSemantics) {\n this.semantics.set(handler.name, handler);\n }\n if (config.transports) {\n for (const handler of config.transports) {\n this.transports.set(handler.name, handler);\n }\n }\n if (config.semantics) {\n for (const handler of config.semantics) {\n this.semantics.set(handler.name, handler);\n }\n }\n }\n registerTransport(handler) {\n this.transports.set(handler.name, handler);\n }\n registerSemantic(handler) {\n this.semantics.set(handler.name, handler);\n }\n getTransportHandler(name) {\n const handler = this.transports.get(name);\n if (!handler) {\n throw new TransportError(`Unsupported transport type: ${name}`, name);\n }\n return handler;\n }\n getSemanticHandler(name) {\n const handler 
= this.semantics.get(name);\n if (!handler) {\n throw new SemanticError(`Unsupported semantic type: ${name}`, name);\n }\n return handler;\n }\n parse(url) {\n if (!url.startsWith(\"arp:\")) {\n throw new ParseError(`Invalid ARP URL: must start with \"arp:\"`, url);\n }\n const content = url.substring(4);\n const separatorIndex = content.indexOf(\"://\");\n if (separatorIndex === -1) {\n throw new ParseError(`Invalid ARP URL: missing \"://\"`, url);\n }\n const typePart = content.substring(0, separatorIndex);\n const location = content.substring(separatorIndex + 3);\n const colonIndex = typePart.indexOf(\":\");\n if (colonIndex === -1) {\n throw new ParseError(`Invalid ARP URL: must have exactly 2 types (semantic:transport)`, url);\n }\n const semantic = typePart.substring(0, colonIndex);\n const transport = typePart.substring(colonIndex + 1);\n if (!semantic) {\n throw new ParseError(`Invalid ARP URL: semantic type cannot be empty`, url);\n }\n if (!transport) {\n throw new ParseError(`Invalid ARP URL: transport type cannot be empty`, url);\n }\n if (!location) {\n throw new ParseError(`Invalid ARP URL: location cannot be empty`, url);\n }\n this.getTransportHandler(transport);\n this.getSemanticHandler(semantic);\n return new ARL(semantic, transport, location, this);\n }\n}\nfunction createARP(config) {\n return new ARP(config);\n}\n\n// src/index.ts\nvar VERSION = \"2.4.0\";\nexport {\n textSemantic,\n httpsTransport,\n httpTransport,\n fileTransport,\n createARP,\n binarySemantic,\n VERSION,\n TransportError,\n TextSemanticHandler,\n SemanticError,\n ParseError,\n HttpTransportHandler,\n FileTransportHandler,\n BinarySemanticHandler,\n ARPError,\n ARP\n};\n\n//# debugId=DB93635BA11734F364756E2164756E21\n",
10 +
"// src/errors.ts\nclass ARPError extends Error {\n constructor(message, options) {\n super(message, options);\n this.name = \"ARPError\";\n }\n}\n\nclass ParseError extends ARPError {\n url;\n constructor(message, url) {\n super(message);\n this.url = url;\n this.name = \"ParseError\";\n }\n}\n\nclass TransportError extends ARPError {\n transport;\n constructor(message, transport, options) {\n super(message, options);\n this.transport = transport;\n this.name = \"TransportError\";\n }\n}\n\nclass SemanticError extends ARPError {\n semantic;\n constructor(message, semantic, options) {\n super(message, options);\n this.semantic = semantic;\n this.name = \"SemanticError\";\n }\n}\n\n// src/ARL.ts\nclass ARL {\n semantic;\n transport;\n location;\n resolver;\n constructor(semantic, transport, location, resolver) {\n this.semantic = semantic;\n this.transport = transport;\n this.location = location;\n this.resolver = resolver;\n }\n createContext(params) {\n return {\n url: this.toString(),\n semantic: this.semantic,\n transport: this.transport,\n location: this.location,\n timestamp: new Date,\n params\n };\n }\n async resolve(params) {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext(params);\n return semantic.resolve(transport, this.location, context);\n }\n async deposit(data, params) {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext(params);\n if (!semantic.deposit) {\n throw new SemanticError(`Semantic \"${semantic.name}\" does not support deposit operation`, this.semantic);\n }\n await semantic.deposit(transport, this.location, data, context);\n }\n async exists() {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext();\n if (semantic.exists) {\n return semantic.exists(transport, this.location, context);\n }\n return transport.exists(this.location);\n }\n async delete() {\n const transport = this.resolver.getTransportHandler(this.transport);\n const semantic = this.resolver.getSemanticHandler(this.semantic);\n const context = this.createContext();\n if (semantic.delete) {\n return semantic.delete(transport, this.location, context);\n }\n await transport.delete(this.location);\n }\n async list(options) {\n const transport = this.resolver.getTransportHandler(this.transport);\n if (!transport.list) {\n throw new TransportError(`Transport \"${transport.name}\" does not support list operation`, this.transport);\n }\n return transport.list(this.location, options);\n }\n async mkdir() {\n const transport = this.resolver.getTransportHandler(this.transport);\n if (!transport.mkdir) {\n throw new TransportError(`Transport \"${transport.name}\" does not support mkdir operation`, this.transport);\n }\n await transport.mkdir(this.location);\n }\n toString() {\n return `arp:${this.semantic}:${this.transport}://${this.location}`;\n }\n}\n\n// src/transport/file.ts\nimport { readFile, writeFile, readdir, mkdir, rm, access, stat } from \"node:fs/promises\";\nimport { resolve, dirname, join } from \"node:path\";\nclass FileTransportHandler {\n name = \"file\";\n resolvePath(location) {\n return resolve(process.cwd(), location);\n }\n async get(location, params) {\n const filePath = this.resolvePath(location);\n try {\n const stats = 
await stat(filePath);\n if (stats.isDirectory()) {\n return this.getDirectory(filePath, stats, params);\n } else {\n return this.getFile(filePath, stats);\n }\n } catch (error) {\n const err = error;\n throw new TransportError(`File get error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async getFile(filePath, stats) {\n const content = await readFile(filePath);\n return {\n content,\n metadata: {\n type: \"file\",\n size: Number(stats.size),\n modifiedAt: stats.mtime\n }\n };\n }\n async getDirectory(dirPath, stats, params) {\n const recursive = params?.recursive === \"true\";\n const pattern = params?.pattern;\n let entries;\n if (recursive) {\n entries = await this.listRecursive(dirPath, dirPath);\n } else {\n entries = await readdir(dirPath);\n }\n if (pattern) {\n entries = this.filterByPattern(entries, pattern);\n }\n const content = Buffer.from(JSON.stringify(entries));\n return {\n content,\n metadata: {\n type: \"directory\",\n modifiedAt: stats.mtime\n }\n };\n }\n async listRecursive(basePath, currentPath) {\n const entries = await readdir(currentPath, { withFileTypes: true });\n const results = [];\n for (const entry of entries) {\n const fullPath = join(currentPath, entry.name);\n const relativePath = fullPath.substring(basePath.length + 1);\n if (entry.isDirectory()) {\n const subEntries = await this.listRecursive(basePath, fullPath);\n results.push(...subEntries);\n } else {\n results.push(relativePath);\n }\n }\n return results;\n }\n filterByPattern(entries, pattern) {\n const regexPattern = pattern.replace(/\\./g, \"\\\\.\").replace(/\\*/g, \".*\").replace(/\\?/g, \".\");\n const regex = new RegExp(`^${regexPattern}$`);\n return entries.filter((entry) => {\n const filename = entry.split(\"/\").pop() || entry;\n return regex.test(filename);\n });\n }\n async set(location, content, _params) {\n const filePath = this.resolvePath(location);\n try {\n await mkdir(dirname(filePath), { recursive: true });\n await writeFile(filePath, content);\n } catch (error) {\n const err = error;\n throw new TransportError(`File set error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async exists(location) {\n const filePath = this.resolvePath(location);\n try {\n await access(filePath);\n return true;\n } catch {\n return false;\n }\n }\n async delete(location) {\n const filePath = this.resolvePath(location);\n try {\n await rm(filePath, { recursive: true });\n } catch (error) {\n const err = error;\n if (err.code === \"ENOENT\") {\n return;\n }\n throw new TransportError(`File delete error: ${err.code} - ${filePath}`, this.name, {\n cause: err\n });\n }\n }\n async list(location, options) {\n const dirPath = this.resolvePath(location);\n try {\n let entries;\n if (options?.recursive) {\n entries = await this.listRecursive(dirPath, dirPath);\n } else {\n entries = await readdir(dirPath);\n }\n if (options?.pattern) {\n entries = this.filterByPattern(entries, options.pattern);\n }\n return entries;\n } catch (error) {\n const err = error;\n throw new TransportError(`File list error: ${err.code} - ${dirPath}`, this.name, {\n cause: err\n });\n }\n }\n async mkdir(location) {\n const dirPath = this.resolvePath(location);\n try {\n await mkdir(dirPath, { recursive: true });\n } catch (error) {\n const err = error;\n throw new TransportError(`File mkdir error: ${err.code} - ${dirPath}`, this.name, {\n cause: err\n });\n }\n }\n}\nvar fileTransport = new FileTransportHandler;\n// src/transport/http.ts\nclass HttpTransportHandler {\n name;\n 
protocol;\n constructor(protocol = \"https\") {\n this.protocol = protocol;\n this.name = protocol;\n }\n async get(location, params) {\n const url = this.buildUrl(location, params);\n try {\n const response = await fetch(url);\n if (!response.ok) {\n throw new TransportError(`HTTP ${response.status}: ${response.statusText} - ${url}`, this.name);\n }\n const arrayBuffer = await response.arrayBuffer();\n const content = Buffer.from(arrayBuffer);\n const contentType = response.headers.get(\"content-type\");\n const contentLength = response.headers.get(\"content-length\");\n const lastModified = response.headers.get(\"last-modified\");\n return {\n content,\n metadata: {\n type: \"file\",\n size: contentLength ? parseInt(contentLength, 10) : content.length,\n modifiedAt: lastModified ? new Date(lastModified) : undefined,\n contentType\n }\n };\n } catch (error) {\n if (error instanceof TransportError) {\n throw error;\n }\n throw new TransportError(`Network error: ${url}`, this.name, {\n cause: error\n });\n }\n }\n buildUrl(location, params) {\n const url = new URL(`${this.protocol}://${location}`);\n if (params) {\n for (const [key, value] of Object.entries(params)) {\n url.searchParams.set(key, value);\n }\n }\n return url.toString();\n }\n async set(_location, _content, _params) {\n throw new TransportError(\"HTTP transport is read-only, set not supported\", this.name);\n }\n async exists(location) {\n const url = `${this.protocol}://${location}`;\n try {\n const response = await fetch(url, { method: \"HEAD\" });\n return response.ok;\n } catch {\n return false;\n }\n }\n async delete(_location) {\n throw new TransportError(\"HTTP transport is read-only, delete not supported\", this.name);\n }\n}\nvar httpsTransport = new HttpTransportHandler(\"https\");\nvar httpTransport = new HttpTransportHandler(\"http\");\n// src/semantic/text.ts\nclass TextSemanticHandler {\n name = \"text\";\n async resolve(transport, location, context) {\n const result = await transport.get(location, context.params);\n if (result.metadata?.type === \"directory\") {\n const meta2 = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.content.length,\n encoding: \"utf-8\",\n mimeType: \"application/json\",\n resolvedAt: context.timestamp.toISOString(),\n type: \"directory\"\n };\n return {\n type: \"text\",\n content: result.content.toString(\"utf-8\"),\n meta: meta2\n };\n }\n const text = result.content.toString(\"utf-8\");\n const meta = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.metadata?.size ?? 
result.content.length,\n encoding: \"utf-8\",\n mimeType: \"text/plain\",\n resolvedAt: context.timestamp.toISOString(),\n type: \"file\"\n };\n return {\n type: \"text\",\n content: text,\n meta\n };\n }\n async deposit(transport, location, data, context) {\n const buffer = Buffer.from(data, \"utf-8\");\n try {\n await transport.set(location, buffer, context.params);\n } catch (error) {\n throw new SemanticError(`Failed to deposit text to \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n async exists(transport, location, _context) {\n return transport.exists(location);\n }\n async delete(transport, location, _context) {\n try {\n await transport.delete(location);\n } catch (error) {\n throw new SemanticError(`Failed to delete \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n}\nvar textSemantic = new TextSemanticHandler;\n// src/semantic/binary.ts\nfunction toBuffer(data) {\n if (Buffer.isBuffer(data)) {\n return data;\n }\n if (data instanceof Uint8Array) {\n return Buffer.from(data);\n }\n if (data instanceof ArrayBuffer) {\n return Buffer.from(data);\n }\n if (Array.isArray(data)) {\n return Buffer.from(data);\n }\n throw new SemanticError(`Unsupported binary input type`, \"binary\");\n}\n\nclass BinarySemanticHandler {\n name = \"binary\";\n async resolve(transport, location, context) {\n const result = await transport.get(location, context.params);\n const meta = {\n url: context.url,\n semantic: context.semantic,\n transport: context.transport,\n location: context.location,\n size: result.metadata?.size ?? result.content.length,\n resolvedAt: context.timestamp.toISOString(),\n type: result.metadata?.type\n };\n return {\n type: \"binary\",\n content: result.content,\n meta\n };\n }\n async deposit(transport, location, data, context) {\n const buffer = toBuffer(data);\n try {\n await transport.set(location, buffer, context.params);\n } catch (error) {\n throw new SemanticError(`Failed to deposit binary to \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n async exists(transport, location, _context) {\n return transport.exists(location);\n }\n async delete(transport, location, _context) {\n try {\n await transport.delete(location);\n } catch (error) {\n throw new SemanticError(`Failed to delete \"${location}\": ${error.message}`, this.name, { cause: error });\n }\n }\n}\nvar binarySemantic = new BinarySemanticHandler;\n// src/ARP.ts\nclass ARP {\n transports;\n semantics;\n constructor(config = {}) {\n this.transports = new Map;\n this.semantics = new Map;\n const defaultTransports = [fileTransport, httpTransport, httpsTransport];\n const defaultSemantics = [textSemantic, binarySemantic];\n for (const handler of defaultTransports) {\n this.transports.set(handler.name, handler);\n }\n for (const handler of defaultSemantics) {\n this.semantics.set(handler.name, handler);\n }\n if (config.transports) {\n for (const handler of config.transports) {\n this.transports.set(handler.name, handler);\n }\n }\n if (config.semantics) {\n for (const handler of config.semantics) {\n this.semantics.set(handler.name, handler);\n }\n }\n }\n registerTransport(handler) {\n this.transports.set(handler.name, handler);\n }\n registerSemantic(handler) {\n this.semantics.set(handler.name, handler);\n }\n getTransportHandler(name) {\n const handler = this.transports.get(name);\n if (!handler) {\n throw new TransportError(`Unsupported transport type: ${name}`, name);\n }\n return handler;\n }\n getSemanticHandler(name) {\n const handler 
= this.semantics.get(name);\n if (!handler) {\n throw new SemanticError(`Unsupported semantic type: ${name}`, name);\n }\n return handler;\n }\n parse(url) {\n if (!url.startsWith(\"arp:\")) {\n throw new ParseError(`Invalid ARP URL: must start with \"arp:\"`, url);\n }\n const content = url.substring(4);\n const separatorIndex = content.indexOf(\"://\");\n if (separatorIndex === -1) {\n throw new ParseError(`Invalid ARP URL: missing \"://\"`, url);\n }\n const typePart = content.substring(0, separatorIndex);\n const location = content.substring(separatorIndex + 3);\n const colonIndex = typePart.indexOf(\":\");\n if (colonIndex === -1) {\n throw new ParseError(`Invalid ARP URL: must have exactly 2 types (semantic:transport)`, url);\n }\n const semantic = typePart.substring(0, colonIndex);\n const transport = typePart.substring(colonIndex + 1);\n if (!semantic) {\n throw new ParseError(`Invalid ARP URL: semantic type cannot be empty`, url);\n }\n if (!transport) {\n throw new ParseError(`Invalid ARP URL: transport type cannot be empty`, url);\n }\n if (!location) {\n throw new ParseError(`Invalid ARP URL: location cannot be empty`, url);\n }\n this.getTransportHandler(transport);\n this.getSemanticHandler(semantic);\n return new ARL(semantic, transport, location, this);\n }\n}\nfunction createARP(config) {\n return new ARP(config);\n}\n\n// src/index.ts\nvar VERSION = \"2.4.1\";\nexport {\n textSemantic,\n httpsTransport,\n httpTransport,\n fileTransport,\n createARP,\n binarySemantic,\n VERSION,\n TransportError,\n TextSemanticHandler,\n SemanticError,\n ParseError,\n HttpTransportHandler,\n FileTransportHandler,\n BinarySemanticHandler,\n ARPError,\n ARP\n};\n\n//# debugId=BB58C24E142E2F3264756E2164756E21\n",
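For orientation, a minimal usage sketch of the ARP module bundled above, assuming its public exports match the export list at the end of the bundle; the file path is hypothetical:

import { createARP } from "@resourcexjs/arp";

const arp = createARP();

// parse() enforces the arp:<semantic>:<transport>://<location> shape and
// verifies both handlers are registered before returning an ARL.
const arl = arp.parse("arp:text:file://./notes/hello.txt"); // hypothetical path

// TextSemanticHandler.deposit() encodes to UTF-8 and writes through the file
// transport (creating parent directories); resolve() reads it back.
await arl.deposit("hello, world");
const { content, meta } = await arl.resolve();
console.log(content);       // "hello, world"
console.log(meta.mimeType); // "text/plain"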
11 11
"import { ResourceXError } from \"@resourcexjs/core\";\n\n/**\n * Registry-specific error.\n */\nexport class RegistryError extends ResourceXError {\n constructor(message: string, options?: ErrorOptions) {\n super(message, options);\n this.name = \"RegistryError\";\n }\n}\n",
12 12
"import type { RXR } from \"@resourcexjs/core\";\nimport type { IsolatorType, ResolveContext } from \"@resourcexjs/type\";\n\n/**\n * ResolverExecutor - Executes bundled resolver code in sandbox.\n *\n * Responsibilities:\n * 1. Pre-process RXR → ResolveContext (extract files)\n * 2. Serialize context for sandbox\n * 3. Execute resolver code in sandbox\n * 4. Return result\n */\nexport interface ResolverExecutor {\n /**\n * Execute resolver code with the given RXR and arguments.\n *\n * @param code - Bundled resolver code string\n * @param rxr - Resource to resolve\n * @param args - Optional arguments for the resolver\n * @returns Result from the resolver\n */\n execute<TResult>(code: string, rxr: RXR, args?: unknown): Promise<TResult>;\n}\n\n/**\n * Create a ResolverExecutor for the given isolator type.\n *\n * @param isolator - SandboX isolator type\n * @returns ResolverExecutor instance\n *\n * @example\n * const executor = createResolverExecutor(\"none\");\n * const result = await executor.execute(code, rxr);\n */\nexport function createResolverExecutor(isolator: IsolatorType): ResolverExecutor {\n return new SandboxResolverExecutor(isolator);\n}\n\n/**\n * Convert RXR to ResolveContext (pure data, serializable).\n */\nasync function toResolveContext(rxr: RXR): Promise<ResolveContext> {\n const pkg = await rxr.archive.extract();\n const filesMap = await pkg.files();\n\n // Convert Map<string, Buffer> to Record<string, Uint8Array>\n const files: Record<string, Uint8Array> = {};\n for (const [path, buffer] of filesMap) {\n files[path] = new Uint8Array(buffer);\n }\n\n return {\n manifest: {\n domain: rxr.manifest.domain,\n path: rxr.manifest.path,\n name: rxr.manifest.name,\n type: rxr.manifest.type,\n version: rxr.manifest.version,\n },\n files,\n };\n}\n\n/**\n * Parse bundled code to extract resolver variable name.\n *\n * Code formats:\n * 1. ESM bundled: `// @resolver: varName\\n...code...`\n * 2. Legacy object literal: `({ async resolve(ctx, args) { ... } })`\n *\n * @returns { isEsm: boolean, varName?: string }\n */\nfunction parseCodeFormat(code: string): { isEsm: boolean; varName?: string } {\n const match = code.match(/^\\/\\/\\s*@resolver:\\s*(\\w+)/);\n if (match) {\n return { isEsm: true, varName: match[1] };\n }\n return { isEsm: false };\n}\n\n/**\n * SandboX-based ResolverExecutor implementation.\n *\n * Uses SandboX for code execution with configurable isolation levels.\n * Supports both ESM bundled code and legacy object literal format.\n */\nclass SandboxResolverExecutor implements ResolverExecutor {\n constructor(private readonly isolator: IsolatorType) {}\n\n async execute<TResult>(code: string, rxr: RXR, args?: unknown): Promise<TResult> {\n // 1. Pre-process: RXR → ResolveContext\n const ctx = await toResolveContext(rxr);\n\n // 2. Import SandboX\n const { createSandbox } = await import(\"sandboxxjs\");\n\n const sandbox = createSandbox({\n isolator: this.isolator,\n runtime: \"node\",\n });\n\n try {\n // 3. Serialize context and args\n const ctxJson = JSON.stringify(ctx, (_, value) => {\n // Convert Uint8Array to array for JSON serialization\n if (value instanceof Uint8Array) {\n return { __type: \"Uint8Array\", data: Array.from(value) };\n }\n return value;\n });\n const argsJson = args !== undefined ? JSON.stringify(args) : \"undefined\";\n\n // 4. Parse code format\n const { isEsm, varName } = parseCodeFormat(code);\n\n // 5. 
Build execution code based on format\n let executionCode: string;\n\n if (isEsm && varName) {\n // ESM bundled code: variable is already declared, use it directly\n executionCode = `\n ${code}\n\n (async () => {\n // Deserialize context\n const ctxRaw = ${ctxJson};\n const ctx = {\n manifest: ctxRaw.manifest,\n files: {}\n };\n for (const [path, value] of Object.entries(ctxRaw.files)) {\n ctx.files[path] = new Uint8Array(value.data);\n }\n\n const args = ${argsJson};\n\n // Execute resolver using the exported variable\n const result = await ${varName}.resolve(ctx, args);\n\n // Output result via console.log (execute() returns stdout)\n console.log(JSON.stringify(result));\n })();\n `;\n } else {\n // Legacy object literal format: ({ async resolve(ctx, args) { ... } })\n executionCode = `\n (async () => {\n // Deserialize context\n const ctxRaw = ${ctxJson};\n const ctx = {\n manifest: ctxRaw.manifest,\n files: {}\n };\n for (const [path, value] of Object.entries(ctxRaw.files)) {\n ctx.files[path] = new Uint8Array(value.data);\n }\n\n const args = ${argsJson};\n\n // Execute resolver (legacy object literal)\n const resolver = ${code};\n const result = await resolver.resolve(ctx, args);\n\n // Output result via console.log (execute() returns stdout)\n console.log(JSON.stringify(result));\n })();\n `;\n }\n\n // 6. Execute in sandbox (script mode - returns stdout)\n const { stdout } = await sandbox.execute(executionCode);\n\n // 7. Parse stdout and return result\n return JSON.parse(stdout.trim()) as TResult;\n } finally {\n await sandbox.destroy();\n }\n }\n}\n",
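parseCodeFormat above keys off the first line of the bundled code. The two accepted shapes, sketched here with hypothetical resolver bodies:

// 1. ESM-bundled: a leading "@resolver" comment names the variable whose
//    .resolve(ctx, args) the sandbox wrapper invokes after inlining the code.
const esmCode = `// @resolver: demo_type_default
var demo_type_default = {
  async resolve(ctx) {
    return new TextDecoder().decode(ctx.files["content"]);
  }
};`;

// 2. Legacy: the whole string is an object-literal expression that the
//    wrapper assigns to a local and calls .resolve() on directly.
const legacyCode = `({
  async resolve(ctx, args) {
    return Object.keys(ctx.files);
  }
})`;

// Either string can be passed to execute(); building an RXR is elided here.
// const executor = createResolverExecutor("none");
// const names = await executor.execute<string[]>(legacyCode, rxr);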
13 13
"import type { RXR, RXL, ManifestData } from \"@resourcexjs/core\";\nimport { parseRXL, createRXM, createRXA } from \"@resourcexjs/core\";\nimport { TypeHandlerChain, ResourceTypeError } from \"@resourcexjs/type\";\nimport type { BundledType, ResolvedResource, IsolatorType } from \"@resourcexjs/type\";\nimport { loadResource } from \"@resourcexjs/loader\";\nimport type { Storage, SearchOptions } from \"./storage/index.js\";\nimport { LocalStorage } from \"./storage/index.js\";\nimport { RegistryError } from \"./errors.js\";\nimport { createResolverExecutor } from \"./executor/index.js\";\nimport type { ResolverExecutor } from \"./executor/index.js\";\n\n/**\n * Well-known response format.\n */\ninterface WellKnownResponse {\n version?: string;\n registries: string[];\n}\n\n/**\n * Registry configuration.\n */\nexport interface RegistryConfig {\n /**\n * Storage backend. Defaults to LocalStorage.\n */\n storage?: Storage;\n\n /**\n * Mirror URL for remote fetch (client mode).\n * If configured, tries mirror before well-known discovery.\n */\n mirror?: string;\n\n /**\n * Additional custom resource types to support.\n * Built-in types (text, json, binary) are always included by default.\n */\n types?: BundledType[];\n\n /**\n * Isolator type for resolver execution (SandboX).\n * - \"none\": No isolation, fastest (~10ms), for development\n * - \"srt\": OS-level isolation (~50ms), secure local dev\n * - \"cloudflare\": Container isolation (~100ms), local Docker or edge\n * - \"e2b\": MicroVM isolation (~150ms), production (planned)\n */\n isolator?: IsolatorType;\n}\n\n/**\n * Registry interface for resource management.\n */\nexport interface Registry {\n /**\n * Add support for a custom resource type.\n */\n supportType(type: BundledType): void;\n\n /**\n * Link a development directory.\n * Creates a symlink so changes are reflected immediately.\n * Only supported by LocalStorage.\n */\n link(path: string): Promise<void>;\n\n /**\n * Add resource to storage.\n * @param source - Resource directory path or RXR object\n */\n add(source: string | RXR): Promise<void>;\n\n /**\n * Get raw resource by locator.\n *\n * Flow:\n * - localhost: Only queries local storage\n * - Other domains: Local cache -> [Mirror] -> Source (well-known)\n */\n get(locator: string): Promise<RXR>;\n\n /**\n * Resolve resource by locator.\n * Returns ResolvedResource with execute function.\n */\n resolve<TArgs = void, TResult = unknown>(\n locator: string\n ): Promise<ResolvedResource<TArgs, TResult>>;\n\n /**\n * Check if resource exists.\n */\n exists(locator: string): Promise<boolean>;\n\n /**\n * Delete resource from storage.\n */\n delete(locator: string): Promise<void>;\n\n /**\n * Search for resources.\n */\n search(options?: SearchOptions): Promise<RXL[]>;\n}\n\n/**\n * Default Registry implementation.\n *\n * Combines Storage (for CRUD) with TypeHandlerChain (for type resolution).\n * Supports remote fetch for non-localhost domains.\n */\nexport class DefaultRegistry implements Registry {\n private readonly storage: Storage;\n private readonly mirror?: string;\n private readonly typeHandler: TypeHandlerChain;\n private readonly executor: ResolverExecutor;\n\n // Cache for discovered endpoints\n private readonly discoveryCache = new Map<string, string>();\n\n constructor(config?: RegistryConfig) {\n this.storage = config?.storage ?? 
new LocalStorage();\n this.mirror = config?.mirror;\n // TypeHandlerChain includes builtin types by default\n this.typeHandler = TypeHandlerChain.create();\n // Create executor with configured isolator\n this.executor = createResolverExecutor(config?.isolator ?? \"none\");\n\n // Register additional custom types\n if (config?.types) {\n for (const type of config.types) {\n this.typeHandler.register(type);\n }\n }\n }\n\n supportType(type: BundledType): void {\n this.typeHandler.register(type);\n }\n\n async link(path: string): Promise<void> {\n // Only LocalStorage supports link\n if (this.storage instanceof LocalStorage) {\n return this.storage.link(path);\n }\n throw new RegistryError(`${this.storage.type} storage does not support link`);\n }\n\n async add(source: string | RXR): Promise<void> {\n // Load resource if path is provided\n const rxr = typeof source === \"string\" ? await loadResource(source) : source;\n\n // Validate type is supported before storing\n const typeName = rxr.manifest.type;\n if (!this.typeHandler.canHandle(typeName)) {\n throw new ResourceTypeError(`Unsupported resource type: ${typeName}`);\n }\n\n await this.storage.put(rxr);\n }\n\n async get(locator: string): Promise<RXR> {\n const rxl = parseRXL(locator);\n const domain = rxl.domain ?? \"localhost\";\n\n // 1. Always check local storage first\n if (await this.storage.exists(locator)) {\n return this.storage.get(locator);\n }\n\n // 2. localhost: Only local, never go remote\n if (domain === \"localhost\") {\n throw new RegistryError(`Resource not found: ${locator}`);\n }\n\n // 3. Remote domain: Try mirror -> source\n const rxr = await this.fetchRemote(locator, domain);\n\n // 4. Cache to local storage\n await this.storage.put(rxr);\n\n return rxr;\n }\n\n async resolve<TArgs = void, TResult = unknown>(\n locator: string\n ): Promise<ResolvedResource<TArgs, TResult>> {\n const rxr = await this.get(locator);\n const handler = this.typeHandler.getHandler(rxr.manifest.type);\n\n return {\n resource: rxr,\n schema: handler.schema,\n execute: async (args?: TArgs) => {\n return this.executor.execute<TResult>(handler.code, rxr, args);\n },\n } as ResolvedResource<TArgs, TResult>;\n }\n\n async exists(locator: string): Promise<boolean> {\n // Check local storage\n if (await this.storage.exists(locator)) {\n return true;\n }\n\n // For localhost, that's it\n const rxl = parseRXL(locator);\n const domain = rxl.domain ?? 
\"localhost\";\n if (domain === \"localhost\") {\n return false;\n }\n\n // For remote domains, could check remote (but expensive)\n // For now, just return false - caller can try get() to trigger fetch\n return false;\n }\n\n async delete(locator: string): Promise<void> {\n return this.storage.delete(locator);\n }\n\n async search(options?: SearchOptions): Promise<RXL[]> {\n return this.storage.search(options);\n }\n\n // ============================================\n // Remote fetch implementation\n // ============================================\n\n /**\n * Fetch resource from remote.\n * Flow: Mirror (if configured) -> Source (well-known)\n */\n private async fetchRemote(locator: string, domain: string): Promise<RXR> {\n // Try mirror first if configured\n if (this.mirror) {\n try {\n return await this.fetchFromEndpoint(this.mirror, locator);\n } catch {\n // Mirror miss, fall through to source\n }\n }\n\n // Discover source via well-known\n const endpoint = await this.discoverEndpoint(domain);\n return await this.fetchFromEndpoint(endpoint, locator);\n }\n\n /**\n * Discover registry endpoint for a domain via well-known.\n */\n private async discoverEndpoint(domain: string): Promise<string> {\n // Check cache\n const cached = this.discoveryCache.get(domain);\n if (cached) {\n return cached;\n }\n\n // Fetch well-known\n const wellKnownUrl = `https://${domain}/.well-known/resourcex`;\n const response = await fetch(wellKnownUrl);\n\n if (!response.ok) {\n throw new RegistryError(`Well-known discovery failed for ${domain}: ${response.statusText}`);\n }\n\n const data = (await response.json()) as WellKnownResponse;\n\n if (!data.registries || !Array.isArray(data.registries) || data.registries.length === 0) {\n throw new RegistryError(\n `Invalid well-known response for ${domain}: missing or empty registries`\n );\n }\n\n // Use first registry (primary)\n const endpoint = data.registries[0];\n this.discoveryCache.set(domain, endpoint);\n\n return endpoint;\n }\n\n /**\n * Fetch resource from a specific endpoint via HTTP API.\n */\n private async fetchFromEndpoint(endpoint: string, locator: string): Promise<RXR> {\n // Remove trailing slash\n const baseUrl = endpoint.replace(/\\/$/, \"\");\n\n // Fetch manifest\n const manifestUrl = `${baseUrl}/resource?locator=${encodeURIComponent(locator)}`;\n const manifestResponse = await fetch(manifestUrl);\n\n if (!manifestResponse.ok) {\n if (manifestResponse.status === 404) {\n throw new RegistryError(`Resource not found: ${locator}`);\n }\n throw new RegistryError(`Failed to fetch resource: ${manifestResponse.statusText}`);\n }\n\n const manifestData = (await manifestResponse.json()) as ManifestData;\n const manifest = createRXM(manifestData);\n\n // Fetch content\n const contentUrl = `${baseUrl}/content?locator=${encodeURIComponent(locator)}`;\n const contentResponse = await fetch(contentUrl);\n\n if (!contentResponse.ok) {\n throw new RegistryError(`Failed to fetch content: ${contentResponse.statusText}`);\n }\n\n const contentBuffer = Buffer.from(await contentResponse.arrayBuffer());\n\n return {\n locator: parseRXL(manifest.toLocator()),\n manifest,\n archive: await createRXA({ buffer: contentBuffer }),\n };\n }\n}\n",
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@resourcexjs/registry",
-  "version": "2.4.0",
+  "version": "2.4.1",
   "description": "ResourceX Registry - Resource storage and retrieval",
   "keywords": [
     "resourcex",
@@ -37,10 +37,10 @@
     "clean": "rm -rf dist"
   },
   "dependencies": {
-    "@resourcexjs/arp": "^2.4.0",
-    "@resourcexjs/core": "^2.4.0",
-    "@resourcexjs/loader": "^2.4.0",
-    "@resourcexjs/type": "^2.4.0",
+    "@resourcexjs/arp": "^2.4.1",
+    "@resourcexjs/core": "^2.4.1",
+    "@resourcexjs/loader": "^2.4.1",
+    "@resourcexjs/type": "^2.4.1",
     "isomorphic-git": "^1.36.1",
     "sandboxxjs": "^0.5.1"
   },