agentikit 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +110 -7
- package/dist/index.d.ts +7 -3
- package/dist/index.js +4 -1
- package/dist/src/cli.js +156 -47
- package/dist/src/config.d.ts +6 -0
- package/dist/src/config.js +50 -0
- package/dist/src/metadata.d.ts +1 -0
- package/dist/src/metadata.js +16 -0
- package/dist/src/registry-install.d.ts +11 -0
- package/dist/src/registry-install.js +208 -0
- package/dist/src/registry-resolve.d.ts +3 -0
- package/dist/src/registry-resolve.js +231 -0
- package/dist/src/registry-search.d.ts +5 -0
- package/dist/src/registry-search.js +129 -0
- package/dist/src/registry-types.d.ts +55 -0
- package/dist/src/registry-types.js +1 -0
- package/dist/src/stash-add.d.ts +4 -0
- package/dist/src/stash-add.js +59 -0
- package/dist/src/stash-registry.d.ts +18 -0
- package/dist/src/stash-registry.js +221 -0
- package/dist/src/stash-search.d.ts +3 -1
- package/dist/src/stash-search.js +236 -21
- package/dist/src/stash-show.js +10 -3
- package/dist/src/stash-types.d.ts +165 -1
- package/dist/src/stash.d.ts +3 -1
- package/dist/src/stash.js +2 -0
- package/package.json +1 -1
- package/src/cli.ts +166 -46
- package/src/config.ts +59 -0
- package/src/metadata.ts +16 -0
- package/src/registry-install.ts +245 -0
- package/src/registry-resolve.ts +272 -0
- package/src/registry-search.ts +145 -0
- package/src/registry-types.ts +64 -0
- package/src/stash-add.ts +66 -0
- package/src/stash-registry.ts +259 -0
- package/src/stash-search.ts +275 -23
- package/src/stash-show.ts +10 -2
- package/src/stash-types.ts +176 -1
- package/src/stash.ts +15 -0
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { TYPE_DIRS } from "./common";
|
|
5
|
+
import { loadConfig, saveConfig } from "./config";
|
|
6
|
+
import { parseRegistryRef, resolveRegistryArtifact } from "./registry-resolve";
|
|
7
|
+
// Directory names (derived from TYPE_DIRS) whose presence marks a stash layout.
const REGISTRY_STASH_DIR_NAMES = new Set(Object.values(TYPE_DIRS));
|
|
8
|
+
/**
 * Resolves a registry ref (npm or GitHub), downloads its tarball into a fresh
 * per-install cache directory, extracts it, and locates the stash root inside
 * the extracted tree. Returns an install record describing the result.
 */
export async function installRegistryRef(ref, options) {
    const resolved = await resolveRegistryArtifact(parseRegistryRef(ref));
    const timestamp = (options?.now ?? new Date()).toISOString();
    const cacheRoot = options?.cacheRootDir ?? getRegistryCacheRootDir();
    const installDir = buildInstallCacheDir(cacheRoot, resolved.source, resolved.id);
    const tarballPath = path.join(installDir, "artifact.tar.gz");
    const extractedDir = path.join(installDir, "extracted");
    fs.mkdirSync(installDir, { recursive: true });
    await downloadArchive(resolved.artifactUrl, tarballPath);
    extractTarGzSecure(tarballPath, extractedDir);
    return {
        id: resolved.id,
        source: resolved.source,
        ref: resolved.ref,
        artifactUrl: resolved.artifactUrl,
        resolvedVersion: resolved.resolvedVersion,
        resolvedRevision: resolved.resolvedRevision,
        installedAt: timestamp,
        cacheDir: installDir,
        extractedDir,
        stashRoot: detectStashRoot(extractedDir),
    };
}
|
|
33
|
+
/**
 * Adds or replaces a registry-installed entry in the stash config, keeping
 * additionalStashDirs in sync: previously registry-managed roots are dropped
 * and the current set of registry roots is re-appended (deduplicated), while
 * user-managed dirs are preserved.
 */
export function upsertInstalledRegistryEntry(entry, stashDir) {
    const config = loadConfig(stashDir);
    const installed = config.registry?.installed ?? [];
    const oldRoots = new Set(installed.map((item) => path.resolve(item.stashRoot)));
    const nextInstalled = installed
        .filter((item) => item.id !== entry.id)
        .concat(normalizeInstalledEntry(entry));
    const newRoots = new Set(nextInstalled.map((item) => path.resolve(item.stashRoot)));
    // Dirs not previously owned by the registry are user-managed; keep them.
    const userDirs = config.additionalStashDirs.filter((dir) => !oldRoots.has(path.resolve(dir)));
    const nextConfig = {
        ...config,
        additionalStashDirs: uniquePaths([...userDirs, ...newRoots]),
        registry: {
            installed: nextInstalled,
        },
    };
    saveConfig(nextConfig, stashDir);
    return nextConfig;
}
|
|
52
|
+
/**
 * Removes a registry-installed entry by id and re-syncs additionalStashDirs
 * the same way upsert does. When the last entry is removed, the `registry`
 * section is dropped from the config entirely.
 */
export function removeInstalledRegistryEntry(id, stashDir) {
    const config = loadConfig(stashDir);
    const installed = config.registry?.installed ?? [];
    const oldRoots = new Set(installed.map((item) => path.resolve(item.stashRoot)));
    const remaining = installed.filter((item) => item.id !== id);
    const newRoots = new Set(remaining.map((item) => path.resolve(item.stashRoot)));
    // Dirs not previously owned by the registry are user-managed; keep them.
    const userDirs = config.additionalStashDirs.filter((dir) => !oldRoots.has(path.resolve(dir)));
    const nextConfig = {
        ...config,
        additionalStashDirs: uniquePaths([...userDirs, ...newRoots]),
        registry: remaining.length > 0 ? { installed: remaining } : undefined,
    };
    saveConfig(nextConfig, stashDir);
    return nextConfig;
}
|
|
68
|
+
/**
 * Returns the root directory for registry download caches.
 * Preference order: $XDG_CACHE_HOME/agentikit/registry, then
 * $HOME/.cache/agentikit/registry (falling back to %USERPROFILE% on Windows,
 * where HOME is often unset).
 * @throws Error when no usable base directory can be determined.
 */
export function getRegistryCacheRootDir() {
    const xdgCache = process.env.XDG_CACHE_HOME?.trim();
    if (xdgCache) {
        return path.join(path.resolve(xdgCache), "agentikit", "registry");
    }
    // USERPROFILE fallback keeps the function usable on Windows shells.
    const home = process.env.HOME?.trim() || process.env.USERPROFILE?.trim();
    if (!home) {
        throw new Error("Unable to determine cache directory. Set XDG_CACHE_HOME or HOME.");
    }
    return path.join(path.resolve(home), ".cache", "agentikit", "registry");
}
|
|
79
|
+
/**
 * Heuristically locates the stash root inside an extracted archive:
 * 1. the extraction root itself, when it has a `.stash` dir or stash type dirs;
 * 2. an `opencode` subdirectory containing stash type dirs;
 * 3. the shallowest descendant directory containing a `.stash` dir;
 * 4. otherwise the extraction root as-is.
 */
export function detectStashRoot(extractedDir) {
    const root = path.resolve(extractedDir);
    if (isDirectory(path.join(root, ".stash")) || hasStashDirs(root)) {
        return root;
    }
    const opencodeDir = path.join(root, "opencode");
    if (hasStashDirs(opencodeDir)) {
        return opencodeDir;
    }
    return findShallowestDotStashRoot(root) ?? root;
}
|
|
97
|
+
// Builds a unique cache directory: <root>/<source>-<sanitized id>/<millis>-<nonce>.
function buildInstallCacheDir(cacheRootDir, source, id) {
    const sanitizedId = id.replace(/[^a-zA-Z0-9_.-]+/g, "-").replace(/^-+|-+$/g, "");
    const slug = `${source}-${sanitizedId}`;
    const nonce = Math.random().toString(36).slice(2, 10);
    return path.join(cacheRootDir, slug || source, `${Date.now()}-${nonce}`);
}
|
|
102
|
+
// Fetches the artifact over HTTP(S) and writes the raw bytes to `destination`.
// Throws on any non-2xx response.
async function downloadArchive(url, destination) {
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error(`Failed to download archive (${response.status}) from ${url}`);
    }
    const bytes = Buffer.from(await response.arrayBuffer());
    fs.writeFileSync(destination, bytes);
}
|
|
110
|
+
// Lists the archive and validates every entry (no NULs, absolute paths, or
// traversal) before extracting with the system `tar`, stripping the top-level
// directory. The destination directory is recreated from scratch.
function extractTarGzSecure(archivePath, destinationDir) {
    const listing = spawnSync("tar", ["tzf", archivePath], { encoding: "utf8" });
    if (listing.status !== 0) {
        const reason = listing.stderr?.trim() || listing.error?.message || "unknown error";
        throw new Error(`Failed to inspect archive ${archivePath}: ${reason}`);
    }
    validateTarEntries(listing.stdout);
    fs.rmSync(destinationDir, { recursive: true, force: true });
    fs.mkdirSync(destinationDir, { recursive: true });
    const extraction = spawnSync("tar", ["xzf", archivePath, "--strip-components=1", "-C", destinationDir], {
        encoding: "utf8",
    });
    if (extraction.status !== 0) {
        const reason = extraction.stderr?.trim() || extraction.error?.message || "unknown error";
        throw new Error(`Failed to extract archive ${archivePath}: ${reason}`);
    }
}
|
|
127
|
+
// Rejects archive listings containing NUL bytes, absolute paths, or entries
// that would escape the destination — both as listed and after simulating the
// --strip-components=1 applied at extraction time.
function validateTarEntries(listOutput) {
    for (const rawLine of listOutput.split(/\r?\n/)) {
        if (!rawLine)
            continue;
        const entry = rawLine.trim();
        if (!entry || entry.includes("\0")) {
            throw new Error(`Archive contains an invalid entry: ${JSON.stringify(rawLine)}`);
        }
        if (entry.startsWith("/")) {
            throw new Error(`Archive contains an absolute path entry: ${entry}`);
        }
        const normalized = path.posix.normalize(entry);
        if (normalized === ".." || normalized.startsWith("../")) {
            throw new Error(`Archive contains a path traversal entry: ${entry}`);
        }
        // Re-check the path left over after the first component is stripped.
        const afterStrip = normalized.split("/").filter(Boolean).slice(1).join("/");
        if (!afterStrip)
            continue;
        const normalizedStrip = path.posix.normalize(afterStrip);
        if (normalizedStrip === ".." || normalizedStrip.startsWith("../") || path.posix.isAbsolute(normalizedStrip)) {
            throw new Error(`Archive contains an unsafe entry after strip-components: ${entry}`);
        }
    }
}
|
|
151
|
+
// True when `target` exists and is a directory; stat failures count as "no".
function isDirectory(target) {
    try {
        return fs.statSync(target).isDirectory();
    }
    catch {
        return false;
    }
}
// True when `dirPath` directly contains at least one known stash type directory.
function hasStashDirs(dirPath) {
    if (!isDirectory(dirPath))
        return false;
    return fs
        .readdirSync(dirPath, { withFileTypes: true })
        .some((entry) => entry.isDirectory() && REGISTRY_STASH_DIR_NAMES.has(entry.name));
}
// Breadth-first search for the shallowest directory containing a `.stash`
// subdirectory, skipping `.git` and `node_modules`. Returns undefined if none.
function findShallowestDotStashRoot(root) {
    const pending = [root];
    while (pending.length > 0) {
        const dir = pending.shift();
        if (isDirectory(path.join(dir, ".stash"))) {
            return dir;
        }
        let entries;
        try {
            entries = fs.readdirSync(dir, { withFileTypes: true });
        }
        catch {
            continue;
        }
        for (const entry of entries) {
            if (entry.isDirectory() && entry.name !== ".git" && entry.name !== "node_modules") {
                pending.push(path.join(dir, entry.name));
            }
        }
    }
    return undefined;
}
// Returns a copy of the entry with its path fields made absolute.
function normalizeInstalledEntry(entry) {
    return {
        ...entry,
        stashRoot: path.resolve(entry.stashRoot),
        cacheDir: path.resolve(entry.cacheDir),
    };
}
// Resolves each path and drops duplicates, preserving first-seen order.
function uniquePaths(paths) {
    const seen = new Set();
    const unique = [];
    for (const candidate of paths) {
        const resolved = path.resolve(candidate);
        if (!seen.has(resolved)) {
            seen.add(resolved);
            unique.push(resolved);
        }
    }
    return unique;
}
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
import type { ParsedRegistryRef, ResolvedRegistryArtifact } from "./registry-types";
/** Parses a user-supplied registry ref ("npm:…", "github:…", a GitHub URL, or shorthand). */
export declare function parseRegistryRef(rawRef: string): ParsedRegistryRef;
/** Resolves a parsed ref to a concrete downloadable artifact (tarball URL plus version/revision). */
export declare function resolveRegistryArtifact(parsed: ParsedRegistryRef): Promise<ResolvedRegistryArtifact>;
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
// Base URL for all GitHub REST API requests below.
const GITHUB_API_BASE = "https://api.github.com";
|
|
2
|
+
/**
 * Parses a raw registry ref into a typed descriptor.
 * Accepted forms: "npm:<pkg>", "github:<owner>/<repo>", http(s) GitHub URLs,
 * "owner/repo[#ref]" shorthand, and bare npm package names (anything starting
 * with "@", or anything that does not look like owner/repo).
 */
export function parseRegistryRef(rawRef) {
    const ref = rawRef.trim();
    if (!ref)
        throw new Error("Registry ref is required.");
    if (ref.startsWith("npm:"))
        return parseNpmRef(ref.slice(4), ref);
    if (ref.startsWith("github:"))
        return parseGithubShorthand(ref.slice(7), ref);
    if (ref.startsWith("http://") || ref.startsWith("https://"))
        return parseGithubUrl(ref);
    const isGithubShorthand = !ref.startsWith("@") && looksLikeGithubOwnerRepo(ref);
    return isGithubShorthand ? parseGithubShorthand(ref, ref) : parseNpmRef(ref, ref);
}
|
|
20
|
+
/** Dispatches artifact resolution to the npm or GitHub backend by source. */
export async function resolveRegistryArtifact(parsed) {
    return parsed.source === "npm"
        ? resolveNpmArtifact(parsed)
        : resolveGithubArtifact(parsed);
}
|
|
26
|
+
// Parses the npm portion of a ref ("name", "name@version", "@scope/name@tag").
function parseNpmRef(input, originalRef) {
    const trimmed = input.trim();
    if (!trimmed)
        throw new Error("Invalid npm ref.");
    const { packageName, requestedVersionOrTag } = splitNpmNameAndVersion(trimmed);
    validateNpmPackageName(packageName);
    return {
        source: "npm",
        ref: originalRef,
        id: `npm:${packageName}`,
        packageName,
        requestedVersionOrTag,
    };
}
|
|
40
|
+
// Parses "owner/repo" or "owner/repo#ref" shorthand; a trailing ".git" on the
// repo name is ignored.
function parseGithubShorthand(input, originalRef) {
    const [repoPart, requestedRef] = splitRefSuffix(input.trim());
    const segments = repoPart.split("/").filter(Boolean);
    if (segments.length !== 2) {
        throw new Error("Invalid GitHub ref. Expected owner/repo or owner/repo#ref.");
    }
    const [owner, rawRepo] = segments;
    const repo = rawRepo.replace(/\.git$/i, "");
    if (!owner || !repo) {
        throw new Error("Invalid GitHub ref. Expected owner/repo.");
    }
    return {
        source: "github",
        ref: originalRef,
        id: `github:${owner}/${repo}`,
        owner,
        repo,
        requestedRef,
    };
}
|
|
60
|
+
/**
 * Parses a GitHub web URL (https://github.com/owner/repo[.git][#ref]) into a
 * parsed GitHub ref. The URL fragment, when present, selects a branch, tag,
 * or commit. Extra path segments beyond owner/repo are ignored.
 * @throws Error when the URL is malformed, not a GitHub host, or lacks owner/repo.
 */
function parseGithubUrl(rawUrl) {
    let url;
    try {
        url = new URL(rawUrl);
    }
    catch {
        throw new Error("Invalid registry URL.");
    }
    // Accept the canonical host and its common "www." alias.
    if (url.hostname !== "github.com" && url.hostname !== "www.github.com") {
        throw new Error("Only GitHub URLs are currently supported for URL refs.");
    }
    const segments = url.pathname.split("/").filter(Boolean);
    if (segments.length < 2) {
        throw new Error("Invalid GitHub URL. Expected https://github.com/owner/repo.");
    }
    const owner = segments[0];
    const repo = segments[1].replace(/\.git$/i, "");
    const requestedRef = url.hash ? decodeURIComponent(url.hash.slice(1)) : undefined;
    return {
        source: "github",
        ref: rawUrl,
        id: `github:${owner}/${repo}`,
        owner,
        repo,
        requestedRef,
    };
}
|
|
87
|
+
/**
 * Resolves an npm ref against registry.npmjs.org: picks the "latest" dist-tag
 * when nothing was requested, otherwise an exact version or a named dist-tag,
 * and returns that version's tarball URL with its shasum (or integrity) as the
 * resolved revision.
 */
async function resolveNpmArtifact(parsed) {
    const metadata = await fetchJson(`https://registry.npmjs.org/${encodeURIComponent(parsed.packageName)}`);
    const versions = asRecord(metadata.versions);
    const distTags = asRecord(metadata["dist-tags"]);
    const requested = parsed.requestedVersionOrTag;
    let resolvedVersion;
    if (!requested) {
        resolvedVersion = asString(distTags.latest);
    }
    else {
        resolvedVersion = requested in versions ? requested : asString(distTags[requested]);
    }
    if (!resolvedVersion || !(resolvedVersion in versions)) {
        throw new Error(`Unable to resolve npm ref \"${parsed.ref}\".`);
    }
    const dist = asRecord(asRecord(versions[resolvedVersion]).dist);
    const tarballUrl = asString(dist.tarball);
    if (!tarballUrl) {
        throw new Error(`npm package ${parsed.packageName}@${resolvedVersion} does not expose a tarball URL.`);
    }
    return {
        id: parsed.id,
        source: parsed.source,
        ref: parsed.ref,
        artifactUrl: tarballUrl,
        resolvedVersion,
        resolvedRevision: asString(dist.shasum) ?? asString(dist.integrity),
    };
}
|
|
122
|
+
/**
 * Resolves a GitHub ref to a tarball URL via the REST API.
 * Priority: an explicitly requested ref; otherwise the latest release;
 * otherwise the head of the default branch. Commit-SHA lookups are
 * best-effort — on failure the symbolic ref doubles as the revision.
 */
async function resolveGithubArtifact(parsed) {
    const headers = githubHeaders();
    const repoApi = `${GITHUB_API_BASE}/repos/${encodeURIComponent(parsed.owner)}/${encodeURIComponent(parsed.repo)}`;
    const base = { id: parsed.id, source: parsed.source, ref: parsed.ref };
    if (parsed.requestedRef) {
        const commit = await tryFetchJson(`${repoApi}/commits/${encodeURIComponent(parsed.requestedRef)}`, headers);
        return {
            ...base,
            artifactUrl: `${repoApi}/tarball/${encodeURIComponent(parsed.requestedRef)}`,
            resolvedRevision: asString(commit?.sha) ?? parsed.requestedRef,
            resolvedVersion: parsed.requestedRef,
        };
    }
    const latestRelease = await tryFetchJson(`${repoApi}/releases/latest`, headers);
    const releaseTarball = latestRelease ? asString(latestRelease.tarball_url) : undefined;
    if (latestRelease && releaseTarball) {
        return {
            ...base,
            artifactUrl: releaseTarball,
            resolvedVersion: asString(latestRelease.tag_name),
            resolvedRevision: asString(latestRelease.target_commitish),
        };
    }
    const repoMeta = await fetchJson(repoApi, headers);
    const defaultBranch = asString(repoMeta.default_branch);
    if (!defaultBranch) {
        throw new Error(`Unable to resolve default branch for ${parsed.owner}/${parsed.repo}.`);
    }
    const headCommit = await tryFetchJson(`${repoApi}/commits/${encodeURIComponent(defaultBranch)}`, headers);
    return {
        ...base,
        artifactUrl: `${repoApi}/tarball/${encodeURIComponent(defaultBranch)}`,
        resolvedVersion: defaultBranch,
        resolvedRevision: asString(headCommit?.sha) ?? defaultBranch,
    };
}
|
|
165
|
+
// Splits "name[@versionOrTag]", taking scoped names ("@scope/name") into account.
function splitNpmNameAndVersion(input) {
    // For scoped packages the version separator is the second "@"; for plain
    // names it is the last "@" (index 0 would mean an empty name).
    const separator = input.startsWith("@") ? input.indexOf("@", 1) : input.lastIndexOf("@");
    if (separator > 0) {
        return {
            packageName: input.slice(0, separator),
            requestedVersionOrTag: input.slice(separator + 1) || undefined,
        };
    }
    return { packageName: input };
}
// Minimal sanity check; the npm registry performs authoritative validation.
function validateNpmPackageName(name) {
    if (!name || name.includes(" ")) {
        throw new Error(`Invalid npm package name: \"${name}\".`);
    }
}
// True when the ref (ignoring any "#ref" suffix) has exactly two path segments.
function looksLikeGithubOwnerRepo(ref) {
    const [repoPart] = splitRefSuffix(ref);
    return repoPart.split("/").filter(Boolean).length === 2;
}
// Splits "value[#suffix]" into [value, suffix]; an empty suffix becomes undefined.
function splitRefSuffix(value) {
    const hashIndex = value.indexOf("#");
    return hashIndex < 0
        ? [value, undefined]
        : [value.slice(0, hashIndex), value.slice(hashIndex + 1) || undefined];
}
// Standard GitHub API headers; adds a bearer token when GITHUB_TOKEN is set.
function githubHeaders() {
    const token = process.env.GITHUB_TOKEN?.trim();
    const base = {
        Accept: "application/vnd.github+json",
        "User-Agent": "agentikit-registry",
    };
    return token ? { ...base, Authorization: `Bearer ${token}` } : base;
}
// Fetches JSON, throwing on any non-2xx response.
async function fetchJson(url, headers) {
    const response = await fetch(url, { headers });
    if (!response.ok) {
        throw new Error(`Request failed (${response.status}) for ${url}`);
    }
    return response.json();
}
// Like fetchJson, but maps non-2xx responses to null instead of throwing.
async function tryFetchJson(url, headers) {
    const response = await fetch(url, { headers });
    return response.ok ? response.json() : null;
}
// Narrows an unknown value to a plain object, defaulting to {}.
function asRecord(value) {
    const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
    return isPlainObject ? value : {};
}
// Narrows an unknown value to a non-empty string, defaulting to undefined.
function asString(value) {
    return typeof value === "string" && value ? value : undefined;
}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
const GITHUB_API_BASE = "https://api.github.com";
|
|
2
|
+
/**
 * Searches npm and GitHub in parallel for the query. Per-source failures are
 * reported as warnings instead of rejecting the whole search. Hits are sorted
 * by descending score and capped at twice the per-source limit.
 */
export async function searchRegistry(query, options) {
    const trimmed = query.trim();
    if (!trimmed) {
        return { query: "", hits: [], warnings: [] };
    }
    const limit = clampLimit(options?.limit);
    const settled = await Promise.allSettled([
        searchNpm(trimmed, limit),
        searchGithub(trimmed, limit),
    ]);
    const labels = ["npm search failed", "GitHub search failed"];
    const hits = [];
    const warnings = [];
    settled.forEach((result, index) => {
        if (result.status === "fulfilled") {
            hits.push(...result.value);
        }
        else {
            warnings.push(`${labels[index]}: ${toErrorMessage(result.reason)}`);
        }
    });
    hits.sort((a, b) => (b.score ?? 0) - (a.score ?? 0));
    return {
        query: trimmed,
        hits: hits.slice(0, limit * 2),
        warnings,
    };
}
|
|
33
|
+
// Queries the npm registry search endpoint and maps each result to a hit.
async function searchNpm(query, limit) {
    const endpoint = `https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${limit}`;
    const response = await fetch(endpoint);
    if (!response.ok) {
        throw new Error(`HTTP ${response.status}`);
    }
    const payload = await response.json();
    const objects = Array.isArray(payload.objects) ? payload.objects : [];
    const hits = [];
    for (const raw of objects) {
        if (typeof raw !== "object" || raw === null || Array.isArray(raw))
            continue;
        const pkg = asRecord(raw.package);
        const name = asString(pkg.name);
        if (!name)
            continue;
        const version = asString(pkg.version);
        const date = asString(pkg.date);
        const metadata = {
            ...(version ? { version } : {}),
            ...(date ? { updatedAt: date } : {}),
        };
        hits.push({
            source: "npm",
            id: `npm:${name}`,
            title: name,
            description: asString(pkg.description),
            ref: name,
            homepage: asString(asRecord(pkg.links).homepage),
            score: asNumber(raw.score),
            metadata,
        });
    }
    return hits;
}
|
|
68
|
+
// Queries the GitHub repository search API (stars-sorted) and maps to hits.
async function searchGithub(query, limit) {
    const qualifiers = encodeURIComponent(`${query} in:name,description,readme`);
    const endpoint = `${GITHUB_API_BASE}/search/repositories?q=${qualifiers}&sort=stars&order=desc&per_page=${limit}`;
    const response = await fetch(endpoint, { headers: githubHeaders() });
    if (!response.ok) {
        throw new Error(`HTTP ${response.status}`);
    }
    const payload = await response.json();
    const items = Array.isArray(payload.items) ? payload.items : [];
    const hits = [];
    for (const raw of items) {
        const repo = asRecord(raw);
        const fullName = asString(repo.full_name);
        if (!fullName)
            continue;
        const stars = asNumber(repo.stargazers_count);
        const language = asString(repo.language);
        const metadata = {
            ...(stars > 0 ? { stars: String(stars) } : {}),
            ...(language ? { language } : {}),
        };
        hits.push({
            source: "github",
            id: `github:${fullName}`,
            title: fullName,
            description: asString(repo.description),
            ref: fullName,
            homepage: asString(repo.html_url),
            score: stars,
            metadata,
        });
    }
    return hits;
}
|
|
101
|
+
// GitHub API headers; attaches a bearer token when GITHUB_TOKEN is present.
function githubHeaders() {
    const token = process.env.GITHUB_TOKEN?.trim();
    const base = {
        Accept: "application/vnd.github+json",
        "User-Agent": "agentikit-registry",
    };
    return token ? { ...base, Authorization: `Bearer ${token}` } : base;
}
// Normalizes a requested result limit to an integer in [1, 100]; default 20.
function clampLimit(limit) {
    if (!limit || !Number.isFinite(limit))
        return 20;
    return Math.min(100, Math.max(1, Math.trunc(limit)));
}
// Narrows an unknown value to a plain object, defaulting to {}.
function asRecord(value) {
    const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
    return isPlainObject ? value : {};
}
// Narrows an unknown value to a non-empty string, defaulting to undefined.
function asString(value) {
    return typeof value === "string" && value ? value : undefined;
}
// Narrows an unknown value to a finite number, defaulting to 0.
function asNumber(value) {
    return typeof value === "number" && Number.isFinite(value) ? value : 0;
}
// Renders any rejection reason as a human-readable message.
function toErrorMessage(error) {
    return error instanceof Error ? error.message : String(error);
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/** Backend a registry artifact comes from. */
export type RegistrySource = "npm" | "github";
/** Fields shared by every parsed registry ref. */
export interface RegistryRefBase {
    source: RegistrySource;
    /** The raw ref string as supplied by the user. */
    ref: string;
    /** Canonical identifier, e.g. "npm:<name>" or "github:<owner>/<repo>". */
    id: string;
}
/** A parsed npm ref ("name", "name@version", "@scope/name@tag"). */
export interface ParsedNpmRef extends RegistryRefBase {
    source: "npm";
    packageName: string;
    /** Exact version or dist-tag, when one was requested. */
    requestedVersionOrTag?: string;
}
/** A parsed GitHub ref ("owner/repo", optionally with "#branch/tag/commit"). */
export interface ParsedGithubRef extends RegistryRefBase {
    source: "github";
    owner: string;
    repo: string;
    /** Branch, tag, or commit, when one was requested. */
    requestedRef?: string;
}
export type ParsedRegistryRef = ParsedNpmRef | ParsedGithubRef;
/** A ref resolved to a concrete downloadable tarball. */
export interface ResolvedRegistryArtifact {
    id: string;
    source: RegistrySource;
    ref: string;
    /** URL of the .tar.gz artifact to download. */
    artifactUrl: string;
    resolvedVersion?: string;
    resolvedRevision?: string;
}
/** A registry install as recorded in the stash config. */
export interface RegistryInstalledEntry {
    id: string;
    source: RegistrySource;
    ref: string;
    resolvedVersion?: string;
    resolvedRevision?: string;
    artifactUrl: string;
    /** Absolute path of the stash root detected inside the extracted artifact. */
    stashRoot: string;
    /** Absolute path of the per-install cache directory. */
    cacheDir: string;
    /** ISO-8601 timestamp of when the install happened. */
    installedAt: string;
}
/** Result of an install, including where the archive was extracted. */
export interface RegistryInstallResult extends RegistryInstalledEntry {
    extractedDir: string;
}
/** One search result from either backend. */
export interface RegistrySearchHit {
    source: RegistrySource;
    id: string;
    title: string;
    description?: string;
    /** Ref string suitable for passing back to the install flow. */
    ref: string;
    homepage?: string;
    /** Relevance score used for ordering (source-specific scale). */
    score?: number;
    /** Extra display fields (e.g. version, updatedAt, stars, language). */
    metadata?: Record<string, string>;
}
/** Aggregated search response across backends. */
export interface RegistrySearchResponse {
    query: string;
    hits: RegistrySearchHit[];
    /** Non-fatal per-source failure messages. */
    warnings: string[];
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|