akm-cli 0.0.21 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -5
- package/dist/asset-spec.js +91 -10
- package/dist/cli.js +172 -57
- package/dist/common.js +15 -2
- package/dist/config-cli.js +55 -6
- package/dist/config.js +118 -22
- package/dist/create-provider-registry.js +18 -0
- package/dist/db.js +156 -53
- package/dist/embedder.js +36 -18
- package/dist/errors.js +6 -0
- package/dist/file-context.js +18 -19
- package/dist/frontmatter.js +19 -3
- package/dist/indexer.js +126 -89
- package/dist/{stash-registry.js → installed-kits.js} +16 -24
- package/dist/kit-include.js +108 -0
- package/dist/local-search.js +429 -0
- package/dist/lockfile.js +47 -5
- package/dist/matchers.js +6 -0
- package/dist/metadata.js +20 -10
- package/dist/paths.js +4 -0
- package/dist/providers/skills-sh.js +3 -2
- package/dist/providers/static-index.js +4 -9
- package/dist/registry-build-index.js +356 -0
- package/dist/registry-factory.js +19 -0
- package/dist/registry-install.js +114 -109
- package/dist/registry-resolve.js +44 -9
- package/dist/registry-search.js +14 -9
- package/dist/renderers.js +23 -7
- package/dist/ripgrep-install.js +9 -4
- package/dist/self-update.js +31 -4
- package/dist/stash-add.js +75 -6
- package/dist/stash-clone.js +1 -1
- package/dist/stash-provider-factory.js +37 -0
- package/dist/stash-provider.js +1 -0
- package/dist/stash-providers/filesystem.js +42 -0
- package/dist/stash-providers/index.js +9 -0
- package/dist/stash-providers/openviking.js +337 -0
- package/dist/stash-resolve.js +4 -4
- package/dist/stash-search.js +70 -401
- package/dist/stash-show.js +24 -5
- package/dist/stash-source-manage.js +82 -0
- package/dist/stash-source.js +19 -11
- package/dist/walker.js +15 -10
- package/dist/warn.js +7 -0
- package/package.json +1 -1
- package/dist/provider-registry.js +0 -8
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { fetchWithRetry } from "./common";
|
|
5
|
+
import { asRecord, asString, GITHUB_API_BASE, githubHeaders } from "./github";
|
|
6
|
+
import { copyIncludedPaths, findNearestIncludeConfig } from "./kit-include";
|
|
7
|
+
import { generateMetadataFlat, loadStashFile } from "./metadata";
|
|
8
|
+
import { parseRegistryIndex } from "./providers/static-index";
|
|
9
|
+
import { detectStashRoot, extractTarGzSecure } from "./registry-install";
|
|
10
|
+
import { walkStashFlat } from "./walker";
|
|
11
|
+
// Default public npm registry endpoint used for keyword searches.
const DEFAULT_NPM_REGISTRY_BASE = "https://registry.npmjs.org";
// Manual kit entries are read from the current working directory by default.
const DEFAULT_MANUAL_ENTRIES_PATH = path.resolve("manual-entries.json");
// Where writeRegistryIndex places the built index when no path is given.
const DEFAULT_OUTPUT_PATH = path.resolve("index.json");
// A package must carry at least one of these keywords to be indexed from npm.
const REQUIRED_KEYWORDS = ["agentikit", "akm-kit"];
// GitHub repositories are discovered via these topics.
const GITHUB_TOPICS = ["agentikit", "akm-kit"];
// First-party repositories excluded from the public index.
const EXCLUDED_REPOS = new Set(["itlackey/agentikit-plugins", "itlackey/agentikit"]);
// First-party npm packages excluded from the public index.
const EXCLUDED_NPM_PACKAGES = new Set([
    "agentikit",
    "agentikit-claude",
    "agentikit-opencode",
    "agentikit-plugins",
    "akm-cli",
    "akm-opencode",
]);
// Shared sentinel returned when archive inspection fails.
const EMPTY_INSPECTION = {};
|
|
26
|
+
/**
 * Builds the combined kit registry index.
 *
 * Aggregates kits from three sources in parallel — a manual-entries JSON
 * file, an npm keyword search, and a GitHub topic search — merges duplicate
 * kit ids, and sorts the result by kit name.
 *
 * @param {object} [options] - Optional overrides: manualEntriesPath,
 *   npmRegistryBase, githubApiBase.
 * @returns {Promise<object>} { index, counts, paths } where index is the
 *   version-2 registry document and counts reports per-source totals.
 */
export async function buildRegistryIndex(options) {
    const manualEntriesPath = path.resolve(options?.manualEntriesPath ?? DEFAULT_MANUAL_ENTRIES_PATH);
    const npmRegistryBase = trimTrailingSlash(options?.npmRegistryBase ?? DEFAULT_NPM_REGISTRY_BASE);
    const githubApiBase = trimTrailingSlash(options?.githubApiBase ?? GITHUB_API_BASE);
    // The three sources are independent, so scan them concurrently.
    const [manualKits, npmKits, githubKits] = await Promise.all([
        loadManualEntries(manualEntriesPath),
        scanNpm(npmRegistryBase),
        scanGithub(githubApiBase),
    ]);
    // Manual entries come first so their fields win during merge-on-id.
    const kits = deduplicateKits([...manualKits, ...npmKits, ...githubKits]).sort((a, b) => a.name.localeCompare(b.name));
    const index = {
        version: 2,
        updatedAt: new Date().toISOString(),
        kits,
    };
    return {
        index,
        counts: {
            manual: manualKits.length,
            npm: npmKits.length,
            github: githubKits.length,
            total: kits.length,
        },
        paths: {
            manualEntriesPath,
        },
    };
}
|
|
54
|
+
/**
 * Serializes a registry index to disk as pretty-printed JSON with a trailing
 * newline, creating parent directories as needed.
 *
 * @param {object} index - Registry index document to write.
 * @param {string} [outPath] - Destination path; defaults to DEFAULT_OUTPUT_PATH.
 * @returns {string} The absolute path the index was written to.
 */
export function writeRegistryIndex(index, outPath) {
    const targetPath = path.resolve(outPath ?? DEFAULT_OUTPUT_PATH);
    const parentDir = path.dirname(targetPath);
    fs.mkdirSync(parentDir, { recursive: true });
    const serialized = JSON.stringify(index, null, 2);
    fs.writeFileSync(targetPath, `${serialized}\n`, "utf8");
    return targetPath;
}
|
|
60
|
+
/**
 * Discovers kits published to npm by paging through the registry search
 * endpoint for each required keyword.
 *
 * @param {string} npmRegistryBase - Registry base URL, no trailing slash.
 * @returns {Promise<object[]>} Normalized kit entries found on npm.
 */
async function scanNpm(npmRegistryBase) {
    const kits = [];
    // Tracks kit ids across all keyword passes so a package tagged with both
    // required keywords is only emitted once.
    const seen = new Set();
    for (const keyword of REQUIRED_KEYWORDS) {
        let offset = 0;
        const size = 250;
        while (true) {
            const url = `${npmRegistryBase}/-/v1/search?text=keywords:${encodeURIComponent(keyword)}&size=${size}&from=${offset}`;
            const data = await fetchJson(url);
            for (const obj of data.objects) {
                const pkg = obj.package;
                if (EXCLUDED_NPM_PACKAGES.has(pkg.name))
                    continue;
                const repoUrl = pkg.links?.repository ?? "";
                // Reduce "https://github.com/owner/repo(.git)" to "owner/repo"
                // so it can be matched against the repo exclusion set.
                const normalizedRepo = repoUrl.replace(/^https?:\/\/github\.com\//, "").replace(/\.git$/, "");
                if (EXCLUDED_REPOS.has(normalizedRepo))
                    continue;
                const id = `npm:${pkg.name}`;
                if (seen.has(id))
                    continue;
                seen.add(id);
                // Re-check keywords on the result itself: the search endpoint
                // can return fuzzy matches that lack the required keyword.
                const keywords = (pkg.keywords ?? []).map((value) => value.toLowerCase());
                if (!keywords.some((value) => REQUIRED_KEYWORDS.includes(value)))
                    continue;
                // Fetching the "latest" document is best-effort; fall back to
                // the search-result fields when it fails.
                let latestMetadata = {};
                try {
                    latestMetadata = await fetchJson(`${npmRegistryBase}/${encodeURIComponent(pkg.name)}/latest`);
                }
                catch {
                    latestMetadata = {};
                }
                // Tarball inspection is also best-effort; degrade to an empty
                // inspection rather than aborting the scan.
                const inspection = await inspectNpmPackage(npmRegistryBase, latestMetadata).catch(() => EMPTY_INSPECTION);
                const tags = mergeStrings((pkg.keywords ?? []).filter((value) => !REQUIRED_KEYWORDS.includes(value.toLowerCase())), inspection.tags);
                kits.push(normalizeKit({
                    id,
                    name: pkg.name,
                    description: inspection.description ?? pkg.description,
                    ref: pkg.name,
                    source: "npm",
                    homepage: pkg.links?.homepage ?? pkg.links?.npm,
                    author: pkg.author?.name ?? pkg.author?.username ?? pkg.publisher?.username,
                    latestVersion: inspection.latestVersion ?? pkg.version,
                    license: asString(latestMetadata.license) ?? inspection.license,
                    tags,
                    assetTypes: inspection.assetTypes,
                    assets: inspection.assets,
                }));
            }
            // A short page means all results for this keyword are consumed.
            if (data.objects.length < size)
                break;
            offset += size;
        }
    }
    return kits;
}
|
|
115
|
+
/**
 * Inspects an npm package's latest tarball and merges the archive findings
 * with the registry metadata (metadata fields take precedence).
 *
 * @param {string} _npmRegistryBase - Unused; kept for signature compatibility.
 * @param {object} latestMetadata - The package's "latest" registry document.
 * @returns {Promise<object>} Inspection summary, or {} when no tarball URL.
 */
async function inspectNpmPackage(_npmRegistryBase, latestMetadata) {
    const distRecord = asRecord(latestMetadata.dist);
    const tarball = asString(distRecord.tarball);
    if (!tarball) {
        return {};
    }
    const archiveInfo = await inspectArchive(tarball);
    const keywordTags = extractNonReservedKeywords(latestMetadata.keywords);
    return {
        description: asString(latestMetadata.description) ?? archiveInfo.description,
        latestVersion: asString(latestMetadata.version) ?? archiveInfo.latestVersion,
        license: asString(latestMetadata.license) ?? archiveInfo.license,
        tags: mergeStrings(keywordTags, archiveInfo.tags),
        assetTypes: archiveInfo.assetTypes,
        assets: archiveInfo.assets,
    };
}
|
|
130
|
+
/**
 * Discovers kits on GitHub by paging through repository search results for
 * each configured topic.
 *
 * @param {string} githubApiBase - GitHub API base URL, no trailing slash.
 * @returns {Promise<object[]>} Normalized kit entries found on GitHub.
 */
async function scanGithub(githubApiBase) {
    const kits = [];
    // Dedupe across topics: a repo tagged with both topics appears once.
    const seen = new Set();
    const headers = githubHeaders();
    for (const topic of GITHUB_TOPICS) {
        let page = 1;
        const perPage = 100;
        while (true) {
            const q = encodeURIComponent(`topic:${topic}`);
            const url = `${githubApiBase}/search/repositories?q=${q}&sort=updated&order=desc&per_page=${perPage}&page=${page}`;
            const data = await fetchJson(url, headers);
            for (const repo of data.items) {
                if (EXCLUDED_REPOS.has(repo.full_name))
                    continue;
                const id = `github:${repo.full_name}`;
                if (seen.has(id))
                    continue;
                seen.add(id);
                // Inspect the default-branch tarball; failures degrade to an
                // empty inspection rather than aborting the scan.
                const inspection = await inspectArchive(`${githubApiBase}/repos/${repo.full_name}/tarball/${encodeURIComponent(repo.default_branch)}`, headers).catch(() => EMPTY_INSPECTION);
                // Strip the discovery topics themselves from the tag list.
                const topics = repo.topics.filter((value) => !GITHUB_TOPICS.includes(value));
                kits.push(normalizeKit({
                    id,
                    name: repo.name,
                    description: inspection.description ?? repo.description ?? undefined,
                    ref: repo.full_name,
                    source: "github",
                    homepage: repo.html_url,
                    author: repo.owner.login,
                    latestVersion: inspection.latestVersion,
                    license: repo.license?.spdx_id ?? inspection.license,
                    tags: mergeStrings(topics, inspection.tags),
                    assetTypes: inspection.assetTypes,
                    assets: inspection.assets,
                }));
            }
            // A short page means the topic's results are exhausted.
            if (data.items.length < perPage)
                break;
            page += 1;
        }
    }
    return kits;
}
|
|
172
|
+
/**
 * Downloads a .tar.gz archive, extracts it securely into a temp directory,
 * and inspects it for kit assets and package metadata.
 *
 * NOTE(review): uses Bun.write to stream the response to disk — this code
 * path assumes the Bun runtime.
 *
 * @param {string} url - Archive URL (npm tarball or GitHub tarball endpoint).
 * @param {object} [headers] - Optional request headers (e.g. GitHub auth).
 * @returns {Promise<object>} Inspection summary with description, version,
 *   license, tags, assetTypes, and assets.
 * @throws {Error} when the download fails or extraction is rejected.
 */
async function inspectArchive(url, headers) {
    const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "akm-registry-build-"));
    const archivePath = path.join(tempDir, "archive.tgz");
    const extractDir = path.join(tempDir, "extract");
    try {
        const response = await fetchWithRetry(url, headers ? { headers } : undefined, { timeout: 120_000 });
        if (!response.ok) {
            throw new Error(`Failed to fetch archive (${response.status}) from ${url}`);
        }
        await Bun.write(archivePath, response);
        // Reuse the secure extraction from registry-install which validates entries,
        // uses --no-same-owner, strips components, and runs a post-extraction scan.
        extractTarGzSecure(archivePath, extractDir);
        const stashRoot = detectStashRoot(extractDir);
        // Honor an akm include config when present, narrowing inspection to
        // the selected paths only.
        const inspectionRoot = applyIncludeConfigForInspection(stashRoot, tempDir, extractDir) ?? stashRoot;
        const metadata = await enumerateAssets(inspectionRoot);
        const packageMetadata = readNearestPackageJson(extractDir, inspectionRoot);
        // Keep only non-empty description/tags on each asset entry.
        const assets = metadata.map((entry) => ({
            type: entry.type,
            name: entry.name,
            ...(entry.description ? { description: entry.description } : {}),
            ...(entry.tags && entry.tags.length > 0 ? { tags: entry.tags } : {}),
        }));
        return {
            description: asString(packageMetadata.description),
            latestVersion: asString(packageMetadata.version),
            license: asString(packageMetadata.license),
            tags: extractNonReservedKeywords(packageMetadata.keywords),
            assetTypes: deriveAssetTypes(assets),
            assets: assets.length > 0 ? assets : undefined,
        };
    }
    finally {
        // Always remove the temp tree, even when download/extraction fails.
        fs.rmSync(tempDir, { recursive: true, force: true });
    }
}
|
|
208
|
+
/**
 * Reads the first parseable package.json from a list of candidate locations:
 * the stash root, the extraction root, and the npm "package/" subdirectory.
 *
 * @param {string} extractDir - Root of the extracted archive.
 * @param {string} stashRoot - Detected stash root within the extraction.
 * @returns {object} Parsed package.json record, or {} when none is readable.
 */
function readNearestPackageJson(extractDir, stashRoot) {
    const candidates = [
        path.join(stashRoot, "package.json"),
        path.join(extractDir, "package.json"),
        path.join(extractDir, "package", "package.json"),
    ];
    for (const candidate of candidates) {
        try {
            const raw = fs.readFileSync(candidate, "utf8");
            return asRecord(JSON.parse(raw));
        }
        catch {
            // Missing or unparsable candidate — try the next location.
        }
    }
    return {};
}
|
|
222
|
+
/**
 * Enumerates asset metadata entries beneath a stash root.
 *
 * Walks the stash, groups files by parent directory, and for each directory
 * uses the stash file's entries when present, generating metadata only for
 * files the stash file does not cover.
 *
 * @param {string} stashRoot - Root directory to enumerate.
 * @returns {Promise<object[]>} Entries sorted by "type:name".
 */
async function enumerateAssets(stashRoot) {
    const fileContexts = walkStashFlat(stashRoot);
    // Group absolute file paths by their parent directory.
    const dirGroups = new Map();
    for (const ctx of fileContexts) {
        const group = dirGroups.get(ctx.parentDirAbs);
        if (group)
            group.push(ctx.absPath);
        else
            dirGroups.set(ctx.parentDirAbs, [ctx.absPath]);
    }
    const entries = [];
    for (const [dirPath, files] of dirGroups) {
        let stash = loadStashFile(dirPath);
        if (stash) {
            // Determine which files the stash file already describes (matched
            // by basename) and generate metadata for the remainder only.
            const covered = new Set(stash.entries.map((entry) => entry.filename).filter((value) => !!value));
            const uncoveredFiles = files.filter((file) => !covered.has(path.basename(file)));
            if (uncoveredFiles.length > 0) {
                const generated = await generateMetadataFlat(stashRoot, uncoveredFiles);
                if (generated.entries.length > 0) {
                    stash = { entries: [...stash.entries, ...generated.entries] };
                }
            }
        }
        else {
            // No stash file: derive metadata for every file in the directory;
            // directories yielding no entries are skipped entirely.
            const generated = await generateMetadataFlat(stashRoot, files);
            if (generated.entries.length === 0)
                continue;
            stash = generated;
        }
        entries.push(...stash.entries);
    }
    // Deterministic ordering keyed on "type:name".
    return entries.sort((a, b) => `${a.type}:${a.name}`.localeCompare(`${b.type}:${b.name}`));
}
|
|
255
|
+
/**
 * Applies an akm include config (if one exists near the stash root) by
 * copying only the included paths into a fresh "selected" directory.
 *
 * @param {string} stashRoot - Directory to search upward from.
 * @param {string} tempDir - Temp root that hosts the "selected" directory.
 * @param {string} searchRoot - Upper boundary for the config search.
 * @returns {string|undefined} The selected directory, or undefined when no
 *   include config is found.
 */
function applyIncludeConfigForInspection(stashRoot, tempDir, searchRoot) {
    const nearest = findNearestIncludeConfig(stashRoot, searchRoot);
    if (!nearest) {
        return undefined;
    }
    const selectedDir = path.join(tempDir, "selected");
    // Start from a clean slate before copying the selected paths.
    fs.rmSync(selectedDir, { recursive: true, force: true });
    fs.mkdirSync(selectedDir, { recursive: true });
    copyIncludedPaths(nearest.include, nearest.baseDir, selectedDir);
    return selectedDir;
}
|
|
265
|
+
/**
 * Loads curated kit entries from a manual-entries JSON file.
 *
 * Accepts either a bare array of kits or an object with a `kits` property.
 * Entries default to curated: true. Any read/parse/validation failure yields
 * an empty list — the manual file is optional.
 *
 * @param {string} manualEntriesPath - Path to the manual-entries file.
 * @returns {Promise<object[]>} Normalized, curated kit entries.
 */
async function loadManualEntries(manualEntriesPath) {
    let kits;
    try {
        const document = JSON.parse(fs.readFileSync(manualEntriesPath, "utf8"));
        const candidateKits = Array.isArray(document) ? document : asRecord(document).kits;
        const parsed = parseRegistryIndex({ version: 2, updatedAt: new Date().toISOString(), kits: candidateKits });
        if (!parsed) {
            return [];
        }
        kits = parsed.kits;
    }
    catch {
        // Missing or malformed manual file — treat as "no manual entries".
        return [];
    }
    return kits.map((kit) => normalizeKit({ ...kit, curated: kit.curated ?? true }));
}
|
|
278
|
+
/**
 * Fetches a URL (with retry) and parses the body as JSON.
 *
 * @param {string} url - URL to fetch.
 * @param {object} [headers] - Optional request headers.
 * @returns {Promise<any>} Parsed JSON body.
 * @throws {Error} on non-2xx responses, including up to 200 chars of body.
 */
async function fetchJson(url, headers) {
    const init = headers ? { headers } : undefined;
    const response = await fetchWithRetry(url, init, { timeout: 30_000 });
    if (response.ok) {
        return (await response.json());
    }
    const body = await response.text().catch(() => "");
    throw new Error(`HTTP ${response.status} from ${url}: ${body.slice(0, 200)}`);
}
|
|
286
|
+
/**
 * Collapses kits sharing the same id into single entries.
 *
 * Earlier entries win on conflicting fields (mergeEntries prefers its first
 * argument), which is why manual entries are listed first by the caller.
 *
 * @param {object[]} kits - Kit entries, possibly with duplicate ids.
 * @returns {object[]} One entry per unique id, in first-seen order.
 */
function deduplicateKits(kits) {
    const byId = new Map();
    for (const kit of kits) {
        const previous = byId.get(kit.id);
        if (previous) {
            byId.set(kit.id, mergeEntries(previous, kit));
        }
        else {
            byId.set(kit.id, kit);
        }
    }
    return Array.from(byId.values());
}
|
|
294
|
+
/**
 * Merges two kit entries with the same id, preferring fields from `a`.
 *
 * Assets are unioned and deduplicated, asset types are recomputed from the
 * merged assets, and `curated` survives if either side set it.
 *
 * @param {object} a - Preferred entry (wins on conflicting scalar fields).
 * @param {object} b - Secondary entry.
 * @returns {object} Normalized merged entry.
 */
function mergeEntries(a, b) {
    const mergedAssets = mergeAssets(a.assets, b.assets);
    const derivedTypes = mergedAssets ? deriveAssetTypes(mergedAssets) : undefined;
    const mergedAssetTypes = mergeStrings(a.assetTypes, b.assetTypes, derivedTypes);
    const combined = {
        id: a.id,
        name: a.name,
        description: a.description ?? b.description,
        ref: a.ref,
        source: a.source,
        homepage: a.homepage ?? b.homepage,
        tags: mergeStrings(a.tags, b.tags),
        assetTypes: mergedAssetTypes,
        assets: mergedAssets,
        author: a.author ?? b.author,
        license: a.license ?? b.license,
        latestVersion: a.latestVersion ?? b.latestVersion,
        curated: a.curated || b.curated || undefined,
    };
    return normalizeKit(combined);
}
|
|
313
|
+
/**
 * Unions two asset lists, deduplicating on "type:name" (first occurrence
 * wins) and returning a sorted copy.
 *
 * @param {object[]|undefined} a - First asset list.
 * @param {object[]|undefined} b - Second asset list.
 * @returns {object[]|undefined} Sorted merged assets, or undefined when both
 *   inputs are absent or the union is empty.
 */
function mergeAssets(a, b) {
    if (!a && !b)
        return undefined;
    const byKey = new Map();
    for (const asset of [...(a ?? []), ...(b ?? [])]) {
        const assetKey = `${asset.type}:${asset.name}`;
        if (byKey.has(assetKey))
            continue;
        byKey.set(assetKey, asset);
    }
    if (byKey.size === 0)
        return undefined;
    return sortAssets(Array.from(byKey.values()));
}
|
|
325
|
+
/**
 * Flattens any number of string lists (nullish lists are skipped), drops
 * blank entries, deduplicates, and sorts.
 *
 * @param {...(string[]|undefined)} lists - String lists to merge.
 * @returns {string[]|undefined} Sorted unique strings, or undefined when
 *   nothing non-blank remains.
 */
function mergeStrings(...lists) {
    const unique = new Set();
    for (const list of lists) {
        for (const item of list ?? []) {
            if (item.trim().length > 0) {
                unique.add(item);
            }
        }
    }
    if (unique.size === 0) {
        return undefined;
    }
    return [...unique].sort();
}
|
|
329
|
+
/**
 * Derives the sorted unique set of asset types present in an asset list.
 *
 * @param {object[]|undefined} assets - Asset entries with a `type` field.
 * @returns {string[]|undefined} Sorted unique types, or undefined when empty.
 */
function deriveAssetTypes(assets) {
    const types = assets ? assets.map((entry) => entry.type) : undefined;
    return mergeStrings(types);
}
|
|
332
|
+
/**
 * Extracts trimmed, non-empty string keywords that are not one of the
 * reserved discovery keywords (case-insensitive match).
 *
 * @param {unknown} value - Candidate keywords field (any shape tolerated).
 * @returns {string[]|undefined} Remaining keywords, or undefined when the
 *   input is not an array or nothing survives filtering.
 */
function extractNonReservedKeywords(value) {
    if (!Array.isArray(value))
        return undefined;
    const kept = [];
    for (const item of value) {
        if (typeof item !== "string")
            continue;
        const trimmed = item.trim();
        if (trimmed.length === 0)
            continue;
        if (REQUIRED_KEYWORDS.includes(trimmed.toLowerCase()))
            continue;
        kept.push(trimmed);
    }
    return kept.length > 0 ? kept : undefined;
}
|
|
342
|
+
/**
 * Normalizes a kit entry for the index: drops empty or absent `tags`,
 * `assetTypes`, and `assets`, and stores `assets` deterministically sorted.
 *
 * Fix: the previous conditional-spread form (`...(cond ? { tags } : {})`
 * after `...kit`) could not REMOVE keys already copied by `...kit`, so empty
 * arrays leaked into the serialized index and an existing `assets` key was
 * never replaced by its sorted copy when empty. Explicit deletes implement
 * the intended omit-when-empty behavior while preserving key order for
 * populated fields.
 *
 * @param {object} kit - Candidate kit entry.
 * @returns {object} Normalized copy; the input is not mutated.
 */
function normalizeKit(kit) {
    const normalized = { ...kit };
    if (kit.tags && kit.tags.length > 0) {
        normalized.tags = kit.tags;
    }
    else {
        delete normalized.tags;
    }
    if (kit.assetTypes && kit.assetTypes.length > 0) {
        normalized.assetTypes = kit.assetTypes;
    }
    else {
        delete normalized.assetTypes;
    }
    const assets = kit.assets ? sortAssets(kit.assets) : undefined;
    if (assets && assets.length > 0) {
        normalized.assets = assets;
    }
    else {
        delete normalized.assets;
    }
    return normalized;
}
|
|
351
|
+
/**
 * Returns a copy of the asset list sorted by the "type:name" composite key.
 * The input array is not mutated.
 *
 * @param {object[]} assets - Asset entries with `type` and `name` fields.
 * @returns {object[]} New sorted array.
 */
function sortAssets(assets) {
    const sortKey = (asset) => `${asset.type}:${asset.name}`;
    const copy = Array.from(assets);
    copy.sort((left, right) => sortKey(left).localeCompare(sortKey(right)));
    return copy;
}
|
|
354
|
+
/**
 * Strips all trailing "/" characters from a URL or path string.
 *
 * @param {string} value - Input string.
 * @returns {string} The string without any trailing slashes.
 */
function trimTrailingSlash(value) {
    let end = value.length;
    while (end > 0 && value[end - 1] === "/") {
        end -= 1;
    }
    return value.slice(0, end);
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Registry provider factory map.
|
|
3
|
+
*
|
|
4
|
+
* Maps registry provider type identifiers (e.g. "static-index", "skills-sh")
|
|
5
|
+
* to factory functions that create RegistryProvider instances.
|
|
6
|
+
*
|
|
7
|
+
* "Registry" here refers to the kit discovery registries (npm, GitHub, static
|
|
8
|
+
* index files) — not to be confused with the stash provider factory map in
|
|
9
|
+
* stash-provider-factory.ts or the installed-kit operations in installed-kits.ts.
|
|
10
|
+
*/
|
|
11
|
+
import { createProviderRegistry } from "./create-provider-registry";
|
|
12
|
+
// ── Factory map ─────────────────────────────────────────────────────────────
// Single module-level registry instance shared by the exported register /
// resolve helpers below.
const registry = createProviderRegistry();
|
14
|
+
/**
 * Registers a registry-provider factory under the given type identifier.
 * Thin wrapper over the module-level provider registry.
 *
 * @param {string} type - Provider type identifier (e.g. "static-index").
 * @param {Function} factory - Factory producing a RegistryProvider instance.
 */
export function registerProvider(type, factory) {
    registry.register(type, factory);
}
|
|
17
|
+
/**
 * Looks up the factory registered for a provider type identifier.
 * Thin wrapper over the module-level provider registry.
 *
 * @param {string} type - Provider type identifier.
 * @returns {Function|undefined} The registered factory — return shape is
 *   whatever registry.resolve provides (defined in create-provider-registry).
 */
export function resolveProviderFactory(type) {
    return registry.resolve(type);
}
|
package/dist/registry-install.js
CHANGED
|
@@ -5,8 +5,10 @@ import path from "node:path";
|
|
|
5
5
|
import { TYPE_DIRS } from "./asset-spec";
|
|
6
6
|
import { fetchWithRetry, isWithin } from "./common";
|
|
7
7
|
import { loadConfig, saveConfig } from "./config";
|
|
8
|
+
import { copyIncludedPaths, findNearestIncludeConfig } from "./kit-include";
|
|
8
9
|
import { getRegistryCacheDir as _getRegistryCacheDir } from "./paths";
|
|
9
|
-
import { parseRegistryRef, resolveRegistryArtifact } from "./registry-resolve";
|
|
10
|
+
import { parseRegistryRef, resolveRegistryArtifact, validateGitRef, validateGitUrl } from "./registry-resolve";
|
|
11
|
+
import { warn } from "./warn";
|
|
10
12
|
const REGISTRY_STASH_DIR_NAMES = new Set(Object.values(TYPE_DIRS));
|
|
11
13
|
export async function installRegistryRef(ref, options) {
|
|
12
14
|
const parsed = parseRegistryRef(ref);
|
|
@@ -48,13 +50,30 @@ export async function installRegistryRef(ref, options) {
|
|
|
48
50
|
}
|
|
49
51
|
}
|
|
50
52
|
fs.mkdirSync(cacheDir, { recursive: true });
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
53
|
+
let integrity;
|
|
54
|
+
let provisionalKitRoot;
|
|
55
|
+
let installRoot;
|
|
56
|
+
let stashRoot;
|
|
57
|
+
try {
|
|
58
|
+
await downloadArchive(resolved.artifactUrl, archivePath);
|
|
59
|
+
verifyArchiveIntegrity(archivePath, resolved.resolvedRevision, resolved.source);
|
|
60
|
+
integrity = await computeFileHash(archivePath);
|
|
61
|
+
extractTarGzSecure(archivePath, extractedDir);
|
|
62
|
+
provisionalKitRoot = detectStashRoot(extractedDir);
|
|
63
|
+
installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
|
|
64
|
+
stashRoot = detectStashRoot(installRoot);
|
|
65
|
+
}
|
|
66
|
+
catch (err) {
|
|
67
|
+
// Clean up the cache directory so stale or partially-extracted artifacts
|
|
68
|
+
// don't cause false cache hits on the next install attempt.
|
|
69
|
+
try {
|
|
70
|
+
fs.rmSync(cacheDir, { recursive: true, force: true });
|
|
71
|
+
}
|
|
72
|
+
catch {
|
|
73
|
+
// Best-effort cleanup; ignore errors
|
|
74
|
+
}
|
|
75
|
+
throw err;
|
|
76
|
+
}
|
|
58
77
|
return {
|
|
59
78
|
id: resolved.id,
|
|
60
79
|
source: resolved.source,
|
|
@@ -121,24 +140,44 @@ async function installGitRegistryRef(parsed, options) {
|
|
|
121
140
|
}
|
|
122
141
|
}
|
|
123
142
|
fs.mkdirSync(cacheDir, { recursive: true });
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
143
|
+
// Validate URL and ref before passing to git to prevent command injection
|
|
144
|
+
validateGitUrl(parsed.url);
|
|
145
|
+
if (parsed.requestedRef)
|
|
146
|
+
validateGitRef(parsed.requestedRef);
|
|
147
|
+
let provisionalKitRoot;
|
|
148
|
+
let installRoot;
|
|
149
|
+
let stashRoot;
|
|
150
|
+
try {
|
|
151
|
+
const cloneArgs = ["clone", "--depth", "1"];
|
|
152
|
+
if (parsed.requestedRef) {
|
|
153
|
+
cloneArgs.push("--branch", parsed.requestedRef);
|
|
154
|
+
}
|
|
155
|
+
cloneArgs.push(parsed.url, cloneDir);
|
|
156
|
+
const cloneResult = spawnSync("git", cloneArgs, { encoding: "utf8", timeout: 120_000 });
|
|
157
|
+
if (cloneResult.status !== 0) {
|
|
158
|
+
const err = cloneResult.stderr?.trim() || cloneResult.error?.message || "unknown error";
|
|
159
|
+
throw new Error(`Failed to clone ${parsed.url}: ${err}`);
|
|
160
|
+
}
|
|
161
|
+
// Copy contents to extracted dir without .git
|
|
162
|
+
fs.mkdirSync(extractedDir, { recursive: true });
|
|
163
|
+
copyDirectoryContents(cloneDir, extractedDir);
|
|
164
|
+
// Clean up the clone dir
|
|
165
|
+
fs.rmSync(cloneDir, { recursive: true, force: true });
|
|
166
|
+
provisionalKitRoot = detectStashRoot(extractedDir);
|
|
167
|
+
installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
|
|
168
|
+
stashRoot = detectStashRoot(installRoot);
|
|
127
169
|
}
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
170
|
+
catch (err) {
|
|
171
|
+
// Clean up the cache directory so stale or partially-cloned artifacts
|
|
172
|
+
// don't cause false cache hits on the next install attempt.
|
|
173
|
+
try {
|
|
174
|
+
fs.rmSync(cacheDir, { recursive: true, force: true });
|
|
175
|
+
}
|
|
176
|
+
catch {
|
|
177
|
+
// Best-effort cleanup; ignore errors
|
|
178
|
+
}
|
|
179
|
+
throw err;
|
|
133
180
|
}
|
|
134
|
-
// Copy contents to extracted dir without .git
|
|
135
|
-
fs.mkdirSync(extractedDir, { recursive: true });
|
|
136
|
-
copyDirectoryContents(cloneDir, extractedDir);
|
|
137
|
-
// Clean up the clone dir
|
|
138
|
-
fs.rmSync(cloneDir, { recursive: true, force: true });
|
|
139
|
-
const provisionalKitRoot = detectStashRoot(extractedDir);
|
|
140
|
-
const installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
|
|
141
|
-
const stashRoot = detectStashRoot(installRoot);
|
|
142
181
|
return {
|
|
143
182
|
id: resolved.id,
|
|
144
183
|
source: resolved.source,
|
|
@@ -204,13 +243,13 @@ function buildInstallCacheDir(cacheRootDir, source, id, version) {
|
|
|
204
243
|
return path.join(cacheRootDir, slug || source, versionSlug);
|
|
205
244
|
}
|
|
206
245
|
function applyAgentikitIncludeConfig(sourceRoot, cacheDir, searchRoot = sourceRoot) {
|
|
207
|
-
const includeConfig =
|
|
246
|
+
const includeConfig = findNearestIncludeConfig(sourceRoot, searchRoot);
|
|
208
247
|
if (!includeConfig)
|
|
209
248
|
return undefined;
|
|
210
249
|
const selectedDir = path.join(cacheDir, "selected");
|
|
211
250
|
fs.rmSync(selectedDir, { recursive: true, force: true });
|
|
212
251
|
fs.mkdirSync(selectedDir, { recursive: true });
|
|
213
|
-
copyIncludedPaths(includeConfig.
|
|
252
|
+
copyIncludedPaths(includeConfig.include, includeConfig.baseDir, selectedDir);
|
|
214
253
|
return selectedDir;
|
|
215
254
|
}
|
|
216
255
|
async function downloadArchive(url, destination) {
|
|
@@ -260,9 +299,10 @@ export function verifyArchiveIntegrity(archivePath, expected, source) {
|
|
|
260
299
|
}
|
|
261
300
|
return;
|
|
262
301
|
}
|
|
263
|
-
// Unrecognized format — skip verification
|
|
302
|
+
// Unrecognized format — warn and skip verification
|
|
303
|
+
warn("Unrecognized integrity format: %s — verification skipped", expected);
|
|
264
304
|
}
|
|
265
|
-
function extractTarGzSecure(archivePath, destinationDir) {
|
|
305
|
+
export function extractTarGzSecure(archivePath, destinationDir) {
|
|
266
306
|
const listResult = spawnSync("tar", ["tzf", archivePath], { encoding: "utf8" });
|
|
267
307
|
if (listResult.status !== 0) {
|
|
268
308
|
const err = listResult.stderr?.trim() || listResult.error?.message || "unknown error";
|
|
@@ -271,15 +311,42 @@ function extractTarGzSecure(archivePath, destinationDir) {
|
|
|
271
311
|
validateTarEntries(listResult.stdout);
|
|
272
312
|
fs.rmSync(destinationDir, { recursive: true, force: true });
|
|
273
313
|
fs.mkdirSync(destinationDir, { recursive: true });
|
|
274
|
-
const extractResult = spawnSync("tar", ["xzf", archivePath, "--strip-components=1", "-C", destinationDir], {
|
|
275
|
-
encoding: "utf8",
|
|
276
|
-
});
|
|
314
|
+
const extractResult = spawnSync("tar", ["xzf", archivePath, "--no-same-owner", "--strip-components=1", "-C", destinationDir], { encoding: "utf8" });
|
|
277
315
|
if (extractResult.status !== 0) {
|
|
278
316
|
const err = extractResult.stderr?.trim() || extractResult.error?.message || "unknown error";
|
|
279
317
|
throw new Error(`Failed to extract archive ${archivePath}: ${err}`);
|
|
280
318
|
}
|
|
319
|
+
// Post-extraction scan: verify all extracted files are within destinationDir
|
|
320
|
+
// This mitigates TOCTOU between validateTarEntries (list) and tar extract.
|
|
321
|
+
scanExtractedFiles(destinationDir, destinationDir);
|
|
322
|
+
}
|
|
323
|
+
/**
 * Recursively scans an extracted tree for entries that escape the
 * destination directory. Mitigates TOCTOU between the tar listing check and
 * the actual extraction (see extractTarGzSecure).
 *
 * @param {string} dir - Directory currently being scanned.
 * @param {string} root - Extraction root no entry may escape.
 * @throws {Error} when an entry name contains ".." or a symlink resolves
 *   outside the root.
 */
function scanExtractedFiles(dir, root) {
    let entries;
    try {
        entries = fs.readdirSync(dir, { withFileTypes: true });
    }
    catch {
        // Unreadable directory — nothing to scan here.
        return;
    }
    for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        // Check for ".." segments in names (e.g. symlink tricks or crafted filenames)
        if (entry.name.includes("..")) {
            throw new Error(`Post-extraction scan: suspicious entry name: ${fullPath}`);
        }
        // Resolve symlinks to detect escapes outside the destination directory
        if (entry.isSymbolicLink()) {
            const target = fs.realpathSync(fullPath);
            if (!isWithin(target, root)) {
                throw new Error(`Post-extraction scan: symlink escapes destination directory: ${fullPath} -> ${target}`);
            }
        }
        if (entry.isDirectory()) {
            scanExtractedFiles(fullPath, root);
        }
    }
}
|
|
282
|
-
function validateTarEntries(listOutput) {
|
|
349
|
+
export function validateTarEntries(listOutput) {
|
|
283
350
|
const lines = listOutput.split(/\r?\n/).filter(Boolean);
|
|
284
351
|
for (const rawLine of lines) {
|
|
285
352
|
const entry = rawLine.trim();
|
|
@@ -313,83 +380,6 @@ function isDirectory(target) {
|
|
|
313
380
|
return false;
|
|
314
381
|
}
|
|
315
382
|
}
|
|
316
|
-
function readAgentikitIncludeConfigAtDir(dirPath) {
|
|
317
|
-
const packageJsonPath = path.join(dirPath, "package.json");
|
|
318
|
-
if (!fs.existsSync(packageJsonPath))
|
|
319
|
-
return undefined;
|
|
320
|
-
let pkg;
|
|
321
|
-
try {
|
|
322
|
-
pkg = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
|
323
|
-
}
|
|
324
|
-
catch {
|
|
325
|
-
return undefined;
|
|
326
|
-
}
|
|
327
|
-
if (typeof pkg !== "object" || pkg === null || Array.isArray(pkg))
|
|
328
|
-
return undefined;
|
|
329
|
-
const akmConfig = pkg.akm;
|
|
330
|
-
if (typeof akmConfig !== "object" || akmConfig === null || Array.isArray(akmConfig))
|
|
331
|
-
return undefined;
|
|
332
|
-
const include = akmConfig.include;
|
|
333
|
-
if (!Array.isArray(include))
|
|
334
|
-
return undefined;
|
|
335
|
-
const parsedInclude = include
|
|
336
|
-
.filter((value) => typeof value === "string")
|
|
337
|
-
.map((value) => value.trim())
|
|
338
|
-
.filter(Boolean);
|
|
339
|
-
return parsedInclude.length > 0 ? { baseDir: dirPath, include: parsedInclude } : undefined;
|
|
340
|
-
}
|
|
341
|
-
function findNearestAgentikitIncludeConfig(startDir, stopDir) {
|
|
342
|
-
let current = path.resolve(startDir);
|
|
343
|
-
const boundary = path.resolve(stopDir);
|
|
344
|
-
while (isWithin(current, boundary)) {
|
|
345
|
-
const config = readAgentikitIncludeConfigAtDir(current);
|
|
346
|
-
if (config)
|
|
347
|
-
return config;
|
|
348
|
-
if (current === boundary)
|
|
349
|
-
break;
|
|
350
|
-
const parent = path.dirname(current);
|
|
351
|
-
if (parent === current)
|
|
352
|
-
break;
|
|
353
|
-
current = parent;
|
|
354
|
-
}
|
|
355
|
-
return undefined;
|
|
356
|
-
}
|
|
357
|
-
function copyIncludedPaths(baseDir, include, destinationDir) {
|
|
358
|
-
for (const entry of include) {
|
|
359
|
-
const resolvedSource = path.resolve(baseDir, entry);
|
|
360
|
-
if (!isWithin(resolvedSource, baseDir)) {
|
|
361
|
-
throw new Error(`Path in akm.include escapes the package root: ${entry}`);
|
|
362
|
-
}
|
|
363
|
-
if (!fs.existsSync(resolvedSource)) {
|
|
364
|
-
throw new Error(`Path in akm.include does not exist: ${entry}`);
|
|
365
|
-
}
|
|
366
|
-
if (path.basename(resolvedSource) === ".git") {
|
|
367
|
-
continue;
|
|
368
|
-
}
|
|
369
|
-
const relativePath = path.relative(baseDir, resolvedSource);
|
|
370
|
-
if (!relativePath || relativePath === ".") {
|
|
371
|
-
copyDirectoryContents(baseDir, destinationDir);
|
|
372
|
-
continue;
|
|
373
|
-
}
|
|
374
|
-
copyPath(resolvedSource, path.join(destinationDir, relativePath));
|
|
375
|
-
}
|
|
376
|
-
}
|
|
377
|
-
function copyDirectoryContents(sourceDir, destinationDir) {
|
|
378
|
-
for (const entry of fs.readdirSync(sourceDir, { withFileTypes: true })) {
|
|
379
|
-
if (entry.name === ".git")
|
|
380
|
-
continue;
|
|
381
|
-
copyPath(path.join(sourceDir, entry.name), path.join(destinationDir, entry.name));
|
|
382
|
-
}
|
|
383
|
-
}
|
|
384
|
-
function copyPath(sourcePath, destinationPath) {
|
|
385
|
-
const stat = fs.statSync(sourcePath);
|
|
386
|
-
fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
|
|
387
|
-
if (stat.isDirectory()) {
|
|
388
|
-
fs.cpSync(sourcePath, destinationPath, { recursive: true, force: true });
|
|
389
|
-
return;
|
|
390
|
-
}
|
|
391
|
-
fs.copyFileSync(sourcePath, destinationPath);
|
|
392
|
-
}
|
|
393
383
|
function hasStashDirs(dirPath) {
|
|
394
384
|
if (!isDirectory(dirPath))
|
|
395
385
|
return false;
|
|
@@ -452,6 +442,21 @@ function normalizeInstalledEntry(entry) {
|
|
|
452
442
|
cacheDir: path.resolve(entry.cacheDir),
|
|
453
443
|
};
|
|
454
444
|
}
|
|
445
|
+
/**
 * Copies the contents of sourceDir into destinationDir, skipping a
 * top-level ".git" entry. Directories are copied recursively.
 *
 * @param {string} sourceDir - Directory whose contents are copied.
 * @param {string} destinationDir - Target directory (created as needed).
 */
function copyDirectoryContents(sourceDir, destinationDir) {
    const children = fs.readdirSync(sourceDir, { withFileTypes: true });
    for (const child of children) {
        // Never carry VCS metadata into the copied tree.
        if (child.name === ".git") {
            continue;
        }
        const sourcePath = path.join(sourceDir, child.name);
        const targetPath = path.join(destinationDir, child.name);
        fs.mkdirSync(path.dirname(targetPath), { recursive: true });
        if (child.isDirectory()) {
            fs.cpSync(sourcePath, targetPath, { recursive: true, force: true });
        }
        else {
            fs.copyFileSync(sourcePath, targetPath);
        }
    }
}
|
|
455
460
|
async function computeFileHash(filePath) {
|
|
456
461
|
const data = fs.readFileSync(filePath);
|
|
457
462
|
const hash = createHash("sha256").update(data).digest("hex");
|