@ryanreh99/skills-sync 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +74 -0
- package/dist/assets/contracts/build/bundle.schema.json +76 -0
- package/dist/assets/contracts/inputs/config.schema.json +13 -0
- package/dist/assets/contracts/inputs/mcp-servers.schema.json +56 -0
- package/dist/assets/contracts/inputs/pack-manifest.schema.json +33 -0
- package/dist/assets/contracts/inputs/pack-sources.schema.json +47 -0
- package/dist/assets/contracts/inputs/profile.schema.json +21 -0
- package/dist/assets/contracts/inputs/upstreams.schema.json +45 -0
- package/dist/assets/contracts/runtime/targets.schema.json +120 -0
- package/dist/assets/contracts/state/upstreams-lock.schema.json +38 -0
- package/dist/assets/manifests/targets.linux.json +27 -0
- package/dist/assets/manifests/targets.macos.json +27 -0
- package/dist/assets/manifests/targets.windows.json +27 -0
- package/dist/assets/seed/config.json +3 -0
- package/dist/assets/seed/packs/personal/mcp/servers.json +20 -0
- package/dist/assets/seed/packs/personal/pack.json +7 -0
- package/dist/assets/seed/packs/personal/sources.json +31 -0
- package/dist/assets/seed/profiles/personal.json +4 -0
- package/dist/assets/seed/upstreams.json +23 -0
- package/dist/cli.js +532 -0
- package/dist/index.js +27 -0
- package/dist/lib/adapters/claude.js +49 -0
- package/dist/lib/adapters/codex.js +239 -0
- package/dist/lib/adapters/common.js +114 -0
- package/dist/lib/adapters/copilot.js +53 -0
- package/dist/lib/adapters/cursor.js +53 -0
- package/dist/lib/adapters/gemini.js +52 -0
- package/dist/lib/agents.js +888 -0
- package/dist/lib/bindings.js +510 -0
- package/dist/lib/build.js +190 -0
- package/dist/lib/bundle.js +165 -0
- package/dist/lib/config.js +324 -0
- package/dist/lib/core.js +447 -0
- package/dist/lib/detect.js +56 -0
- package/dist/lib/doctor.js +504 -0
- package/dist/lib/init.js +292 -0
- package/dist/lib/inventory.js +235 -0
- package/dist/lib/manage.js +463 -0
- package/dist/lib/mcp-config.js +264 -0
- package/dist/lib/profile-transfer.js +221 -0
- package/dist/lib/upstreams.js +782 -0
- package/docs/agent-storage-map.md +153 -0
- package/docs/architecture.md +117 -0
- package/docs/changelog.md +12 -0
- package/docs/commands.md +94 -0
- package/docs/contracts.md +112 -0
- package/docs/homebrew.md +46 -0
- package/docs/quickstart.md +14 -0
- package/docs/roadmap.md +5 -0
- package/docs/security.md +32 -0
- package/docs/user-guide.md +257 -0
- package/package.json +61 -0
|
@@ -0,0 +1,782 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
import fs from "fs-extra";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import readline from "node:readline/promises";
|
|
6
|
+
import {
|
|
7
|
+
CACHE_ROOT,
|
|
8
|
+
LOCKFILE_PATH,
|
|
9
|
+
SCHEMAS,
|
|
10
|
+
UPSTREAMS_CONFIG_PATHS,
|
|
11
|
+
assertJsonFileMatchesSchema,
|
|
12
|
+
assertObjectMatchesSchema,
|
|
13
|
+
extractSkillTitleFromMarkdown,
|
|
14
|
+
normalizeDestPrefix,
|
|
15
|
+
normalizeRepoPath,
|
|
16
|
+
writeJsonFile
|
|
17
|
+
} from "./core.js";
|
|
18
|
+
import { loadPackSources, resolvePack, resolveProfile } from "./config.js";
|
|
19
|
+
|
|
20
|
+
// Promisified child_process.execFile so git invocations can be awaited.
const execFileAsync = promisify(execFile);
// Set once the first `git --version` probe succeeds, so ensureGitAvailable()
// only shells out a single time per process.
let checkedGitAvailability = false;
|
|
22
|
+
|
|
23
|
+
/**
 * Execute a git command and return its trimmed stdout.
 *
 * @param {string[]} args - Arguments passed to the `git` binary.
 * @param {{cwd?: string, allowFailure?: boolean}} [options] - `cwd` defaults
 *   to the current working directory; with `allowFailure`, failures yield
 *   null instead of throwing.
 * @returns {Promise<string|null>} Trimmed stdout, or null on failure when
 *   `allowFailure` is set.
 * @throws {Error} When git exits non-zero and `allowFailure` is false.
 */
export async function runGit(args, options = {}) {
  const { cwd = process.cwd(), allowFailure = false } = options;
  let result;
  try {
    result = await execFileAsync("git", args, {
      cwd,
      encoding: "utf8",
      maxBuffer: 1024 * 1024 * 16
    });
  } catch (error) {
    if (allowFailure) {
      return null;
    }
    // Surface everything git reported: the node error plus captured streams.
    const parts = [error.message, error.stdout, error.stderr].filter(Boolean);
    const details = parts.join("\n").trim();
    throw new Error(`git ${args.join(" ")} failed: ${details}`);
  }
  return result.stdout.trim();
}
|
|
40
|
+
|
|
41
|
+
/**
 * Verify the `git` binary is usable; the probe runs at most once per process.
 * @throws {Error} If `git --version` cannot be executed.
 */
export async function ensureGitAvailable() {
  if (!checkedGitAvailability) {
    await runGit(["--version"]);
    checkedGitAvailability = true;
  }
}
|
|
48
|
+
|
|
49
|
+
/**
 * Location of the cached clone for a given upstream id.
 * @param {string} upstreamId
 * @returns {string} Path under CACHE_ROOT.
 */
export function getUpstreamRepoPath(upstreamId) {
  const cachedRepoDir = path.join(CACHE_ROOT, upstreamId);
  return cachedRepoDir;
}
|
|
52
|
+
|
|
53
|
+
/**
 * Ensure a blobless, no-checkout clone of the upstream exists in the cache.
 * @param {{id: string, repo: string}} upstream - Configured upstream entry.
 * @returns {Promise<string>} Path of the cached repository.
 */
export async function ensureUpstreamClone(upstream) {
  await ensureGitAvailable();
  const repoPath = getUpstreamRepoPath(upstream.id);
  const alreadyCloned = await fs.pathExists(path.join(repoPath, ".git"));
  if (alreadyCloned) {
    return repoPath;
  }
  await fs.ensureDir(path.dirname(repoPath));
  // --filter=blob:none keeps the clone small; blobs are fetched on demand.
  await runGit(["clone", "--filter=blob:none", "--no-checkout", upstream.repo, repoPath]);
  return repoPath;
}
|
|
63
|
+
|
|
64
|
+
/**
 * Fetch `ref` from origin (retrying once with --force) and return the commit
 * id FETCH_HEAD resolves to.
 * @param {string} repoPath - Cached clone to fetch into.
 * @param {string} ref - Branch, tag, or other fetchable ref.
 * @returns {Promise<string>} Full commit id.
 */
export async function fetchRefAndResolveCommit(repoPath, ref) {
  try {
    await runGit(["fetch", "--prune", "origin", ref], { cwd: repoPath });
  } catch {
    // Non-fast-forward ref updates (e.g. moved tags) need a forced fetch.
    await runGit(["fetch", "--prune", "--force", "origin", ref], { cwd: repoPath });
  }
  const commit = await runGit(["rev-parse", "--verify", "FETCH_HEAD^{commit}"], { cwd: repoPath });
  return commit;
}
|
|
72
|
+
|
|
73
|
+
/**
 * Make sure `commit` is present in the cached clone, fetching it on demand.
 * @throws {Error} When the commit cannot be obtained from origin either.
 */
export async function ensureCommitAvailable(repoPath, commit) {
  // Probe returns null (not an error) when the object is absent.
  const probe = () =>
    runGit(["cat-file", "-e", `${commit}^{commit}`], {
      cwd: repoPath,
      allowFailure: true
    });

  if ((await probe()) !== null) {
    return;
  }

  // Partial clones may lack the object; try fetching the commit directly.
  await runGit(["fetch", "--prune", "origin", commit], { cwd: repoPath, allowFailure: true });
  if ((await probe()) === null) {
    throw new Error(`Commit '${commit}' is not available in upstream cache '${repoPath}'.`);
  }
}
|
|
91
|
+
|
|
92
|
+
/**
 * Return the git object type ("blob", "tree", ...) at `repoRelativePath`
 * within `commit`, or null when no object exists at that path.
 *
 * Improvement: a single `git cat-file -t` probe replaces the previous
 * `cat-file -e` existence check followed by `cat-file -t`, halving the git
 * invocations per lookup. `cat-file -t` itself exits non-zero for a missing
 * path, so `allowFailure` covers the "absent" case with the same null result.
 *
 * @param {string} repoPath - Cached clone to query.
 * @param {string} commit - Commit id or rev.
 * @param {string} repoRelativePath - POSIX path inside the repository.
 * @returns {Promise<string|null>} Object type, or null when absent.
 */
export async function getCommitObjectType(repoPath, commit, repoRelativePath) {
  const gitPath = `${commit}:${repoRelativePath}`;
  return await runGit(["cat-file", "-t", gitPath], { cwd: repoPath, allowFailure: true });
}
|
|
101
|
+
|
|
102
|
+
/**
 * Force-checkout `commit` in the cached clone, skipping the work when the
 * tracker records that this repo is already at that commit.
 * @param {Map<string, string>} checkoutTracker - repoPath -> checked-out commit.
 */
export async function checkoutCommit(repoPath, commit, checkoutTracker) {
  const currentCommit = checkoutTracker.get(repoPath);
  const needsCheckout = !(currentCommit && currentCommit === commit);
  if (needsCheckout) {
    await runGit(["checkout", "--force", commit], { cwd: repoPath });
    checkoutTracker.set(repoPath, commit);
  }
}
|
|
110
|
+
|
|
111
|
+
/** Composite key identifying an (upstream, ref) pair in the lockfile. */
export function getLockKey(upstreamId, ref) {
  return [upstreamId, ref].join("::");
}
|
|
114
|
+
|
|
115
|
+
/**
 * Sort lock pins in place by "upstream::ref" so serialized lockfiles are
 * deterministic and diff-friendly.
 * @param {{pins: Array<{upstream: string, ref: string}>}} lockDocument
 */
export function sortPins(lockDocument) {
  const keyOf = (pin) => `${pin.upstream}::${pin.ref}`;
  lockDocument.pins.sort((a, b) => keyOf(a).localeCompare(keyOf(b)));
}
|
|
122
|
+
|
|
123
|
+
/** Locate the pin for (upstreamId, ref); null when no pin exists. */
export function findPin(lockDocument, upstreamId, ref) {
  for (const pin of lockDocument.pins) {
    if (pin.upstream === upstreamId && pin.ref === ref) {
      return pin;
    }
  }
  return null;
}
|
|
126
|
+
|
|
127
|
+
/**
 * Insert or update the pin for (upstreamId, ref).
 * @returns {boolean} true when the lock document was actually modified.
 */
export function setPin(lockDocument, upstreamId, ref, commit) {
  const current =
    lockDocument.pins.find((pin) => pin.upstream === upstreamId && pin.ref === ref) ?? null;
  if (current === null) {
    lockDocument.pins.push({ upstream: upstreamId, ref, commit });
    return true;
  }
  if (current.commit === commit) {
    return false;
  }
  current.commit = commit;
  return true;
}
|
|
143
|
+
|
|
144
|
+
/**
 * Load the upstreams config — local override when present, else the seed
 * copy — validate it against the schema, and index upstreams by id.
 *
 * @returns {Promise<{path: string, config: object, byId: Map<string, object>}>}
 * @throws {Error} When no config file exists or an upstream id repeats.
 */
export async function loadUpstreamsConfig() {
  const hasLocal = await fs.pathExists(UPSTREAMS_CONFIG_PATHS.local);
  const selectedPath = hasLocal ? UPSTREAMS_CONFIG_PATHS.local : UPSTREAMS_CONFIG_PATHS.seed;
  if (!(await fs.pathExists(selectedPath))) {
    throw new Error("No upstream configuration found.");
  }

  const config = await assertJsonFileMatchesSchema(selectedPath, SCHEMAS.upstreams);
  const byId = new Map();
  for (const upstream of config.upstreams) {
    if (byId.has(upstream.id)) {
      throw new Error(`Duplicate upstream id '${upstream.id}'.`);
    }
    byId.set(upstream.id, upstream);
  }

  return { path: selectedPath, config, byId };
}
|
|
168
|
+
|
|
169
|
+
/**
 * Read and validate the lockfile. When no lockfile exists yet, return an
 * empty in-memory lock state instead of failing. Pins come back sorted.
 * @returns {Promise<{path: string, exists: boolean, changed: boolean, lock: object}>}
 */
export async function loadLockfile() {
  const lockfileExists = await fs.pathExists(LOCKFILE_PATH);
  if (!lockfileExists) {
    return { path: LOCKFILE_PATH, exists: false, changed: false, lock: { pins: [] } };
  }

  const lock = await assertJsonFileMatchesSchema(LOCKFILE_PATH, SCHEMAS.upstreamsLock);
  sortPins(lock);
  return { path: LOCKFILE_PATH, exists: true, changed: false, lock };
}
|
|
188
|
+
|
|
189
|
+
/**
 * Persist the lock state to disk with deterministically ordered pins, then
 * mark the in-memory state as clean.
 */
export async function saveLockfile(lockState) {
  sortPins(lockState.lock);
  await writeJsonFile(lockState.path, lockState.lock);
  Object.assign(lockState, { exists: true, changed: false });
}
|
|
195
|
+
|
|
196
|
+
/**
 * Collapse duplicate (upstreamId, ref) references — the last occurrence
 * wins — and return them sorted by "upstreamId::ref".
 * @param {Array<{upstreamId: string, ref: string}>} references
 * @returns {Array} Deduplicated, sorted references.
 */
export function dedupeReferences(references) {
  const byKey = new Map();
  for (const reference of references) {
    byKey.set(`${reference.upstreamId}::${reference.ref}`, reference);
  }
  const keyOf = (item) => `${item.upstreamId}::${item.ref}`;
  return [...byKey.values()].sort((a, b) => keyOf(a).localeCompare(keyOf(b)));
}
|
|
207
|
+
|
|
208
|
+
/**
 * Guard against overly-broad import paths in pack sources.
 *
 * Rejects empty paths, '.', a root-level '*', and a bare 'skills' path unless
 * the import explicitly opts in with allowWholeSkillsTree=true.
 *
 * @throws {Error} Describing the offending imports[i].paths[j] entry.
 */
function assertImportPathIsNarrow(rawPath, normalizedPath, importEntry, importIndex, pathIndex) {
  const label = `imports[${importIndex}].paths[${pathIndex}]`;
  const trimmed = typeof rawPath === "string" ? rawPath.trim() : "";
  if (trimmed.length === 0 || trimmed === ".") {
    throw new Error(`${label} must not be empty or '.'.`);
  }
  if (normalizedPath === "*") {
    throw new Error(`${label} must not be '*' at repository root.`);
  }
  const importsWholeTree = normalizedPath === "skills" && importEntry.allowWholeSkillsTree !== true;
  if (importsWholeTree) {
    throw new Error(
      `${label} is 'skills', which imports the entire skills tree. ` +
        "Set allowWholeSkillsTree=true in this import to allow it explicitly."
    );
  }
}
|
|
223
|
+
|
|
224
|
+
/**
 * Expand pack source imports into (a) the deduplicated list of upstream/ref
 * references to resolve and (b) one planned skill import per listed path.
 *
 * @param {{imports: Array}} sources - Validated pack sources document.
 * @param {Map<string, object>} upstreamById - Known upstreams keyed by id.
 * @returns {{references: Array, skillImports: Array}}
 * @throws {Error} On unknown upstreams, empty path lists, or too-broad paths.
 */
export function collectSourcePlanning(sources, upstreamById) {
  const references = [];
  const skillImports = [];

  for (const [importIndex, entry] of sources.imports.entries()) {
    const upstream = upstreamById.get(entry.upstream);
    if (!upstream) {
      throw new Error(`Unknown upstream '${entry.upstream}' in imports[${importIndex}].`);
    }

    // An import without an explicit ref falls back to the upstream default.
    const effectiveRef = entry.ref || upstream.defaultRef;
    references.push({ upstreamId: upstream.id, ref: effectiveRef });

    if (!Array.isArray(entry.paths) || entry.paths.length === 0) {
      throw new Error(`imports[${importIndex}] must contain one or more paths.`);
    }

    const destPrefix = normalizeDestPrefix(entry.destPrefix, upstream.id, `imports[${importIndex}]`);
    for (const [pathIndex, rawPath] of entry.paths.entries()) {
      const repoPath = normalizeRepoPath(rawPath, `imports[${importIndex}].paths[${pathIndex}]`);
      assertImportPathIsNarrow(rawPath, repoPath, entry, importIndex, pathIndex);
      const skillName = path.posix.basename(repoPath);
      skillImports.push({
        upstreamId: upstream.id,
        ref: effectiveRef,
        repoPath,
        destRelative: path.posix.join(destPrefix, skillName),
        label: `${upstream.id}:${repoPath}@${effectiveRef}`
      });
    }
  }

  return { references: dedupeReferences(references), skillImports };
}
|
|
264
|
+
|
|
265
|
+
/**
 * Determine which (upstream, ref) pairs a skill lookup should inspect.
 *
 * Candidate selection: the profile's pack sources when `profileName` is
 * given; otherwise the single named upstream; otherwise every configured
 * upstream at its default ref. `upstreamId` and `ref` then act as filters on
 * whatever set was produced.
 *
 * @returns {Promise<{upstreams: object, lockState: object, references: Array}>}
 * @throws {Error} On an unknown upstream or when the filters match nothing.
 */
export async function resolveReferenceCandidatesForSkillLookup({ upstreamId, ref, profileName }) {
  const upstreams = await loadUpstreamsConfig();
  const lockState = await loadLockfile();
  let candidates;

  if (profileName) {
    const { profile } = await resolveProfile(profileName);
    const packRoot = await resolvePack(profile);
    const { sources } = await loadPackSources(packRoot);
    candidates = [...collectSourcePlanning(sources, upstreams.byId).references];
  } else if (upstreamId) {
    const upstream = upstreams.byId.get(upstreamId);
    if (!upstream) {
      throw new Error(`Unknown upstream '${upstreamId}'.`);
    }
    candidates = [{ upstreamId: upstream.id, ref: ref || upstream.defaultRef }];
  } else {
    candidates = upstreams.config.upstreams.map((upstream) => ({
      upstreamId: upstream.id,
      ref: upstream.defaultRef
    }));
  }

  // Filters run unconditionally so profile-derived candidates narrow too.
  if (upstreamId) {
    candidates = candidates.filter((item) => item.upstreamId === upstreamId);
  }
  if (ref) {
    candidates = candidates.filter((item) => item.ref === ref);
  }

  if (candidates.length === 0) {
    throw new Error("No matching upstream/ref found for the provided filters.");
  }
  return { upstreams, lockState, references: candidates };
}
|
|
310
|
+
|
|
311
|
+
/**
 * Resolve lookup candidates to concrete commits. Pinned commits are
 * preferred, with a live fetch as fallback; the lockfile is never written
 * from this read-only path.
 * @returns {Promise<Array<{upstreamId, ref, commit, repoPath}>>}
 */
async function resolveReferenceSetForSkillLookup(filters) {
  const { upstreams, lockState, references } = await resolveReferenceCandidatesForSkillLookup(filters);
  const resolved = await resolveReferences({
    references,
    upstreamById: upstreams.byId,
    lockState,
    preferPinned: true,
    requirePinned: false,
    updatePins: false,
    allowLockUpdate: false
  });

  const resolvedSet = [];
  for (const { upstreamId, ref } of references) {
    const item = resolved.get(getLockKey(upstreamId, ref));
    resolvedSet.push({ upstreamId, ref, commit: item.commit, repoPath: item.repoPath });
  }
  return resolvedSet;
}
|
|
333
|
+
|
|
334
|
+
/**
 * List skill directories (those containing SKILL.md) under skills/ at a
 * commit, reading straight from the object store — no working-tree checkout.
 * With `verbose`, each skill's title is extracted from its SKILL.md.
 *
 * @returns {Promise<Array<{path, basename, title?}>>} Sorted by path.
 */
export async function discoverUpstreamSkills(repoPath, commit, { verbose = false } = {}) {
  const listing = await runGit(["ls-tree", "-r", "--name-only", commit, "--", "skills"], {
    cwd: repoPath
  });

  const skillEntries = [];
  for (const rawLine of listing.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (line.length === 0 || !line.startsWith("skills/") || !line.endsWith("/SKILL.md")) {
      continue;
    }
    const skillPath = path.posix.dirname(line);
    skillEntries.push({
      path: skillPath,
      basename: path.posix.basename(skillPath),
      skillMdPath: line
    });
  }
  skillEntries.sort((a, b) => a.path.localeCompare(b.path));

  if (!verbose) {
    return skillEntries.map(({ path: entryPath, basename }) => ({ path: entryPath, basename }));
  }

  const skills = [];
  for (const entry of skillEntries) {
    // `git show commit:path` reads the file without touching the work tree.
    const markdown = await runGit(["show", `${commit}:${entry.skillMdPath}`], { cwd: repoPath });
    skills.push({
      path: entry.path,
      basename: entry.basename,
      title: extractSkillTitleFromMarkdown(markdown, entry.basename)
    });
  }
  return skills;
}
|
|
374
|
+
|
|
375
|
+
/**
 * Scan a commit for MCP server manifests (any `mcp/servers.json`, at the repo
 * root or nested) and flatten them into a single sorted server list.
 *
 * Manifests that fail to read, parse, or validate are skipped; their error
 * messages are collected into `warnings` rather than aborting the scan.
 *
 * @param {string} repoPath - Cached clone to read from.
 * @param {string} commit - Commit id whose tree is scanned.
 * @returns {Promise<{manifestPaths: string[], servers: Array, warnings: string[]}>}
 */
export async function discoverUpstreamMcpServers(repoPath, commit) {
  const listing = await runGit(["ls-tree", "-r", "--name-only", commit], {
    cwd: repoPath
  });
  // Sorted so downstream output is deterministic across runs.
  const manifestPaths = listing
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(
      (line) =>
        line.length > 0 && (line === "mcp/servers.json" || line.endsWith("/mcp/servers.json"))
    )
    .sort((left, right) => left.localeCompare(right));

  const warnings = [];
  const servers = [];

  for (const manifestPath of manifestPaths) {
    let manifest;
    try {
      const raw = await runGit(["show", `${commit}:${manifestPath}`], {
        cwd: repoPath
      });
      manifest = JSON.parse(raw);
      await assertObjectMatchesSchema(
        manifest,
        SCHEMAS.mcpServers,
        "upstream MCP manifest"
      );
    } catch (error) {
      // Best-effort scan: one bad manifest becomes a warning, not a failure.
      warnings.push(error.message);
      continue;
    }

    for (const [name, server] of Object.entries(manifest.servers ?? {})) {
      const env = {};
      // Copy env vars with sorted keys and stringified values so the result
      // is deterministic regardless of manifest formatting or value types.
      if (server.env && typeof server.env === "object" && !Array.isArray(server.env)) {
        for (const key of Object.keys(server.env).sort((left, right) => left.localeCompare(right))) {
          if (key.length === 0) {
            continue;
          }
          env[key] = String(server.env[key]);
        }
      }
      servers.push({
        sourcePath: manifestPath,
        name,
        command: server.command,
        args: Array.isArray(server.args) ? server.args : [],
        env
      });
    }
  }

  // Stable ordering: by server name, then by the manifest it came from.
  servers.sort((left, right) => {
    const leftKey = `${left.name}::${left.sourcePath}`;
    const rightKey = `${right.name}::${right.sourcePath}`;
    return leftKey.localeCompare(rightKey);
  });

  return {
    manifestPaths,
    servers,
    warnings
  };
}
|
|
440
|
+
|
|
441
|
+
/**
 * CLI command: list skills for every matching upstream ref.
 *
 * Output: JSON (a single object when exactly one ref matched, otherwise
 * `{results: [...]}`), or plain text — a flat path list for one ref, grouped
 * under `upstream@ref (commit)` headers for several.
 *
 * @param {{upstream?: string, ref?: string, profile?: string,
 *          format?: string, verbose?: boolean}} options
 */
export async function cmdListSkills({ upstream, ref, profile, format, verbose = false }) {
  const resolvedSet = await resolveReferenceSetForSkillLookup({
    upstreamId: upstream,
    ref,
    profileName: profile
  });
  const payloadItems = [];
  for (const resolved of resolvedSet) {
    const skills = await discoverUpstreamSkills(resolved.repoPath, resolved.commit, { verbose });
    payloadItems.push({
      upstream: resolved.upstreamId,
      ref: resolved.ref,
      commit: resolved.commit,
      skills: skills.map((skill) => ({
        path: skill.path,
        basename: skill.basename,
        // Titles are only discovered (and only emitted) in verbose mode.
        ...(verbose ? { title: skill.title } : {})
      }))
    });
  }

  if (format === "json") {
    const payload = payloadItems.length === 1 ? payloadItems[0] : { results: payloadItems };
    process.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
    return;
  }

  // Exactly one ref matched: print bare paths without a group header.
  if (payloadItems.length === 1) {
    for (const skill of payloadItems[0].skills) {
      if (verbose) {
        process.stdout.write(`${skill.path}\t${skill.title}\n`);
      } else {
        process.stdout.write(`${skill.path}\n`);
      }
    }
    return;
  }

  // Multiple refs: group under "upstream@ref (short-commit)" headers.
  for (const item of payloadItems) {
    process.stdout.write(`${item.upstream}@${item.ref} (${item.commit.slice(0, 12)})\n`);
    if (item.skills.length === 0) {
      process.stdout.write(" (no skills found)\n\n");
      continue;
    }
    for (const skill of item.skills) {
      if (verbose) {
        process.stdout.write(` ${skill.path}\t${skill.title}\n`);
      } else {
        process.stdout.write(` ${skill.path}\n`);
      }
    }
    process.stdout.write("\n");
  }
}
|
|
495
|
+
|
|
496
|
+
/**
 * CLI command: list both skills and MCP servers for every matching upstream
 * ref, plus any warnings from manifest parsing.
 *
 * Output: JSON (single object for one ref, `{results: [...]}` otherwise), or
 * text sections per ref — Skills, MCP Servers, and Warnings — separated by a
 * blank line between refs.
 *
 * @param {{upstream?: string, ref?: string, profile?: string,
 *          format?: string, verbose?: boolean}} options
 */
export async function cmdListUpstreamContent({
  upstream,
  ref,
  profile,
  format,
  verbose = false
}) {
  const resolvedSet = await resolveReferenceSetForSkillLookup({
    upstreamId: upstream,
    ref,
    profileName: profile
  });

  const payloadItems = [];
  for (const resolved of resolvedSet) {
    const skills = await discoverUpstreamSkills(resolved.repoPath, resolved.commit, { verbose });
    const mcp = await discoverUpstreamMcpServers(resolved.repoPath, resolved.commit);
    payloadItems.push({
      upstream: resolved.upstreamId,
      ref: resolved.ref,
      commit: resolved.commit,
      skills: skills.map((skill) => ({
        path: skill.path,
        basename: skill.basename,
        // Titles are only discovered (and only emitted) in verbose mode.
        ...(verbose ? { title: skill.title } : {})
      })),
      mcpServers: mcp.servers.map((server) => ({
        name: server.name,
        command: server.command,
        args: server.args,
        env: server.env
      })),
      warnings: mcp.warnings
    });
  }

  if (format === "json") {
    const payload = payloadItems.length === 1 ? payloadItems[0] : { results: payloadItems };
    process.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
    return;
  }

  for (let index = 0; index < payloadItems.length; index += 1) {
    const item = payloadItems[index];
    process.stdout.write(`${item.upstream}@${item.ref} (${item.commit.slice(0, 12)})\n`);
    process.stdout.write(`Skills (${item.skills.length})\n`);
    if (item.skills.length === 0) {
      process.stdout.write(" (none)\n");
    } else {
      for (const skill of item.skills) {
        if (verbose) {
          process.stdout.write(` ${skill.path}\t${skill.title}\n`);
        } else {
          process.stdout.write(` ${skill.path}\n`);
        }
      }
    }

    process.stdout.write(`MCP Servers (${item.mcpServers.length})\n`);
    if (item.mcpServers.length === 0) {
      process.stdout.write(" (none found in upstream manifests)\n");
    } else {
      for (const server of item.mcpServers) {
        // One line per server: name, command, args, and env key names only
        // (values are never printed).
        const argsPart = server.args.length > 0 ? ` ${server.args.join(" ")}` : "";
        const envKeys = Object.keys(server.env ?? {});
        const envPart = envKeys.length > 0 ? ` [env:${envKeys.join(",")}]` : "";
        process.stdout.write(` ${server.name}\t${server.command}${argsPart}${envPart}\n`);
      }
    }

    if (item.warnings.length > 0) {
      process.stdout.write("Warnings\n");
      for (const warning of item.warnings) {
        process.stdout.write(` ${warning}\n`);
      }
    }

    // Blank line between refs, but not after the last one.
    if (index < payloadItems.length - 1) {
      process.stdout.write("\n");
    }
  }
}
|
|
578
|
+
|
|
579
|
+
/**
 * CLI command: print configured upstreams sorted by id, as JSON or as
 * tab-separated "id\tdefaultRef\trepo" lines.
 */
export async function cmdListUpstreams({ format }) {
  const upstreams = await loadUpstreamsConfig();
  const items = upstreams.config.upstreams
    .map(({ id, type, repo, defaultRef }) => ({ id, type, repo, defaultRef }))
    .sort((left, right) => left.id.localeCompare(right.id));

  if (format === "json") {
    process.stdout.write(`${JSON.stringify({ upstreams: items }, null, 2)}\n`);
    return;
  }

  for (const { id, defaultRef, repo } of items) {
    process.stdout.write(`${id}\t${defaultRef}\t${repo}\n`);
  }
}
|
|
599
|
+
|
|
600
|
+
/**
 * Case-insensitive substring search over skill paths (and titles, in verbose
 * mode) across every matching upstream ref.
 * @returns {Promise<Array>} Matches sorted by upstream, path, then title.
 */
async function collectSearchResults({ upstream, ref, profile, query, verbose = false }) {
  const needle = query.trim().toLowerCase();
  const resolvedSet = await resolveReferenceSetForSkillLookup({
    upstreamId: upstream,
    ref,
    profileName: profile
  });

  const allResults = [];
  for (const resolvedItem of resolvedSet) {
    const skills = await discoverUpstreamSkills(resolvedItem.repoPath, resolvedItem.commit, { verbose });
    for (const skill of skills) {
      const pathMatches = skill.path.toLowerCase().includes(needle);
      const titleMatches =
        verbose && typeof skill.title === "string" && skill.title.toLowerCase().includes(needle);
      if (!pathMatches && !titleMatches) {
        continue;
      }
      allResults.push({
        upstream: resolvedItem.upstreamId,
        ref: resolvedItem.ref,
        commit: resolvedItem.commit,
        path: skill.path,
        basename: skill.basename,
        ...(verbose ? { title: skill.title } : {})
      });
    }
  }

  const keyOf = (result) => `${result.upstream}::${result.path}::${result.title ?? ""}`;
  allResults.sort((left, right) => keyOf(left).localeCompare(keyOf(right)));
  return allResults;
}
|
|
635
|
+
|
|
636
|
+
/**
 * Prompt-driven skill search: reads queries from stdin until a blank line is
 * entered (or the readline interface is closed) and prints the matches for
 * each query.
 */
async function cmdSearchSkillsInteractive({ upstream, ref, profile, verbose = false }) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });

  process.stdout.write("Interactive skill search. Enter a query (blank to exit).\n");
  try {
    while (true) {
      let rawQuery;
      try {
        rawQuery = await rl.question("search> ");
      } catch (error) {
        // question() rejects when the interface closes (e.g. Ctrl-D / stream
        // end); treat that specific rejection as a normal exit.
        if (String(error?.message || "").toLowerCase().includes("readline was closed")) {
          break;
        }
        throw error;
      }
      const query = rawQuery.trim();
      // Blank input is the documented way to leave the loop.
      if (!query) {
        break;
      }

      const results = await collectSearchResults({
        upstream,
        ref,
        profile,
        query,
        verbose
      });
      if (results.length === 0) {
        process.stdout.write(`No skills matched "${query}".\n\n`);
        continue;
      }

      process.stdout.write(`Found ${results.length} match(es):\n`);
      for (const result of results) {
        if (verbose) {
          process.stdout.write(`${result.upstream} ${result.path}\t${result.title}\n`);
        } else {
          process.stdout.write(`${result.upstream} ${result.path}\n`);
        }
      }
      process.stdout.write("\n");
    }
  } finally {
    // Always release stdin so the process can exit cleanly.
    rl.close();
  }
}
|
|
685
|
+
|
|
686
|
+
/**
 * CLI command: search skills by substring. With `interactive`, enters the
 * prompt loop; otherwise `query` is required and matches are printed as JSON
 * or tab-separated text.
 * @throws {Error} When `query` is missing in non-interactive mode.
 */
export async function cmdSearchSkills({ upstream, ref, profile, query, format, interactive, verbose = false }) {
  if (interactive) {
    await cmdSearchSkillsInteractive({ upstream, ref, profile, verbose });
    return;
  }
  if (!query || query.trim().length === 0) {
    throw new Error("--query is required in non-interactive mode. Use --interactive for prompt mode.");
  }

  const allResults = await collectSearchResults({ upstream, ref, profile, query, verbose });

  if (format === "json") {
    process.stdout.write(`${JSON.stringify(allResults, null, 2)}\n`);
    return;
  }
  if (allResults.length === 0) {
    process.stdout.write(`No skills matched "${query}".\n`);
    return;
  }
  for (const result of allResults) {
    const line = verbose
      ? `${result.upstream} ${result.path}\t${result.title}\n`
      : `${result.upstream} ${result.path}\n`;
    process.stdout.write(line);
  }
}
|
|
715
|
+
|
|
716
|
+
/**
 * Resolve each (upstreamId, ref) reference to a concrete commit in its
 * cached clone, honoring and optionally updating lockfile pins.
 *
 * @param {object} params
 * @param {Array<{upstreamId: string, ref: string}>} params.references
 * @param {Map<string, object>} params.upstreamById - Known upstreams by id.
 * @param {{lock: object, changed: boolean}} params.lockState - `changed` is
 *   flipped to true whenever a pin is added or updated here.
 * @param {boolean} params.preferPinned - Use an existing pin instead of
 *   fetching the ref live.
 * @param {boolean} params.requirePinned - Fail instead of fetching when no
 *   usable pin exists.
 * @param {boolean} params.updatePins - Refresh existing pins to freshly
 *   resolved commits.
 * @param {boolean} params.allowLockUpdate - Master switch; no pin is written
 *   unless this is true.
 * @returns {Promise<Map<string, {upstream, ref, commit, repoPath, pinUsed}>>}
 *   Keyed by getLockKey(upstreamId, ref).
 */
export async function resolveReferences({
  references,
  upstreamById,
  lockState,
  preferPinned,
  requirePinned,
  updatePins,
  allowLockUpdate
}) {
  const resolved = new Map();

  for (const reference of references) {
    const key = getLockKey(reference.upstreamId, reference.ref);
    const upstream = upstreamById.get(reference.upstreamId);
    if (!upstream) {
      throw new Error(`Unknown upstream '${reference.upstreamId}'.`);
    }

    const repoPath = await ensureUpstreamClone(upstream);
    const pin = findPin(lockState.lock, reference.upstreamId, reference.ref);
    let commit = null;

    if (preferPinned && pin) {
      // Trust the pinned commit, but make sure the object is actually fetched.
      commit = pin.commit;
      await ensureCommitAvailable(repoPath, commit);
    } else if (requirePinned) {
      // NOTE(review): if requirePinned is set while preferPinned is not, this
      // throws "Missing lock pin" even when a pin exists — presumably callers
      // always pair requirePinned with preferPinned; verify at call sites.
      throw new Error(
        `Missing lock pin for upstream '${reference.upstreamId}' ref '${reference.ref}'. ` +
          "Run build --lock=write or build --lock=refresh."
      );
    } else {
      commit = await fetchRefAndResolveCommit(repoPath, reference.ref);
    }

    // Equivalent to allowLockUpdate && (updatePins || !pin): unpinned refs are
    // pinned on first sight; existing pins move only when updatePins is set.
    if ((updatePins || (!pin && allowLockUpdate)) && allowLockUpdate) {
      if (setPin(lockState.lock, reference.upstreamId, reference.ref, commit)) {
        lockState.changed = true;
      }
    }

    resolved.set(key, {
      upstream,
      ref: reference.ref,
      commit,
      repoPath,
      pinUsed: Boolean(pin && preferPinned)
    });
  }

  return resolved;
}
|
|
767
|
+
|
|
768
|
+
/**
 * Validate every lock pin: its upstream must be configured and its commit
 * must be obtainable in the cached clone. Problems are appended to the
 * caller-supplied `errors` array (mutated); nothing is thrown for bad pins.
 */
export async function validateAllLockPins(lockState, upstreamById, errors) {
  for (const pin of lockState.lock.pins) {
    const upstream = upstreamById.get(pin.upstream);
    if (upstream === undefined) {
      errors.push(`Lock pin references unknown upstream '${pin.upstream}'.`);
    } else {
      try {
        await ensureCommitAvailable(await ensureUpstreamClone(upstream), pin.commit);
      } catch (error) {
        errors.push(`Invalid lock pin ${pin.upstream}@${pin.ref} -> ${pin.commit}: ${error.message}`);
      }
    }
  }
}
|