@omnidev-ai/core 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +889 -0
- package/dist/index.js +2173 -0
- package/dist/test-utils/index.d.ts +142 -0
- package/dist/test-utils/index.js +261 -0
- package/package.json +16 -6
- package/src/capability/AGENTS.md +0 -58
- package/src/capability/commands.test.ts +0 -410
- package/src/capability/commands.ts +0 -72
- package/src/capability/docs.test.ts +0 -192
- package/src/capability/docs.ts +0 -48
- package/src/capability/index.ts +0 -20
- package/src/capability/loader.test.ts +0 -668
- package/src/capability/loader.ts +0 -431
- package/src/capability/registry.test.ts +0 -455
- package/src/capability/registry.ts +0 -55
- package/src/capability/rules.test.ts +0 -135
- package/src/capability/rules.ts +0 -135
- package/src/capability/skills.test.ts +0 -312
- package/src/capability/skills.ts +0 -58
- package/src/capability/sources.test.ts +0 -439
- package/src/capability/sources.ts +0 -998
- package/src/capability/subagents.test.ts +0 -474
- package/src/capability/subagents.ts +0 -105
- package/src/capability/wrapping-integration.test.ts +0 -412
- package/src/capability/yaml-parser.ts +0 -81
- package/src/config/AGENTS.md +0 -46
- package/src/config/capabilities.ts +0 -54
- package/src/config/env.test.ts +0 -270
- package/src/config/env.ts +0 -96
- package/src/config/index.ts +0 -6
- package/src/config/loader.test.ts +0 -198
- package/src/config/loader.ts +0 -207
- package/src/config/parser.test.ts +0 -256
- package/src/config/parser.ts +0 -55
- package/src/config/profiles.test.ts +0 -222
- package/src/config/profiles.ts +0 -75
- package/src/config/provider.test.ts +0 -66
- package/src/config/provider.ts +0 -55
- package/src/debug.ts +0 -20
- package/src/index.test.ts +0 -26
- package/src/index.ts +0 -37
- package/src/mcp-json/index.ts +0 -1
- package/src/mcp-json/manager.test.ts +0 -310
- package/src/mcp-json/manager.ts +0 -106
- package/src/state/active-profile.test.ts +0 -117
- package/src/state/active-profile.ts +0 -41
- package/src/state/index.ts +0 -3
- package/src/state/manifest.test.ts +0 -411
- package/src/state/manifest.ts +0 -137
- package/src/state/providers.test.ts +0 -125
- package/src/state/providers.ts +0 -69
- package/src/sync.ts +0 -288
- package/src/templates/agents.test.ts +0 -23
- package/src/templates/agents.ts +0 -14
- package/src/templates/claude.test.ts +0 -48
- package/src/templates/claude.ts +0 -57
- package/src/test-utils/helpers.test.ts +0 -214
- package/src/test-utils/helpers.ts +0 -284
- package/src/test-utils/index.ts +0 -34
- package/src/test-utils/mocks.test.ts +0 -83
- package/src/test-utils/mocks.ts +0 -101
- package/src/types/capability-export.ts +0 -157
- package/src/types/index.test.ts +0 -28
- package/src/types/index.ts +0 -314
|
@@ -1,998 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Git-sourced capabilities: fetching, wrapping, and version management
|
|
3
|
-
*
|
|
4
|
-
* This module handles:
|
|
5
|
-
* - Cloning/fetching capabilities from Git repositories
|
|
6
|
-
* - Wrapping external repos (discovering skills/agents/commands)
|
|
7
|
-
* - Managing the capabilities.lock.toml file
|
|
8
|
-
* - Version tracking and update detection
|
|
9
|
-
*/
|
|
10
|
-
|
|
11
|
-
import { existsSync } from "node:fs";
|
|
12
|
-
import { cp, mkdir, readdir, readFile, rm, stat, writeFile } from "node:fs/promises";
|
|
13
|
-
import { join } from "node:path";
|
|
14
|
-
import { parse as parseToml } from "smol-toml";
|
|
15
|
-
import type {
|
|
16
|
-
CapabilitiesLockFile,
|
|
17
|
-
CapabilityLockEntry,
|
|
18
|
-
CapabilitySourceConfig,
|
|
19
|
-
GitCapabilitySourceConfig,
|
|
20
|
-
OmniConfig,
|
|
21
|
-
} from "../types/index.js";
|
|
22
|
-
|
|
23
|
-
// Local path for .omni directory (all capability data lives under here)
const OMNI_LOCAL = ".omni";

// Directory names to scan for content (singular and plural forms).
// NOTE(review): matching is case-sensitive; e.g. "Skills/" would not match.
const SKILL_DIRS = ["skills", "skill"];
const AGENT_DIRS = ["agents", "agent", "subagents", "subagent"];
const COMMAND_DIRS = ["commands", "command"];
const RULE_DIRS = ["rules", "rule"];
const DOC_DIRS = ["docs", "doc", "documentation"];

// Marker-file patterns for each content type; checked in order, first
// match inside a candidate folder wins (see findContentItems).
const SKILL_FILES = ["SKILL.md", "skill.md", "Skill.md"];
const AGENT_FILES = ["AGENT.md", "agent.md", "Agent.md", "SUBAGENT.md", "subagent.md"];
const COMMAND_FILES = ["COMMAND.md", "command.md", "Command.md"];
|
|
37
|
-
|
|
38
|
-
/** Result of fetching a single git-sourced capability. */
export interface FetchResult {
  /** Capability id (the key this source was configured under). */
  id: string;
  /** Local install path (under .omni/capabilities/<id>). */
  path: string;
  /** package.json version when present in the repo, else the short commit hash. */
  version: string;
  /** Git commit hash */
  commit?: string;
  /** True when the local copy changed (fresh clone or new commits pulled). */
  updated: boolean;
  /** True when a capability.toml was auto-generated ("wrapped") for the repo. */
  wrapped: boolean;
}
|
|
47
|
-
|
|
48
|
-
/**
 * Update-check summary for one capability source.
 * NOTE(review): not constructed anywhere in this module — field meanings
 * inferred from names; confirm against the code that populates it.
 */
export interface SourceUpdateInfo {
  /** Capability id. */
  id: string;
  /** Source string (e.g. "github:user/repo"). */
  source: string;
  /** Presumably the currently installed version — verify at call site. */
  currentVersion: string;
  /** Presumably the newest version available upstream — verify at call site. */
  latestVersion: string;
  /** True when an update is available. */
  hasUpdate: boolean;
}
|
|
55
|
-
|
|
56
|
-
/**
|
|
57
|
-
* Check if a source string is a git source
|
|
58
|
-
*/
|
|
59
|
-
export function isGitSource(source: string): boolean {
|
|
60
|
-
return (
|
|
61
|
-
source.startsWith("github:") ||
|
|
62
|
-
source.startsWith("git@") ||
|
|
63
|
-
source.startsWith("https://") ||
|
|
64
|
-
source.startsWith("http://")
|
|
65
|
-
);
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
/**
|
|
69
|
-
* Parse a capability source string or config into normalized form
|
|
70
|
-
* Returns a GitCapabilitySourceConfig
|
|
71
|
-
*/
|
|
72
|
-
export function parseSourceConfig(source: CapabilitySourceConfig): GitCapabilitySourceConfig {
|
|
73
|
-
if (typeof source === "string") {
|
|
74
|
-
// Git source shorthand formats:
|
|
75
|
-
// - "github:user/repo"
|
|
76
|
-
// - "github:user/repo#ref"
|
|
77
|
-
// - "git@github.com:user/repo.git"
|
|
78
|
-
// - "https://github.com/user/repo.git"
|
|
79
|
-
|
|
80
|
-
let sourceUrl = source;
|
|
81
|
-
let ref: string | undefined;
|
|
82
|
-
|
|
83
|
-
// Check for ref in github shorthand
|
|
84
|
-
if (source.startsWith("github:") && source.includes("#")) {
|
|
85
|
-
const parts = source.split("#");
|
|
86
|
-
sourceUrl = parts[0] ?? source;
|
|
87
|
-
ref = parts[1];
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
const result: GitCapabilitySourceConfig = { source: sourceUrl };
|
|
91
|
-
if (ref) {
|
|
92
|
-
result.ref = ref;
|
|
93
|
-
}
|
|
94
|
-
return result;
|
|
95
|
-
}
|
|
96
|
-
return source;
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
/**
|
|
100
|
-
* Convert source to a git-cloneable URL
|
|
101
|
-
*/
|
|
102
|
-
export function sourceToGitUrl(source: string): string {
|
|
103
|
-
if (source.startsWith("github:")) {
|
|
104
|
-
const repo = source.replace("github:", "");
|
|
105
|
-
return `https://github.com/${repo}.git`;
|
|
106
|
-
}
|
|
107
|
-
// Already a URL or SSH path
|
|
108
|
-
return source;
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
/**
|
|
112
|
-
* Get the path where a capability source will be stored
|
|
113
|
-
*/
|
|
114
|
-
export function getSourceCapabilityPath(id: string): string {
|
|
115
|
-
return join(OMNI_LOCAL, "capabilities", id);
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
/**
|
|
119
|
-
* Get the lock file path
|
|
120
|
-
*/
|
|
121
|
-
export function getLockFilePath(): string {
|
|
122
|
-
return "omni.lock.toml";
|
|
123
|
-
}
|
|
124
|
-
|
|
125
|
-
/**
|
|
126
|
-
* Load the capabilities lock file
|
|
127
|
-
*/
|
|
128
|
-
export async function loadLockFile(): Promise<CapabilitiesLockFile> {
|
|
129
|
-
const lockPath = getLockFilePath();
|
|
130
|
-
if (!existsSync(lockPath)) {
|
|
131
|
-
return { capabilities: {} };
|
|
132
|
-
}
|
|
133
|
-
|
|
134
|
-
try {
|
|
135
|
-
const content = await readFile(lockPath, "utf-8");
|
|
136
|
-
const parsed = parseToml(content) as Record<string, unknown>;
|
|
137
|
-
const capabilities = parsed["capabilities"] as Record<string, CapabilityLockEntry> | undefined;
|
|
138
|
-
return {
|
|
139
|
-
capabilities: capabilities || {},
|
|
140
|
-
};
|
|
141
|
-
} catch {
|
|
142
|
-
return { capabilities: {} };
|
|
143
|
-
}
|
|
144
|
-
}
|
|
145
|
-
|
|
146
|
-
/**
|
|
147
|
-
* Stringify a lock file to TOML format
|
|
148
|
-
*/
|
|
149
|
-
function stringifyLockFile(lockFile: CapabilitiesLockFile): string {
|
|
150
|
-
const lines: string[] = [];
|
|
151
|
-
|
|
152
|
-
for (const [id, entry] of Object.entries(lockFile.capabilities)) {
|
|
153
|
-
lines.push(`[capabilities.${id}]`);
|
|
154
|
-
lines.push(`source = "${entry.source}"`);
|
|
155
|
-
lines.push(`version = "${entry.version}"`);
|
|
156
|
-
if (entry.commit) {
|
|
157
|
-
lines.push(`commit = "${entry.commit}"`);
|
|
158
|
-
}
|
|
159
|
-
if (entry.ref) {
|
|
160
|
-
lines.push(`ref = "${entry.ref}"`);
|
|
161
|
-
}
|
|
162
|
-
lines.push(`updated_at = "${entry.updated_at}"`);
|
|
163
|
-
lines.push("");
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
return lines.join("\n");
|
|
167
|
-
}
|
|
168
|
-
|
|
169
|
-
/**
|
|
170
|
-
* Save the capabilities lock file
|
|
171
|
-
*/
|
|
172
|
-
export async function saveLockFile(lockFile: CapabilitiesLockFile): Promise<void> {
|
|
173
|
-
const lockPath = getLockFilePath();
|
|
174
|
-
|
|
175
|
-
// Ensure directory exists
|
|
176
|
-
await mkdir(join(OMNI_LOCAL, "capabilities"), { recursive: true });
|
|
177
|
-
|
|
178
|
-
const header = `# Auto-generated by OmniDev - DO NOT EDIT
|
|
179
|
-
# Records installed capability versions for reproducibility
|
|
180
|
-
# Last updated: ${new Date().toISOString()}
|
|
181
|
-
|
|
182
|
-
`;
|
|
183
|
-
const content = header + stringifyLockFile(lockFile);
|
|
184
|
-
await writeFile(lockPath, content, "utf-8");
|
|
185
|
-
}
|
|
186
|
-
|
|
187
|
-
/**
 * Get the current commit hash of a git repository.
 *
 * Runs `git rev-parse HEAD` in the given directory and returns the
 * trimmed stdout.
 *
 * NOTE(review): the exit code is awaited but never checked — on failure
 * (e.g. not a git repo) this returns "" rather than throwing; confirm
 * callers tolerate an empty hash.
 */
async function getRepoCommit(repoPath: string): Promise<string> {
  const proc = Bun.spawn(["git", "rev-parse", "HEAD"], {
    cwd: repoPath,
    stdout: "pipe",
    stderr: "pipe",
  });
  // Read stdout fully, then wait for the process to finish.
  const output = await new Response(proc.stdout).text();
  await proc.exited;
  return output.trim();
}
|
|
200
|
-
|
|
201
|
-
/**
|
|
202
|
-
* Get short commit hash (7 chars)
|
|
203
|
-
*/
|
|
204
|
-
function shortCommit(commit: string): string {
|
|
205
|
-
return commit.substring(0, 7);
|
|
206
|
-
}
|
|
207
|
-
|
|
208
|
-
/**
|
|
209
|
-
* Clone a git repository
|
|
210
|
-
*/
|
|
211
|
-
async function cloneRepo(gitUrl: string, targetPath: string, ref?: string): Promise<void> {
|
|
212
|
-
// Ensure parent directory exists
|
|
213
|
-
await mkdir(join(targetPath, ".."), { recursive: true });
|
|
214
|
-
|
|
215
|
-
const args = ["clone", "--depth", "1"];
|
|
216
|
-
if (ref) {
|
|
217
|
-
args.push("--branch", ref);
|
|
218
|
-
}
|
|
219
|
-
args.push(gitUrl, targetPath);
|
|
220
|
-
|
|
221
|
-
const proc = Bun.spawn(["git", ...args], {
|
|
222
|
-
stdout: "pipe",
|
|
223
|
-
stderr: "pipe",
|
|
224
|
-
});
|
|
225
|
-
|
|
226
|
-
await proc.exited;
|
|
227
|
-
|
|
228
|
-
if (proc.exitCode !== 0) {
|
|
229
|
-
const stderr = await new Response(proc.stderr).text();
|
|
230
|
-
throw new Error(`Failed to clone ${gitUrl}: ${stderr}`);
|
|
231
|
-
}
|
|
232
|
-
}
|
|
233
|
-
|
|
234
|
-
/**
|
|
235
|
-
* Fetch and update an existing repository
|
|
236
|
-
*/
|
|
237
|
-
async function fetchRepo(repoPath: string, ref?: string): Promise<boolean> {
|
|
238
|
-
// Fetch latest
|
|
239
|
-
const fetchProc = Bun.spawn(["git", "fetch", "--depth", "1", "origin"], {
|
|
240
|
-
cwd: repoPath,
|
|
241
|
-
stdout: "pipe",
|
|
242
|
-
stderr: "pipe",
|
|
243
|
-
});
|
|
244
|
-
await fetchProc.exited;
|
|
245
|
-
|
|
246
|
-
// Get current and remote commits
|
|
247
|
-
const currentCommit = await getRepoCommit(repoPath);
|
|
248
|
-
|
|
249
|
-
// Check remote commit
|
|
250
|
-
const targetRef = ref || "HEAD";
|
|
251
|
-
const lsProc = Bun.spawn(["git", "ls-remote", "origin", targetRef], {
|
|
252
|
-
cwd: repoPath,
|
|
253
|
-
stdout: "pipe",
|
|
254
|
-
stderr: "pipe",
|
|
255
|
-
});
|
|
256
|
-
const lsOutput = await new Response(lsProc.stdout).text();
|
|
257
|
-
await lsProc.exited;
|
|
258
|
-
|
|
259
|
-
const remoteCommit = lsOutput.split("\t")[0];
|
|
260
|
-
|
|
261
|
-
if (currentCommit === remoteCommit) {
|
|
262
|
-
return false; // No update
|
|
263
|
-
}
|
|
264
|
-
|
|
265
|
-
// Pull changes
|
|
266
|
-
const pullProc = Bun.spawn(["git", "pull", "--ff-only"], {
|
|
267
|
-
cwd: repoPath,
|
|
268
|
-
stdout: "pipe",
|
|
269
|
-
stderr: "pipe",
|
|
270
|
-
});
|
|
271
|
-
await pullProc.exited;
|
|
272
|
-
|
|
273
|
-
return true; // Updated
|
|
274
|
-
}
|
|
275
|
-
|
|
276
|
-
/**
|
|
277
|
-
* Check if a directory contains a capability.toml
|
|
278
|
-
*/
|
|
279
|
-
function hasCapabilityToml(dirPath: string): boolean {
|
|
280
|
-
return existsSync(join(dirPath, "capability.toml"));
|
|
281
|
-
}
|
|
282
|
-
|
|
283
|
-
/**
|
|
284
|
-
* Check if a directory should be wrapped (has plugin.json or appropriate structure)
|
|
285
|
-
* Returns true if:
|
|
286
|
-
* 1. .claude-plugin/plugin.json exists, OR
|
|
287
|
-
* 2. Any of the expected content directories exist (skills, agents, commands, rules, docs)
|
|
288
|
-
*/
|
|
289
|
-
async function shouldWrapDirectory(dirPath: string): Promise<boolean> {
|
|
290
|
-
// Check for plugin.json
|
|
291
|
-
if (existsSync(join(dirPath, ".claude-plugin", "plugin.json"))) {
|
|
292
|
-
return true;
|
|
293
|
-
}
|
|
294
|
-
|
|
295
|
-
// Check for any expected content directories
|
|
296
|
-
const allDirs = [...SKILL_DIRS, ...AGENT_DIRS, ...COMMAND_DIRS, ...RULE_DIRS, ...DOC_DIRS];
|
|
297
|
-
for (const dirName of allDirs) {
|
|
298
|
-
const checkPath = join(dirPath, dirName);
|
|
299
|
-
if (existsSync(checkPath)) {
|
|
300
|
-
const stats = await stat(checkPath);
|
|
301
|
-
if (stats.isDirectory()) {
|
|
302
|
-
return true;
|
|
303
|
-
}
|
|
304
|
-
}
|
|
305
|
-
}
|
|
306
|
-
|
|
307
|
-
return false;
|
|
308
|
-
}
|
|
309
|
-
|
|
310
|
-
/**
|
|
311
|
-
* Find directories matching any of the given names
|
|
312
|
-
*/
|
|
313
|
-
async function findMatchingDirs(basePath: string, names: string[]): Promise<string | null> {
|
|
314
|
-
for (const name of names) {
|
|
315
|
-
const dirPath = join(basePath, name);
|
|
316
|
-
if (existsSync(dirPath)) {
|
|
317
|
-
const stats = await stat(dirPath);
|
|
318
|
-
if (stats.isDirectory()) {
|
|
319
|
-
return dirPath;
|
|
320
|
-
}
|
|
321
|
-
}
|
|
322
|
-
}
|
|
323
|
-
return null;
|
|
324
|
-
}
|
|
325
|
-
|
|
326
|
-
/**
|
|
327
|
-
* Find content files in a directory (skills, agents, commands)
|
|
328
|
-
*/
|
|
329
|
-
async function findContentItems(
|
|
330
|
-
dirPath: string,
|
|
331
|
-
filePatterns: string[],
|
|
332
|
-
): Promise<Array<{ name: string; path: string; isFolder: boolean }>> {
|
|
333
|
-
const items: Array<{ name: string; path: string; isFolder: boolean }> = [];
|
|
334
|
-
|
|
335
|
-
if (!existsSync(dirPath)) {
|
|
336
|
-
return items;
|
|
337
|
-
}
|
|
338
|
-
|
|
339
|
-
const entries = (await readdir(dirPath, { withFileTypes: true })).sort((a, b) =>
|
|
340
|
-
a.name.localeCompare(b.name),
|
|
341
|
-
);
|
|
342
|
-
|
|
343
|
-
for (const entry of entries) {
|
|
344
|
-
const entryPath = join(dirPath, entry.name);
|
|
345
|
-
|
|
346
|
-
if (entry.isDirectory()) {
|
|
347
|
-
// Check for content file inside directory
|
|
348
|
-
for (const pattern of filePatterns) {
|
|
349
|
-
if (existsSync(join(entryPath, pattern))) {
|
|
350
|
-
items.push({
|
|
351
|
-
name: entry.name,
|
|
352
|
-
path: entryPath,
|
|
353
|
-
isFolder: true,
|
|
354
|
-
});
|
|
355
|
-
break;
|
|
356
|
-
}
|
|
357
|
-
}
|
|
358
|
-
} else if (entry.isFile() && entry.name.endsWith(".md")) {
|
|
359
|
-
// Single file content (e.g., agents/researcher.md)
|
|
360
|
-
const name = entry.name.replace(/\.md$/i, "");
|
|
361
|
-
items.push({
|
|
362
|
-
name,
|
|
363
|
-
path: entryPath,
|
|
364
|
-
isFolder: false,
|
|
365
|
-
});
|
|
366
|
-
}
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
return items;
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
/**
 * Plugin metadata from .claude-plugin/plugin.json.
 * All fields are optional: the JSON file may omit any of them.
 */
export interface PluginMetadata {
  /** Plugin display name. */
  name?: string;
  /** Version string, used verbatim (not validated here). */
  version?: string;
  /** Human-readable description. */
  description?: string;
  /** Copied from plugin.json's "author" object when present. */
  author?: {
    name?: string;
    email?: string;
  };
}
|
|
384
|
-
|
|
385
|
-
/**
|
|
386
|
-
* Parse .claude-plugin/plugin.json if it exists
|
|
387
|
-
*/
|
|
388
|
-
async function parsePluginJson(dirPath: string): Promise<PluginMetadata | null> {
|
|
389
|
-
const pluginJsonPath = join(dirPath, ".claude-plugin", "plugin.json");
|
|
390
|
-
if (!existsSync(pluginJsonPath)) {
|
|
391
|
-
return null;
|
|
392
|
-
}
|
|
393
|
-
|
|
394
|
-
try {
|
|
395
|
-
const content = await readFile(pluginJsonPath, "utf-8");
|
|
396
|
-
const data = JSON.parse(content);
|
|
397
|
-
const result: PluginMetadata = {
|
|
398
|
-
name: data.name,
|
|
399
|
-
version: data.version,
|
|
400
|
-
description: data.description,
|
|
401
|
-
};
|
|
402
|
-
if (data.author) {
|
|
403
|
-
result.author = {
|
|
404
|
-
name: data.author.name,
|
|
405
|
-
email: data.author.email,
|
|
406
|
-
};
|
|
407
|
-
}
|
|
408
|
-
return result;
|
|
409
|
-
} catch (error) {
|
|
410
|
-
console.warn(`Failed to parse plugin.json in ${dirPath}:`, error);
|
|
411
|
-
return null;
|
|
412
|
-
}
|
|
413
|
-
}
|
|
414
|
-
|
|
415
|
-
/**
 * Read README.md and extract a short description.
 *
 * Skips headers, blank lines, image lines, and fenced code blocks, then
 * concatenates remaining lines (joined with spaces) until at least 200
 * characters are collected; the result is truncated to 200 chars with a
 * "..." suffix. Note: paragraphs are NOT separated — content after the
 * first blank line is still appended until the length cap is hit.
 *
 * Returns null when README.md is absent, unreadable, or yields no text.
 */
async function readReadmeDescription(dirPath: string): Promise<string | null> {
  const readmePath = join(dirPath, "README.md");
  if (!existsSync(readmePath)) {
    return null;
  }

  try {
    const content = await readFile(readmePath, "utf-8");
    const lines = content.split("\n");
    let description = "";
    let inCodeBlock = false;

    for (const line of lines) {
      const trimmed = line.trim();

      // Track fenced code blocks (``` toggles the flag).
      if (trimmed.startsWith("```")) {
        inCodeBlock = !inCodeBlock;
        continue;
      }

      // Skip headers, empty lines, images, and code-block interiors.
      if (
        inCodeBlock ||
        trimmed.startsWith("#") ||
        trimmed.length === 0 ||
        trimmed.startsWith("![")
      ) {
        continue;
      }

      // Accumulate prose, joining lines with a single space.
      description += (description ? " " : "") + trimmed;

      // Stop once we have enough material.
      if (description.length >= 200) {
        break;
      }
    }

    // Hard cap at 200 chars (197 + "...").
    if (description.length > 200) {
      description = `${description.substring(0, 197)}...`;
    }

    return description || null;
  } catch (error) {
    console.warn(`Failed to read README.md in ${dirPath}:`, error);
    return null;
  }
}
|
|
470
|
-
|
|
471
|
-
/**
 * Content discovered in a wrapped repository.
 */
export interface DiscoveredContent {
  /** Skill entries: marker-file folders or loose .md files (see findContentItems). */
  skills: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Agent/subagent entries, discovered the same way as skills. */
  agents: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Command entries, discovered the same way as skills. */
  commands: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Path of the rules directory, or null when none was found. */
  rulesDir: string | null;
  /** Path of the docs directory, or null when none was found. */
  docsDir: string | null;
}
|
|
481
|
-
|
|
482
|
-
async function discoverContent(repoPath: string): Promise<DiscoveredContent> {
|
|
483
|
-
const result: DiscoveredContent = {
|
|
484
|
-
skills: [],
|
|
485
|
-
agents: [],
|
|
486
|
-
commands: [],
|
|
487
|
-
rulesDir: null,
|
|
488
|
-
docsDir: null,
|
|
489
|
-
};
|
|
490
|
-
|
|
491
|
-
// Find skills
|
|
492
|
-
const skillsDir = await findMatchingDirs(repoPath, SKILL_DIRS);
|
|
493
|
-
if (skillsDir) {
|
|
494
|
-
result.skills = await findContentItems(skillsDir, SKILL_FILES);
|
|
495
|
-
}
|
|
496
|
-
|
|
497
|
-
// Find agents
|
|
498
|
-
const agentsDir = await findMatchingDirs(repoPath, AGENT_DIRS);
|
|
499
|
-
if (agentsDir) {
|
|
500
|
-
result.agents = await findContentItems(agentsDir, AGENT_FILES);
|
|
501
|
-
}
|
|
502
|
-
|
|
503
|
-
// Find commands
|
|
504
|
-
const commandsDir = await findMatchingDirs(repoPath, COMMAND_DIRS);
|
|
505
|
-
if (commandsDir) {
|
|
506
|
-
result.commands = await findContentItems(commandsDir, COMMAND_FILES);
|
|
507
|
-
}
|
|
508
|
-
|
|
509
|
-
// Find rules directory
|
|
510
|
-
result.rulesDir = await findMatchingDirs(repoPath, RULE_DIRS);
|
|
511
|
-
|
|
512
|
-
// Find docs directory
|
|
513
|
-
result.docsDir = await findMatchingDirs(repoPath, DOC_DIRS);
|
|
514
|
-
|
|
515
|
-
return result;
|
|
516
|
-
}
|
|
517
|
-
|
|
518
|
-
/**
|
|
519
|
-
* Generate a capability.toml for a wrapped repository
|
|
520
|
-
*/
|
|
521
|
-
async function generateCapabilityToml(
|
|
522
|
-
id: string,
|
|
523
|
-
repoPath: string,
|
|
524
|
-
source: string,
|
|
525
|
-
commit: string,
|
|
526
|
-
content: DiscoveredContent,
|
|
527
|
-
): Promise<void> {
|
|
528
|
-
const shortHash = shortCommit(commit);
|
|
529
|
-
|
|
530
|
-
// Try to get metadata from plugin.json
|
|
531
|
-
const pluginMeta = await parsePluginJson(repoPath);
|
|
532
|
-
|
|
533
|
-
// Try to get description from README
|
|
534
|
-
const readmeDesc = await readReadmeDescription(repoPath);
|
|
535
|
-
|
|
536
|
-
// Build description based on available sources
|
|
537
|
-
let description: string;
|
|
538
|
-
if (pluginMeta?.description) {
|
|
539
|
-
description = pluginMeta.description;
|
|
540
|
-
} else if (readmeDesc) {
|
|
541
|
-
description = readmeDesc;
|
|
542
|
-
} else {
|
|
543
|
-
// Fallback: build from discovered content
|
|
544
|
-
const parts: string[] = [];
|
|
545
|
-
if (content.skills.length > 0) {
|
|
546
|
-
parts.push(`${content.skills.length} skill${content.skills.length > 1 ? "s" : ""}`);
|
|
547
|
-
}
|
|
548
|
-
if (content.agents.length > 0) {
|
|
549
|
-
parts.push(`${content.agents.length} agent${content.agents.length > 1 ? "s" : ""}`);
|
|
550
|
-
}
|
|
551
|
-
if (content.commands.length > 0) {
|
|
552
|
-
parts.push(`${content.commands.length} command${content.commands.length > 1 ? "s" : ""}`);
|
|
553
|
-
}
|
|
554
|
-
description = parts.length > 0 ? `${parts.join(", ")}` : `Wrapped from ${source}`;
|
|
555
|
-
}
|
|
556
|
-
|
|
557
|
-
// Use plugin metadata for name and version if available
|
|
558
|
-
const name = pluginMeta?.name || `${id} (wrapped)`;
|
|
559
|
-
const version = pluginMeta?.version || shortHash;
|
|
560
|
-
|
|
561
|
-
// Extract repository URL for metadata
|
|
562
|
-
const repoUrl = source.startsWith("github:")
|
|
563
|
-
? `https://github.com/${source.replace("github:", "")}`
|
|
564
|
-
: source;
|
|
565
|
-
|
|
566
|
-
// Build TOML content
|
|
567
|
-
let tomlContent = `# Auto-generated by OmniDev - DO NOT EDIT
|
|
568
|
-
# This capability was wrapped from an external repository
|
|
569
|
-
|
|
570
|
-
[capability]
|
|
571
|
-
id = "${id}"
|
|
572
|
-
name = "${name}"
|
|
573
|
-
version = "${version}"
|
|
574
|
-
description = "${description}"
|
|
575
|
-
`;
|
|
576
|
-
|
|
577
|
-
// Add author if available from plugin.json
|
|
578
|
-
if (pluginMeta?.author?.name || pluginMeta?.author?.email) {
|
|
579
|
-
tomlContent += "\n[capability.author]\n";
|
|
580
|
-
if (pluginMeta.author.name) {
|
|
581
|
-
tomlContent += `name = "${pluginMeta.author.name}"\n`;
|
|
582
|
-
}
|
|
583
|
-
if (pluginMeta.author.email) {
|
|
584
|
-
tomlContent += `email = "${pluginMeta.author.email}"\n`;
|
|
585
|
-
}
|
|
586
|
-
}
|
|
587
|
-
|
|
588
|
-
// Add metadata section
|
|
589
|
-
tomlContent += `
|
|
590
|
-
[capability.metadata]
|
|
591
|
-
repository = "${repoUrl}"
|
|
592
|
-
wrapped = true
|
|
593
|
-
commit = "${commit}"
|
|
594
|
-
`;
|
|
595
|
-
|
|
596
|
-
await writeFile(join(repoPath, "capability.toml"), tomlContent, "utf-8");
|
|
597
|
-
}
|
|
598
|
-
|
|
599
|
-
/**
 * Fetch a git-sourced capability into .omni/capabilities/<id>.
 *
 * Two paths:
 *  - config.path set: the repo is cloned/updated in .omni/_temp/<id>-repo
 *    and only the named subdirectory is copied into the target (the target
 *    is wiped and re-copied on every run, even when nothing changed).
 *  - no config.path: the repo is cloned/updated directly in the target.
 *
 * If the result has no capability.toml but looks wrappable (plugin.json or
 * known content dirs), one is generated from discovered content. The
 * reported version prefers package.json's version over the short hash.
 *
 * @param id      Capability id (determines target directory name).
 * @param config  Normalized git source config (see parseSourceConfig).
 * @param options silent: suppress console progress output.
 */
async function fetchGitCapabilitySource(
  id: string,
  config: GitCapabilitySourceConfig,
  options?: { silent?: boolean },
): Promise<FetchResult> {
  const gitUrl = sourceToGitUrl(config.source);
  const targetPath = getSourceCapabilityPath(id);

  let updated = false;
  let commit: string;
  let repoPath: string;

  // If path is specified, clone to temp location first
  if (config.path) {
    const tempPath = join(OMNI_LOCAL, "_temp", `${id}-repo`);

    // Check if already cloned to temp
    if (existsSync(join(tempPath, ".git"))) {
      if (!options?.silent) {
        console.log(` Checking ${id}...`);
      }
      updated = await fetchRepo(tempPath, config.ref);
      commit = await getRepoCommit(tempPath);
    } else {
      if (!options?.silent) {
        console.log(` Cloning ${id} from ${config.source}...`);
      }
      await mkdir(join(tempPath, ".."), { recursive: true });
      await cloneRepo(gitUrl, tempPath, config.ref);
      commit = await getRepoCommit(tempPath);
      updated = true;
    }

    // Copy subdirectory to target
    const sourcePath = join(tempPath, config.path);
    if (!existsSync(sourcePath)) {
      throw new Error(`Path not found in repository: ${config.path}`);
    }

    // Remove old target and copy new content
    if (existsSync(targetPath)) {
      await rm(targetPath, { recursive: true });
    }
    await mkdir(join(targetPath, ".."), { recursive: true });
    await cp(sourcePath, targetPath, { recursive: true });

    repoPath = targetPath;
  } else {
    // Clone directly to target (no subdirectory)
    if (existsSync(join(targetPath, ".git"))) {
      if (!options?.silent) {
        console.log(` Checking ${id}...`);
      }
      updated = await fetchRepo(targetPath, config.ref);
      commit = await getRepoCommit(targetPath);
    } else {
      if (!options?.silent) {
        console.log(` Cloning ${id} from ${config.source}...`);
      }
      await cloneRepo(gitUrl, targetPath, config.ref);
      commit = await getRepoCommit(targetPath);
      updated = true;
    }

    repoPath = targetPath;
  }

  // Auto-detect if we need to wrap (only when no manifest is present)
  let needsWrap = false;
  if (!hasCapabilityToml(repoPath)) {
    needsWrap = await shouldWrapDirectory(repoPath);
  }

  if (needsWrap) {
    // Discover content and generate capability.toml
    const content = await discoverContent(repoPath);
    await generateCapabilityToml(id, repoPath, config.source, commit, content);

    if (!options?.silent) {
      const parts: string[] = [];
      if (content.skills.length > 0) parts.push(`${content.skills.length} skills`);
      if (content.agents.length > 0) parts.push(`${content.agents.length} agents`);
      if (content.commands.length > 0) parts.push(`${content.commands.length} commands`);
      if (parts.length > 0) {
        console.log(` Wrapped: ${parts.join(", ")}`);
      }
    }
  }

  // Get version from capability.toml or package.json
  // (in practice: package.json when readable, else the short commit hash)
  let version = shortCommit(commit);
  const pkgJsonPath = join(repoPath, "package.json");
  if (existsSync(pkgJsonPath)) {
    try {
      const pkgJson = JSON.parse(await readFile(pkgJsonPath, "utf-8"));
      if (pkgJson.version) {
        version = pkgJson.version;
      }
    } catch {
      // Ignore parse errors
    }
  }

  return {
    id,
    path: targetPath,
    version,
    commit,
    updated,
    wrapped: needsWrap,
  };
}
|
|
714
|
-
|
|
715
|
-
/**
|
|
716
|
-
* Fetch a single capability source from git
|
|
717
|
-
*/
|
|
718
|
-
export async function fetchCapabilitySource(
|
|
719
|
-
id: string,
|
|
720
|
-
sourceConfig: CapabilitySourceConfig,
|
|
721
|
-
options?: { silent?: boolean },
|
|
722
|
-
): Promise<FetchResult> {
|
|
723
|
-
const config = parseSourceConfig(sourceConfig);
|
|
724
|
-
return fetchGitCapabilitySource(id, config, options);
|
|
725
|
-
}
|
|
726
|
-
|
|
727
|
-
/**
|
|
728
|
-
* Generate capability.toml content for an MCP server definition
|
|
729
|
-
*/
|
|
730
|
-
function generateMcpCapabilityTomlContent(
|
|
731
|
-
id: string,
|
|
732
|
-
mcpConfig: import("../types/index.js").McpConfig,
|
|
733
|
-
): string {
|
|
734
|
-
let tomlContent = `# Auto-generated by OmniDev from omni.toml [mcps] section - DO NOT EDIT
|
|
735
|
-
|
|
736
|
-
[capability]
|
|
737
|
-
id = "${id}"
|
|
738
|
-
name = "${id} (MCP)"
|
|
739
|
-
version = "1.0.0"
|
|
740
|
-
description = "MCP server defined in omni.toml"
|
|
741
|
-
|
|
742
|
-
[capability.metadata]
|
|
743
|
-
wrapped = true
|
|
744
|
-
generated_from_omni_toml = true
|
|
745
|
-
|
|
746
|
-
[mcp]
|
|
747
|
-
command = "${mcpConfig.command}"
|
|
748
|
-
`;
|
|
749
|
-
|
|
750
|
-
if (mcpConfig.args && mcpConfig.args.length > 0) {
|
|
751
|
-
tomlContent += `args = ${JSON.stringify(mcpConfig.args)}\n`;
|
|
752
|
-
}
|
|
753
|
-
|
|
754
|
-
if (mcpConfig.transport) {
|
|
755
|
-
tomlContent += `transport = "${mcpConfig.transport}"\n`;
|
|
756
|
-
}
|
|
757
|
-
|
|
758
|
-
if (mcpConfig.cwd) {
|
|
759
|
-
tomlContent += `cwd = "${mcpConfig.cwd}"\n`;
|
|
760
|
-
}
|
|
761
|
-
|
|
762
|
-
if (mcpConfig.env && Object.keys(mcpConfig.env).length > 0) {
|
|
763
|
-
tomlContent += `\n[mcp.env]\n`;
|
|
764
|
-
for (const [key, value] of Object.entries(mcpConfig.env)) {
|
|
765
|
-
tomlContent += `${key} = "${value}"\n`;
|
|
766
|
-
}
|
|
767
|
-
}
|
|
768
|
-
|
|
769
|
-
return tomlContent;
|
|
770
|
-
}
|
|
771
|
-
|
|
772
|
-
/**
|
|
773
|
-
* Generate synthetic capability for an MCP server definition
|
|
774
|
-
*/
|
|
775
|
-
async function generateMcpCapabilityToml(
|
|
776
|
-
id: string,
|
|
777
|
-
mcpConfig: import("../types/index.js").McpConfig,
|
|
778
|
-
targetPath: string,
|
|
779
|
-
): Promise<void> {
|
|
780
|
-
const tomlContent = generateMcpCapabilityTomlContent(id, mcpConfig);
|
|
781
|
-
await writeFile(join(targetPath, "capability.toml"), tomlContent, "utf-8");
|
|
782
|
-
}
|
|
783
|
-
|
|
784
|
-
/**
|
|
785
|
-
* Check if a capability directory was generated from omni.toml [mcps] section
|
|
786
|
-
*/
|
|
787
|
-
async function isGeneratedMcpCapability(capabilityDir: string): Promise<boolean> {
|
|
788
|
-
const tomlPath = join(capabilityDir, "capability.toml");
|
|
789
|
-
if (!existsSync(tomlPath)) {
|
|
790
|
-
console.warn("no capability.toml found in", capabilityDir);
|
|
791
|
-
return false;
|
|
792
|
-
}
|
|
793
|
-
|
|
794
|
-
try {
|
|
795
|
-
const content = await readFile(tomlPath, "utf-8");
|
|
796
|
-
const parsed = parseToml(content) as Record<string, unknown>;
|
|
797
|
-
const capability = parsed["capability"] as Record<string, unknown> | undefined;
|
|
798
|
-
const metadata = capability?.["metadata"] as Record<string, unknown> | undefined;
|
|
799
|
-
return metadata?.["generated_from_omni_toml"] === true;
|
|
800
|
-
} catch {
|
|
801
|
-
return false;
|
|
802
|
-
}
|
|
803
|
-
}
|
|
804
|
-
|
|
805
|
-
/**
|
|
806
|
-
* Clean up stale MCP capabilities that are no longer in config
|
|
807
|
-
*/
|
|
808
|
-
async function cleanupStaleMcpCapabilities(currentMcpIds: Set<string>): Promise<void> {
|
|
809
|
-
const capabilitiesDir = join(OMNI_LOCAL, "capabilities");
|
|
810
|
-
if (!existsSync(capabilitiesDir)) {
|
|
811
|
-
return;
|
|
812
|
-
}
|
|
813
|
-
|
|
814
|
-
const entries = await readdir(capabilitiesDir, { withFileTypes: true });
|
|
815
|
-
|
|
816
|
-
for (const entry of entries) {
|
|
817
|
-
if (entry.isDirectory()) {
|
|
818
|
-
const capDir = join(capabilitiesDir, entry.name);
|
|
819
|
-
const isGenerated = await isGeneratedMcpCapability(capDir);
|
|
820
|
-
if (isGenerated && !currentMcpIds.has(entry.name)) {
|
|
821
|
-
// This MCP capability is no longer in omni.toml, remove it
|
|
822
|
-
await rm(capDir, { recursive: true });
|
|
823
|
-
}
|
|
824
|
-
}
|
|
825
|
-
}
|
|
826
|
-
}
|
|
827
|
-
|
|
828
|
-
/**
|
|
829
|
-
* Generate synthetic capabilities for MCP definitions in omni.toml
|
|
830
|
-
*/
|
|
831
|
-
export async function generateMcpCapabilities(config: OmniConfig): Promise<void> {
|
|
832
|
-
if (!config.mcps || Object.keys(config.mcps).length === 0) {
|
|
833
|
-
// Clean up all MCP capabilities if mcps section is empty
|
|
834
|
-
await cleanupStaleMcpCapabilities(new Set());
|
|
835
|
-
return;
|
|
836
|
-
}
|
|
837
|
-
|
|
838
|
-
const mcpCapabilitiesDir = join(OMNI_LOCAL, "capabilities");
|
|
839
|
-
const currentMcpIds = new Set<string>();
|
|
840
|
-
|
|
841
|
-
for (const [id, mcpConfig] of Object.entries(config.mcps)) {
|
|
842
|
-
const targetPath = join(mcpCapabilitiesDir, id);
|
|
843
|
-
currentMcpIds.add(id);
|
|
844
|
-
|
|
845
|
-
// Create directory
|
|
846
|
-
await mkdir(targetPath, { recursive: true });
|
|
847
|
-
|
|
848
|
-
// Generate capability.toml
|
|
849
|
-
await generateMcpCapabilityToml(id, mcpConfig, targetPath);
|
|
850
|
-
}
|
|
851
|
-
|
|
852
|
-
// Cleanup stale MCP capabilities
|
|
853
|
-
await cleanupStaleMcpCapabilities(currentMcpIds);
|
|
854
|
-
}
|
|
855
|
-
|
|
856
|
-
/**
 * Fetch all capability sources from config.
 *
 * Regenerates synthetic MCP capabilities first, then fetches every entry in
 * `config.capabilities.sources`, records the resolved version/commit in the
 * lock file, and persists the lock file only if at least one entry changed.
 *
 * @param config - Parsed omni.toml configuration
 * @param options - `silent` suppresses console progress output; `force` is
 *   forwarded to `fetchCapabilitySource` (its semantics are defined there)
 * @returns One FetchResult per source that fetched without throwing; sources
 *   that fail are logged to stderr and omitted from the returned array
 */
export async function fetchAllCapabilitySources(
  config: OmniConfig,
  options?: { silent?: boolean; force?: boolean },
): Promise<FetchResult[]> {
  // Generate MCP capabilities FIRST so generated/stale MCP capability
  // directories are settled before source fetching runs.
  await generateMcpCapabilities(config);

  const sources = config.capabilities?.sources;
  if (!sources || Object.keys(sources).length === 0) {
    return [];
  }

  if (!options?.silent) {
    console.log("Fetching capability sources...");
  }

  const results: FetchResult[] = [];
  const lockFile = await loadLockFile();
  let lockUpdated = false;

  for (const [id, source] of Object.entries(sources)) {
    try {
      const result = await fetchCapabilitySource(id, source, options);
      results.push(result);

      // Build lock entry based on source type. A shorthand string source
      // is used as-is; an object source contributes its `source` field.
      const lockEntry: CapabilityLockEntry = {
        source: typeof source === "string" ? source : source.source,
        version: result.version,
        updated_at: new Date().toISOString(),
      };

      // Git source: use commit and ref (both optional for non-git sources).
      const gitConfig = parseSourceConfig(source);
      if (result.commit) {
        lockEntry.commit = result.commit;
      }
      if (gitConfig.ref) {
        lockEntry.ref = gitConfig.ref;
      }

      // Check if lock entry changed.
      // NOTE(review): change detection compares only `commit`. For a
      // non-git source both sides are undefined, so a version-only change
      // never refreshes the lock entry — confirm this is intended.
      const existing = lockFile.capabilities[id];
      const hasChanged = !existing || existing.commit !== result.commit;

      if (hasChanged) {
        lockFile.capabilities[id] = lockEntry;
        lockUpdated = true;

        // "+" marks a wrapped capability, "~" an update of an existing one.
        if (!options?.silent && result.updated) {
          const oldVersion = existing?.version || "new";
          console.log(` ${result.wrapped ? "+" : "~"} ${id}: ${oldVersion} -> ${result.version}`);
        }
      }
    } catch (error) {
      // A single failing source must not abort the rest; log and continue.
      console.error(` Failed to fetch ${id}: ${error}`);
    }
  }

  // Save lock file if changed (avoids rewriting an identical lock file).
  if (lockUpdated) {
    await saveLockFile(lockFile);
  }

  if (!options?.silent && results.length > 0) {
    const updated = results.filter((r) => r.updated).length;
    if (updated > 0) {
      console.log(` Updated ${updated} capability source(s)`);
    } else {
      console.log(` All ${results.length} source(s) up to date`);
    }
  }

  return results;
}
|
|
934
|
-
|
|
935
|
-
/**
 * Check for available updates without applying them.
 *
 * For each configured capability source: a source whose target directory is
 * not yet a git checkout is reported as "not installed" with
 * `hasUpdate: true`; otherwise the remote commit for the configured ref (or
 * HEAD) is queried via `git ls-remote` and compared to the locked commit.
 *
 * @param config - Parsed omni.toml configuration
 * @returns One SourceUpdateInfo per configured source
 */
export async function checkForUpdates(config: OmniConfig): Promise<SourceUpdateInfo[]> {
  const sources = config.capabilities?.sources;
  if (!sources || Object.keys(sources).length === 0) {
    return [];
  }

  const lockFile = await loadLockFile();
  const updates: SourceUpdateInfo[] = [];

  for (const [id, source] of Object.entries(sources)) {
    const sourceConfig = parseSourceConfig(source);
    const targetPath = getSourceCapabilityPath(id);
    const existing = lockFile.capabilities[id];

    // Handle git sources
    const gitConfig = sourceConfig;

    if (!existsSync(join(targetPath, ".git"))) {
      // Not yet cloned: always report an update is available.
      updates.push({
        id,
        source: gitConfig.source,
        currentVersion: "not installed",
        latestVersion: "unknown",
        hasUpdate: true,
      });
      continue;
    }

    // Fetch to check for updates (without pulling).
    // NOTE(review): `ls-remote` below queries the remote directly, so this
    // fetch looks redundant — confirm whether it is needed for a side
    // effect (e.g. credential refresh) or can be dropped.
    const fetchProc = Bun.spawn(["git", "fetch", "--depth", "1", "origin"], {
      cwd: targetPath,
      stdout: "pipe",
      stderr: "pipe",
    });
    await fetchProc.exited;

    // Get remote commit for the configured ref, defaulting to HEAD.
    const targetRef = gitConfig.ref || "HEAD";
    const lsProc = Bun.spawn(["git", "ls-remote", "origin", targetRef], {
      cwd: targetPath,
      stdout: "pipe",
      stderr: "pipe",
    });
    const lsOutput = await new Response(lsProc.stdout).text();
    await lsProc.exited;

    // ls-remote output is "<sha>\t<ref>" per line; take the first sha.
    // NOTE(review): exit codes are not checked — on failure lsOutput is
    // empty, remoteCommit becomes "", and hasUpdate below evaluates true
    // whenever a commit is locked. Confirm "assume update when unknown"
    // is the intended behavior.
    const remoteCommit = lsOutput.split("\t")[0] || "";
    const currentCommit = existing?.commit || "";

    updates.push({
      id,
      source: gitConfig.source,
      currentVersion: existing?.version || (currentCommit ? shortCommit(currentCommit) : "unknown"),
      latestVersion: remoteCommit ? shortCommit(remoteCommit) : "unknown",
      hasUpdate: currentCommit !== remoteCommit,
    });
  }

  return updates;
}
|