@omnidev-ai/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +31 -0
- package/src/capability/AGENTS.md +58 -0
- package/src/capability/commands.test.ts +414 -0
- package/src/capability/commands.ts +70 -0
- package/src/capability/docs.test.ts +199 -0
- package/src/capability/docs.ts +46 -0
- package/src/capability/index.ts +20 -0
- package/src/capability/loader.test.ts +815 -0
- package/src/capability/loader.ts +492 -0
- package/src/capability/registry.test.ts +473 -0
- package/src/capability/registry.ts +55 -0
- package/src/capability/rules.test.ts +145 -0
- package/src/capability/rules.ts +133 -0
- package/src/capability/skills.test.ts +316 -0
- package/src/capability/skills.ts +56 -0
- package/src/capability/sources.test.ts +338 -0
- package/src/capability/sources.ts +966 -0
- package/src/capability/subagents.test.ts +478 -0
- package/src/capability/subagents.ts +103 -0
- package/src/capability/yaml-parser.ts +81 -0
- package/src/config/AGENTS.md +46 -0
- package/src/config/capabilities.ts +82 -0
- package/src/config/env.test.ts +286 -0
- package/src/config/env.ts +96 -0
- package/src/config/index.ts +6 -0
- package/src/config/loader.test.ts +282 -0
- package/src/config/loader.ts +137 -0
- package/src/config/parser.test.ts +281 -0
- package/src/config/parser.ts +55 -0
- package/src/config/profiles.test.ts +259 -0
- package/src/config/profiles.ts +75 -0
- package/src/config/provider.test.ts +79 -0
- package/src/config/provider.ts +55 -0
- package/src/debug.ts +20 -0
- package/src/gitignore/manager.test.ts +219 -0
- package/src/gitignore/manager.ts +167 -0
- package/src/index.test.ts +26 -0
- package/src/index.ts +39 -0
- package/src/mcp-json/index.ts +1 -0
- package/src/mcp-json/manager.test.ts +415 -0
- package/src/mcp-json/manager.ts +118 -0
- package/src/state/active-profile.test.ts +131 -0
- package/src/state/active-profile.ts +41 -0
- package/src/state/index.ts +2 -0
- package/src/state/manifest.test.ts +548 -0
- package/src/state/manifest.ts +164 -0
- package/src/sync.ts +213 -0
- package/src/templates/agents.test.ts +23 -0
- package/src/templates/agents.ts +14 -0
- package/src/templates/claude.test.ts +48 -0
- package/src/templates/claude.ts +122 -0
- package/src/test-utils/helpers.test.ts +196 -0
- package/src/test-utils/helpers.ts +187 -0
- package/src/test-utils/index.ts +30 -0
- package/src/test-utils/mocks.test.ts +83 -0
- package/src/test-utils/mocks.ts +101 -0
- package/src/types/capability-export.ts +234 -0
- package/src/types/index.test.ts +28 -0
- package/src/types/index.ts +270 -0
|
@@ -0,0 +1,966 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Git-sourced capabilities: fetching, wrapping, and version management
|
|
3
|
+
*
|
|
4
|
+
* This module handles:
|
|
5
|
+
* - Cloning/fetching capabilities from Git repositories
|
|
6
|
+
* - Wrapping external repos (discovering skills/agents/commands)
|
|
7
|
+
* - Managing the capabilities.lock.toml file
|
|
8
|
+
* - Version tracking and update detection
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { createHash } from "node:crypto";
|
|
12
|
+
import { existsSync } from "node:fs";
|
|
13
|
+
import { cp, mkdir, readFile, writeFile, readdir, stat, rm } from "node:fs/promises";
|
|
14
|
+
import { join, resolve } from "node:path";
|
|
15
|
+
import { parse as parseToml } from "smol-toml";
|
|
16
|
+
import type {
|
|
17
|
+
OmniConfig,
|
|
18
|
+
GitCapabilitySourceConfig,
|
|
19
|
+
FileCapabilitySourceConfig,
|
|
20
|
+
CapabilitySourceConfig,
|
|
21
|
+
CapabilitiesLockFile,
|
|
22
|
+
CapabilityLockEntry,
|
|
23
|
+
} from "../types/index.js";
|
|
24
|
+
|
|
25
|
+
// Local path for .omni directory; all fetched capability checkouts live under it.
const OMNI_LOCAL = ".omni";

// Directory names to scan for content (singular and plural forms).
// findMatchingDirs returns the FIRST match, so order expresses preference.
const SKILL_DIRS = ["skills", "skill"];
const AGENT_DIRS = ["agents", "agent", "subagents", "subagent"];
const COMMAND_DIRS = ["commands", "command"];
const RULE_DIRS = ["rules", "rule"];
const DOC_DIRS = ["docs", "doc", "documentation"];

// Marker file names recognized inside a content folder, per content type.
// A subdirectory counts as an item only if it contains one of these files.
const SKILL_FILES = ["SKILL.md", "skill.md", "Skill.md"];
const AGENT_FILES = ["AGENT.md", "agent.md", "Agent.md", "SUBAGENT.md", "subagent.md"];
const COMMAND_FILES = ["COMMAND.md", "command.md", "Command.md"];
|
|
40
|
+
/**
 * Result of fetching a single capability source (git clone/update or file copy).
 */
export interface FetchResult {
  /** Capability identifier (the key configured under capability sources) */
  id: string;
  /** Local checkout/copy path (under .omni/capabilities) */
  path: string;
  /** Resolved version: package.json version when present, otherwise a short hash */
  version: string;
  /** Git commit hash (for git sources) */
  commit?: string;
  /** Content hash (for file sources) */
  contentHash?: string;
  /** True when the local copy changed during this fetch */
  updated: boolean;
  /** True when a capability.toml had to be generated ("wrapped" external repo) */
  wrapped: boolean;
}
|
|
51
|
+
|
|
52
|
+
/**
 * Update-check information for a tracked capability source.
 */
export interface SourceUpdateInfo {
  /** Capability identifier */
  id: string;
  /** Source string as configured (e.g. "github:user/repo") */
  source: string;
  /** Version currently installed/recorded */
  currentVersion: string;
  /** Latest version available upstream */
  latestVersion: string;
  /** True when latestVersion differs from currentVersion */
  hasUpdate: boolean;
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Check if a source string is a file source
|
|
62
|
+
*/
|
|
63
|
+
export function isFileSource(source: string): boolean {
|
|
64
|
+
return source.startsWith("file://");
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Check if a source string is a git source
|
|
69
|
+
*/
|
|
70
|
+
export function isGitSource(source: string): boolean {
|
|
71
|
+
return (
|
|
72
|
+
source.startsWith("github:") ||
|
|
73
|
+
source.startsWith("git@") ||
|
|
74
|
+
source.startsWith("https://") ||
|
|
75
|
+
source.startsWith("http://")
|
|
76
|
+
);
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Type guard to check if a parsed source config is a file source
|
|
81
|
+
*/
|
|
82
|
+
export function isFileSourceConfig(
|
|
83
|
+
config: GitCapabilitySourceConfig | FileCapabilitySourceConfig,
|
|
84
|
+
): config is FileCapabilitySourceConfig {
|
|
85
|
+
return isFileSource(config.source);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
|
|
89
|
+
* Type guard to check if a parsed source config is a git source
|
|
90
|
+
*/
|
|
91
|
+
export function isGitSourceConfig(
|
|
92
|
+
config: GitCapabilitySourceConfig | FileCapabilitySourceConfig,
|
|
93
|
+
): config is GitCapabilitySourceConfig {
|
|
94
|
+
return !isFileSource(config.source);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Parse a file:// URL into a filesystem path
|
|
99
|
+
* Supports both relative (file://./path) and absolute (file:///path) URLs
|
|
100
|
+
*/
|
|
101
|
+
export function parseFilePath(fileUrl: string): string {
|
|
102
|
+
if (!fileUrl.startsWith("file://")) {
|
|
103
|
+
throw new Error(`Invalid file URL: ${fileUrl}`);
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
const path = fileUrl.slice(7); // Remove "file://"
|
|
107
|
+
|
|
108
|
+
// Handle absolute paths (file:///absolute/path)
|
|
109
|
+
if (path.startsWith("/")) {
|
|
110
|
+
return path;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
// Handle relative paths (file://./relative/path or file://relative/path)
|
|
114
|
+
if (path.startsWith("./") || path.startsWith("../")) {
|
|
115
|
+
return resolve(process.cwd(), path);
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Treat as relative path
|
|
119
|
+
return resolve(process.cwd(), path);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/**
 * Calculate a SHA256 hash of a directory's contents for change detection.
 * Recursively mixes entry names and file contents, skipping any .git
 * directory so commit metadata never affects the digest.
 */
async function calculateDirectoryHash(dirPath: string): Promise<string> {
  const hash = createHash("sha256");
  await hashDirectory(dirPath, hash);
  return hash.digest("hex");
}

/**
 * Recursively feed a directory's entries into an existing hash object.
 * Entries are sorted by name so the digest is deterministic regardless of
 * readdir ordering.
 * NOTE(review): entries that are neither directories nor regular files
 * (e.g. symlinks) contribute only their name, not a target/content —
 * confirm that is intended.
 */
async function hashDirectory(dirPath: string, hash: ReturnType<typeof createHash>): Promise<void> {
  const entries = await readdir(dirPath, { withFileTypes: true });

  // Sort entries for consistent hashing
  entries.sort((a, b) => a.name.localeCompare(b.name));

  for (const entry of entries) {
    const entryPath = join(dirPath, entry.name);

    // Skip .git directory
    if (entry.name === ".git") {
      continue;
    }

    // Add entry name to hash, so renames change the digest even when
    // file contents stay identical.
    hash.update(entry.name);

    if (entry.isDirectory()) {
      await hashDirectory(entryPath, hash);
    } else if (entry.isFile()) {
      const content = await readFile(entryPath);
      hash.update(content);
    }
  }
}
|
|
157
|
+
|
|
158
|
+
/**
|
|
159
|
+
* Parse a capability source string or config into normalized form
|
|
160
|
+
* Returns either a GitCapabilitySourceConfig or FileCapabilitySourceConfig
|
|
161
|
+
*/
|
|
162
|
+
export function parseSourceConfig(
|
|
163
|
+
source: CapabilitySourceConfig,
|
|
164
|
+
): GitCapabilitySourceConfig | FileCapabilitySourceConfig {
|
|
165
|
+
if (typeof source === "string") {
|
|
166
|
+
// File source shorthand: "file://./path"
|
|
167
|
+
if (isFileSource(source)) {
|
|
168
|
+
return { source };
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
// Git source shorthand formats:
|
|
172
|
+
// - "github:user/repo"
|
|
173
|
+
// - "github:user/repo#ref"
|
|
174
|
+
// - "git@github.com:user/repo.git"
|
|
175
|
+
// - "https://github.com/user/repo.git"
|
|
176
|
+
|
|
177
|
+
let sourceUrl = source;
|
|
178
|
+
let ref: string | undefined;
|
|
179
|
+
|
|
180
|
+
// Check for ref in github shorthand
|
|
181
|
+
if (source.startsWith("github:") && source.includes("#")) {
|
|
182
|
+
const parts = source.split("#");
|
|
183
|
+
sourceUrl = parts[0] ?? source;
|
|
184
|
+
ref = parts[1];
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
const result: GitCapabilitySourceConfig = { source: sourceUrl };
|
|
188
|
+
if (ref) {
|
|
189
|
+
result.ref = ref;
|
|
190
|
+
}
|
|
191
|
+
return result;
|
|
192
|
+
}
|
|
193
|
+
return source;
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
/**
|
|
197
|
+
* Convert source to a git-cloneable URL
|
|
198
|
+
*/
|
|
199
|
+
export function sourceToGitUrl(source: string): string {
|
|
200
|
+
if (source.startsWith("github:")) {
|
|
201
|
+
const repo = source.replace("github:", "");
|
|
202
|
+
return `https://github.com/${repo}.git`;
|
|
203
|
+
}
|
|
204
|
+
// Already a URL or SSH path
|
|
205
|
+
return source;
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
/**
 * Get the local path where a capability source is stored:
 * .omni/capabilities/<id>
 */
export function getSourceCapabilityPath(id: string): string {
  return join(OMNI_LOCAL, "capabilities", id);
}
|
|
214
|
+
|
|
215
|
+
/**
|
|
216
|
+
* Get the lock file path
|
|
217
|
+
*/
|
|
218
|
+
export function getLockFilePath(): string {
|
|
219
|
+
return "omni.lock.toml";
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
/**
 * Load the capabilities lock file from disk.
 * Returns an empty lock ({ capabilities: {} }) when the file is missing
 * or cannot be read/parsed — a corrupt lock is deliberately treated as
 * "no lock" rather than an error.
 */
export async function loadLockFile(): Promise<CapabilitiesLockFile> {
  const lockPath = getLockFilePath();
  if (!existsSync(lockPath)) {
    return { capabilities: {} };
  }

  try {
    const content = await readFile(lockPath, "utf-8");
    const parsed = parseToml(content) as Record<string, unknown>;
    // NOTE(review): entries are cast, not validated — malformed values pass
    // through as CapabilityLockEntry unchecked.
    const capabilities = parsed["capabilities"] as Record<string, CapabilityLockEntry> | undefined;
    return {
      capabilities: capabilities || {},
    };
  } catch {
    // Unreadable or invalid TOML: fall back to an empty lock.
    return { capabilities: {} };
  }
}
|
|
242
|
+
|
|
243
|
+
/**
|
|
244
|
+
* Stringify a lock file to TOML format
|
|
245
|
+
*/
|
|
246
|
+
function stringifyLockFile(lockFile: CapabilitiesLockFile): string {
|
|
247
|
+
const lines: string[] = [];
|
|
248
|
+
|
|
249
|
+
for (const [id, entry] of Object.entries(lockFile.capabilities)) {
|
|
250
|
+
lines.push(`[capabilities.${id}]`);
|
|
251
|
+
lines.push(`source = "${entry.source}"`);
|
|
252
|
+
lines.push(`version = "${entry.version}"`);
|
|
253
|
+
if (entry.commit) {
|
|
254
|
+
lines.push(`commit = "${entry.commit}"`);
|
|
255
|
+
}
|
|
256
|
+
if (entry.content_hash) {
|
|
257
|
+
lines.push(`content_hash = "${entry.content_hash}"`);
|
|
258
|
+
}
|
|
259
|
+
if (entry.ref) {
|
|
260
|
+
lines.push(`ref = "${entry.ref}"`);
|
|
261
|
+
}
|
|
262
|
+
lines.push(`updated_at = "${entry.updated_at}"`);
|
|
263
|
+
lines.push("");
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
return lines.join("\n");
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
/**
 * Save the capabilities lock file to disk, prefixed with a generated
 * "do not edit" header that records the write timestamp.
 */
export async function saveLockFile(lockFile: CapabilitiesLockFile): Promise<void> {
  const lockPath = getLockFilePath();

  // Ensure directory exists
  // NOTE(review): this creates .omni/capabilities, but the lock file itself
  // is written at the repo root — confirm this mkdir is intentional here.
  await mkdir(join(OMNI_LOCAL, "capabilities"), { recursive: true });

  const header = `# Auto-generated by OmniDev - DO NOT EDIT
# Records installed capability versions for reproducibility
# Last updated: ${new Date().toISOString()}

`;
  const content = header + stringifyLockFile(lockFile);
  await writeFile(lockPath, content, "utf-8");
}
|
|
286
|
+
|
|
287
|
+
/**
 * Get the current HEAD commit hash of a git repository via `git rev-parse`.
 * NOTE(review): the process exit code is not checked — on failure this
 * returns an empty string instead of throwing.
 */
async function getRepoCommit(repoPath: string): Promise<string> {
  const proc = Bun.spawn(["git", "rev-parse", "HEAD"], {
    cwd: repoPath,
    stdout: "pipe",
    stderr: "pipe",
  });
  // Read stdout before awaiting exit so the pipe cannot back up.
  const output = await new Response(proc.stdout).text();
  await proc.exited;
  return output.trim();
}
|
|
300
|
+
|
|
301
|
+
/**
|
|
302
|
+
* Get short commit hash (7 chars)
|
|
303
|
+
*/
|
|
304
|
+
function shortCommit(commit: string): string {
|
|
305
|
+
return commit.substring(0, 7);
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
/**
 * Shallow-clone a git repository into targetPath.
 *
 * @param gitUrl cloneable URL (see sourceToGitUrl)
 * @param targetPath directory to clone into; its parent is created if missing
 * @param ref optional ref passed to `git clone --branch`
 *            NOTE(review): --branch accepts branch/tag names only, not
 *            commit hashes — confirm refs are constrained accordingly.
 * @throws Error containing git's stderr when the clone fails
 */
async function cloneRepo(gitUrl: string, targetPath: string, ref?: string): Promise<void> {
  // Ensure parent directory exists
  await mkdir(join(targetPath, ".."), { recursive: true });

  // Shallow clone (--depth 1): history is not needed locally.
  const args = ["clone", "--depth", "1"];
  if (ref) {
    args.push("--branch", ref);
  }
  args.push(gitUrl, targetPath);

  const proc = Bun.spawn(["git", ...args], {
    stdout: "pipe",
    stderr: "pipe",
  });

  await proc.exited;

  if (proc.exitCode !== 0) {
    const stderr = await new Response(proc.stderr).text();
    throw new Error(`Failed to clone ${gitUrl}: ${stderr}`);
  }
}
|
|
333
|
+
|
|
334
|
+
/**
 * Fetch updates for an already-cloned repository and fast-forward it.
 *
 * @param repoPath local checkout path
 * @param ref optional ref to compare against upstream (defaults to HEAD)
 * @returns true when the remote had a newer commit than the local checkout
 */
async function fetchRepo(repoPath: string, ref?: string): Promise<boolean> {
  // Fetch latest
  // NOTE(review): fetch/pull exit codes are not checked — network or auth
  // failures pass silently and the function reports "no update".
  const fetchProc = Bun.spawn(["git", "fetch", "--depth", "1", "origin"], {
    cwd: repoPath,
    stdout: "pipe",
    stderr: "pipe",
  });
  await fetchProc.exited;

  // Get current and remote commits
  const currentCommit = await getRepoCommit(repoPath);

  // Check remote commit
  const targetRef = ref || "HEAD";
  const lsProc = Bun.spawn(["git", "ls-remote", "origin", targetRef], {
    cwd: repoPath,
    stdout: "pipe",
    stderr: "pipe",
  });
  const lsOutput = await new Response(lsProc.stdout).text();
  await lsProc.exited;

  // ls-remote output lines are "<hash>\t<refname>"; take the first hash.
  const remoteCommit = lsOutput.split("\t")[0];

  if (currentCommit === remoteCommit) {
    return false; // No update
  }

  // Pull changes
  // NOTE(review): pulls the currently checked-out branch, not `ref` —
  // presumably equivalent because the clone was pinned with --branch;
  // confirm against cloneRepo.
  const pullProc = Bun.spawn(["git", "pull", "--ff-only"], {
    cwd: repoPath,
    stdout: "pipe",
    stderr: "pipe",
  });
  await pullProc.exited;

  return true; // Updated
}
|
|
375
|
+
|
|
376
|
+
/**
|
|
377
|
+
* Check if a directory contains a capability.toml
|
|
378
|
+
*/
|
|
379
|
+
function hasCapabilityToml(dirPath: string): boolean {
|
|
380
|
+
return existsSync(join(dirPath, "capability.toml"));
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
/**
|
|
384
|
+
* Find directories matching any of the given names
|
|
385
|
+
*/
|
|
386
|
+
async function findMatchingDirs(basePath: string, names: string[]): Promise<string | null> {
|
|
387
|
+
for (const name of names) {
|
|
388
|
+
const dirPath = join(basePath, name);
|
|
389
|
+
if (existsSync(dirPath)) {
|
|
390
|
+
const stats = await stat(dirPath);
|
|
391
|
+
if (stats.isDirectory()) {
|
|
392
|
+
return dirPath;
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
return null;
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
/**
|
|
400
|
+
* Find content files in a directory (skills, agents, commands)
|
|
401
|
+
*/
|
|
402
|
+
async function findContentItems(
|
|
403
|
+
dirPath: string,
|
|
404
|
+
filePatterns: string[],
|
|
405
|
+
): Promise<Array<{ name: string; path: string; isFolder: boolean }>> {
|
|
406
|
+
const items: Array<{ name: string; path: string; isFolder: boolean }> = [];
|
|
407
|
+
|
|
408
|
+
if (!existsSync(dirPath)) {
|
|
409
|
+
return items;
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
const entries = await readdir(dirPath, { withFileTypes: true });
|
|
413
|
+
|
|
414
|
+
for (const entry of entries) {
|
|
415
|
+
const entryPath = join(dirPath, entry.name);
|
|
416
|
+
|
|
417
|
+
if (entry.isDirectory()) {
|
|
418
|
+
// Check for content file inside directory
|
|
419
|
+
for (const pattern of filePatterns) {
|
|
420
|
+
if (existsSync(join(entryPath, pattern))) {
|
|
421
|
+
items.push({
|
|
422
|
+
name: entry.name,
|
|
423
|
+
path: entryPath,
|
|
424
|
+
isFolder: true,
|
|
425
|
+
});
|
|
426
|
+
break;
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
} else if (entry.isFile() && entry.name.endsWith(".md")) {
|
|
430
|
+
// Single file content (e.g., agents/researcher.md)
|
|
431
|
+
const name = entry.name.replace(/\.md$/i, "");
|
|
432
|
+
items.push({
|
|
433
|
+
name,
|
|
434
|
+
path: entryPath,
|
|
435
|
+
isFolder: false,
|
|
436
|
+
});
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
return items;
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
/**
 * Content discovered in a wrapped repository.
 */
export interface DiscoveredContent {
  /** Skill items (marker-file folders or bare .md files under a skills dir) */
  skills: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Agent/subagent items */
  agents: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Command items */
  commands: Array<{ name: string; path: string; isFolder: boolean }>;
  /** Rules directory path, or null when the repo has none */
  rulesDir: string | null;
  /** Docs directory path, or null when the repo has none */
  docsDir: string | null;
}
|
|
453
|
+
|
|
454
|
+
/**
 * Scan a repository checkout and collect its skills, agents, commands,
 * rules directory, and docs directory (see the *_DIRS / *_FILES constants
 * for the recognized names).
 */
async function discoverContent(repoPath: string): Promise<DiscoveredContent> {
  const result: DiscoveredContent = {
    skills: [],
    agents: [],
    commands: [],
    rulesDir: null,
    docsDir: null,
  };

  // Find skills
  const skillsDir = await findMatchingDirs(repoPath, SKILL_DIRS);
  if (skillsDir) {
    result.skills = await findContentItems(skillsDir, SKILL_FILES);
  }

  // Find agents
  const agentsDir = await findMatchingDirs(repoPath, AGENT_DIRS);
  if (agentsDir) {
    result.agents = await findContentItems(agentsDir, AGENT_FILES);
  }

  // Find commands
  const commandsDir = await findMatchingDirs(repoPath, COMMAND_DIRS);
  if (commandsDir) {
    result.commands = await findContentItems(commandsDir, COMMAND_FILES);
  }

  // Find rules directory (kept as a path; individual rules are not enumerated)
  result.rulesDir = await findMatchingDirs(repoPath, RULE_DIRS);

  // Find docs directory
  result.docsDir = await findMatchingDirs(repoPath, DOC_DIRS);

  return result;
}
|
|
489
|
+
|
|
490
|
+
/**
 * Generate a capability.toml for a wrapped git repository.
 * The version is the short commit hash and the description summarizes
 * the discovered content counts.
 *
 * NOTE(review): id/source/description/commit are interpolated into TOML
 * basic strings without escaping — quotes or backslashes in those values
 * would produce invalid TOML.
 */
async function generateCapabilityToml(
  id: string,
  repoPath: string,
  source: string,
  commit: string,
  content: DiscoveredContent,
): Promise<void> {
  const shortHash = shortCommit(commit);

  // Build description based on discovered content
  const parts: string[] = [];
  if (content.skills.length > 0) {
    parts.push(`${content.skills.length} skill${content.skills.length > 1 ? "s" : ""}`);
  }
  if (content.agents.length > 0) {
    parts.push(`${content.agents.length} agent${content.agents.length > 1 ? "s" : ""}`);
  }
  if (content.commands.length > 0) {
    parts.push(`${content.commands.length} command${content.commands.length > 1 ? "s" : ""}`);
  }

  const description =
    parts.length > 0 ? `Wrapped from ${source} (${parts.join(", ")})` : `Wrapped from ${source}`;

  // Extract repository URL for metadata
  const repoUrl = source.startsWith("github:")
    ? `https://github.com/${source.replace("github:", "")}`
    : source;

  const tomlContent = `# Auto-generated by OmniDev - DO NOT EDIT
# This capability was wrapped from an external repository

[capability]
id = "${id}"
name = "${id} (wrapped)"
version = "${shortHash}"
description = "${description}"

[capability.metadata]
repository = "${repoUrl}"
wrapped = true
commit = "${commit}"
`;

  await writeFile(join(repoPath, "capability.toml"), tomlContent, "utf-8");
}
|
|
539
|
+
|
|
540
|
+
/**
 * Generate a capability.toml for a file-sourced (locally copied) capability.
 * The version is the first 12 characters of the directory content hash and
 * the description summarizes the discovered content counts.
 *
 * NOTE(review): id/sourcePath/description are interpolated into TOML basic
 * strings without escaping — backslashes (e.g. Windows paths) or quotes
 * would produce invalid TOML.
 */
async function generateFileCapabilityToml(
  id: string,
  targetPath: string,
  sourcePath: string,
  contentHash: string,
  content: DiscoveredContent,
): Promise<void> {
  const shortHash = contentHash.substring(0, 12);

  // Build description based on discovered content
  const parts: string[] = [];
  if (content.skills.length > 0) {
    parts.push(`${content.skills.length} skill${content.skills.length > 1 ? "s" : ""}`);
  }
  if (content.agents.length > 0) {
    parts.push(`${content.agents.length} agent${content.agents.length > 1 ? "s" : ""}`);
  }
  if (content.commands.length > 0) {
    parts.push(`${content.commands.length} command${content.commands.length > 1 ? "s" : ""}`);
  }

  const description =
    parts.length > 0
      ? `Copied from ${sourcePath} (${parts.join(", ")})`
      : `Copied from ${sourcePath}`;

  const tomlContent = `# Auto-generated by OmniDev - DO NOT EDIT
# This capability was copied from a local file source

[capability]
id = "${id}"
name = "${id} (file source)"
version = "${shortHash}"
description = "${description}"

[capability.metadata]
source_path = "${sourcePath}"
wrapped = true
content_hash = "${contentHash}"
`;

  await writeFile(join(targetPath, "capability.toml"), tomlContent, "utf-8");
}
|
|
586
|
+
|
|
587
|
+
/**
 * Fetch a file-sourced capability by copying it from a local path.
 *
 * The copy is refreshed only when the source directory's content hash
 * differs from the existing target copy. A capability.toml is generated
 * for sources that lack one ("wrapping").
 *
 * @param id capability identifier; determines the target path
 * @param config parsed file source config (source is a file:// URL)
 * @param options silent suppresses progress logging
 * @throws Error when the source path does not exist
 */
async function fetchFileCapabilitySource(
  id: string,
  config: FileCapabilitySourceConfig,
  options?: { silent?: boolean },
): Promise<FetchResult> {
  const sourcePath = parseFilePath(config.source);
  const targetPath = getSourceCapabilityPath(id);

  // Verify source exists
  if (!existsSync(sourcePath)) {
    throw new Error(`File source not found: ${sourcePath}`);
  }

  // Calculate content hash of source
  const sourceHash = await calculateDirectoryHash(sourcePath);

  // Check if we need to update
  let updated = false;
  let existingHash: string | undefined;

  if (existsSync(targetPath)) {
    // Calculate existing hash to compare
    // NOTE(review): the generated capability.toml in the target changes its
    // hash relative to the source, so previously wrapped copies may always
    // compare as changed — confirm against the wrap flow.
    existingHash = await calculateDirectoryHash(targetPath);
    updated = existingHash !== sourceHash;
  } else {
    updated = true;
  }

  if (updated) {
    if (!options?.silent) {
      console.log(` Copying ${id} from ${config.source}...`);
    }

    // Remove existing target if it exists
    if (existsSync(targetPath)) {
      await rm(targetPath, { recursive: true });
    }

    // Copy source to target
    await mkdir(join(targetPath, ".."), { recursive: true });
    await cp(sourcePath, targetPath, { recursive: true });

    // Check if we need to wrap (no capability.toml)
    const needsWrap = !hasCapabilityToml(targetPath);

    if (needsWrap) {
      // Discover content and generate capability.toml
      const content = await discoverContent(targetPath);
      await generateFileCapabilityToml(id, targetPath, config.source, sourceHash, content);

      if (!options?.silent) {
        const parts: string[] = [];
        if (content.skills.length > 0) parts.push(`${content.skills.length} skills`);
        if (content.agents.length > 0) parts.push(`${content.agents.length} agents`);
        if (content.commands.length > 0) parts.push(`${content.commands.length} commands`);
        if (parts.length > 0) {
          console.log(` Wrapped: ${parts.join(", ")}`);
        }
      }
    }
  } else {
    if (!options?.silent) {
      console.log(` Checking ${id}...`);
    }
  }

  // Get version from capability.toml or package.json
  // Falls back to the first 12 chars of the content hash when the copy has
  // no parseable package.json version.
  const shortHash = sourceHash.substring(0, 12);
  let version = shortHash;
  const pkgJsonPath = join(targetPath, "package.json");
  if (existsSync(pkgJsonPath)) {
    try {
      const pkgJson = JSON.parse(await readFile(pkgJsonPath, "utf-8"));
      if (pkgJson.version) {
        version = pkgJson.version;
      }
    } catch {
      // Ignore parse errors
    }
  }

  return {
    id,
    path: targetPath,
    version,
    contentHash: sourceHash,
    updated,
    // "wrapped" reflects the SOURCE directory (which never receives a
    // generated capability.toml), so it is stable across repeated fetches.
    wrapped: !hasCapabilityToml(sourcePath),
  };
}
|
|
680
|
+
|
|
681
|
+
/**
 * Fetch a git-sourced capability: clone on first use, otherwise fetch and
 * fast-forward, then wrap the checkout (generate capability.toml) when the
 * repo has none or the config explicitly requests wrapping.
 *
 * @param id capability identifier; determines the checkout path
 * @param config parsed git source config (source, optional ref and type)
 * @param options silent suppresses progress logging
 */
async function fetchGitCapabilitySource(
  id: string,
  config: GitCapabilitySourceConfig,
  options?: { silent?: boolean },
): Promise<FetchResult> {
  const gitUrl = sourceToGitUrl(config.source);
  const targetPath = getSourceCapabilityPath(id);
  const isWrap = config.type === "wrap";

  let updated = false;
  let commit: string;

  // Check if already cloned
  if (existsSync(join(targetPath, ".git"))) {
    // Fetch updates
    if (!options?.silent) {
      console.log(` Checking ${id}...`);
    }
    updated = await fetchRepo(targetPath, config.ref);
    commit = await getRepoCommit(targetPath);
  } else {
    // Clone repository
    if (!options?.silent) {
      console.log(` Cloning ${id} from ${config.source}...`);
    }
    await cloneRepo(gitUrl, targetPath, config.ref);
    commit = await getRepoCommit(targetPath);
    updated = true;
  }

  // Check if we need to wrap (no capability.toml or explicitly wrap type)
  const needsWrap = isWrap || !hasCapabilityToml(targetPath);

  // NOTE(review): wrapping only runs when `updated` is true — an existing
  // unchanged clone keeps its previously generated capability.toml.
  if (needsWrap && updated) {
    // Discover content and generate capability.toml
    const content = await discoverContent(targetPath);
    await generateCapabilityToml(id, targetPath, config.source, commit, content);

    if (!options?.silent) {
      const parts: string[] = [];
      if (content.skills.length > 0) parts.push(`${content.skills.length} skills`);
      if (content.agents.length > 0) parts.push(`${content.agents.length} agents`);
      if (content.commands.length > 0) parts.push(`${content.commands.length} commands`);
      if (parts.length > 0) {
        console.log(` Wrapped: ${parts.join(", ")}`);
      }
    }
  }

  // Get version from capability.toml or package.json
  // Falls back to the short commit hash when no package.json version exists.
  let version = shortCommit(commit);
  const pkgJsonPath = join(targetPath, "package.json");
  if (existsSync(pkgJsonPath)) {
    try {
      const pkgJson = JSON.parse(await readFile(pkgJsonPath, "utf-8"));
      if (pkgJson.version) {
        version = pkgJson.version;
      }
    } catch {
      // Ignore parse errors
    }
  }

  return {
    id,
    path: targetPath,
    version,
    commit,
    updated,
    wrapped: needsWrap,
  };
}
|
|
756
|
+
|
|
757
|
+
/**
 * Fetch a single capability source, dispatching on the source kind:
 * file:// sources are copied locally, everything else is treated as git.
 *
 * @param id capability identifier
 * @param sourceConfig shorthand string or structured source config
 * @param options silent suppresses progress logging
 */
export async function fetchCapabilitySource(
  id: string,
  sourceConfig: CapabilitySourceConfig,
  options?: { silent?: boolean },
): Promise<FetchResult> {
  const config = parseSourceConfig(sourceConfig);

  // Dispatch based on source type
  if (isFileSourceConfig(config)) {
    return fetchFileCapabilitySource(id, config, options);
  }

  return fetchGitCapabilitySource(id, config, options);
}
|
|
774
|
+
|
|
775
|
+
/**
|
|
776
|
+
* Fetch all capability sources from config
|
|
777
|
+
*/
|
|
778
|
+
export async function fetchAllCapabilitySources(
|
|
779
|
+
config: OmniConfig,
|
|
780
|
+
options?: { silent?: boolean; force?: boolean },
|
|
781
|
+
): Promise<FetchResult[]> {
|
|
782
|
+
const sources = config.capabilities?.sources;
|
|
783
|
+
if (!sources || Object.keys(sources).length === 0) {
|
|
784
|
+
return [];
|
|
785
|
+
}
|
|
786
|
+
|
|
787
|
+
if (!options?.silent) {
|
|
788
|
+
console.log("Fetching capability sources...");
|
|
789
|
+
}
|
|
790
|
+
|
|
791
|
+
const results: FetchResult[] = [];
|
|
792
|
+
const lockFile = await loadLockFile();
|
|
793
|
+
let lockUpdated = false;
|
|
794
|
+
|
|
795
|
+
for (const [id, source] of Object.entries(sources)) {
|
|
796
|
+
try {
|
|
797
|
+
const result = await fetchCapabilitySource(id, source, options);
|
|
798
|
+
results.push(result);
|
|
799
|
+
|
|
800
|
+
const sourceConfig = parseSourceConfig(source);
|
|
801
|
+
|
|
802
|
+
// Build lock entry based on source type
|
|
803
|
+
const lockEntry: CapabilityLockEntry = {
|
|
804
|
+
source: typeof source === "string" ? source : source.source,
|
|
805
|
+
version: result.version,
|
|
806
|
+
updated_at: new Date().toISOString(),
|
|
807
|
+
};
|
|
808
|
+
|
|
809
|
+
if (isFileSourceConfig(sourceConfig)) {
|
|
810
|
+
// File source: use content hash
|
|
811
|
+
if (result.contentHash) {
|
|
812
|
+
lockEntry.content_hash = result.contentHash;
|
|
813
|
+
}
|
|
814
|
+
} else {
|
|
815
|
+
// Git source: use commit and ref
|
|
816
|
+
const gitConfig = sourceConfig as GitCapabilitySourceConfig;
|
|
817
|
+
if (result.commit) {
|
|
818
|
+
lockEntry.commit = result.commit;
|
|
819
|
+
}
|
|
820
|
+
if (gitConfig.ref) {
|
|
821
|
+
lockEntry.ref = gitConfig.ref;
|
|
822
|
+
}
|
|
823
|
+
}
|
|
824
|
+
|
|
825
|
+
// Check if lock entry changed
|
|
826
|
+
const existing = lockFile.capabilities[id];
|
|
827
|
+
const hasChanged = isFileSourceConfig(sourceConfig)
|
|
828
|
+
? !existing || existing.content_hash !== result.contentHash
|
|
829
|
+
: !existing || existing.commit !== result.commit;
|
|
830
|
+
|
|
831
|
+
if (hasChanged) {
|
|
832
|
+
lockFile.capabilities[id] = lockEntry;
|
|
833
|
+
lockUpdated = true;
|
|
834
|
+
|
|
835
|
+
if (!options?.silent && result.updated) {
|
|
836
|
+
const oldVersion = existing?.version || "new";
|
|
837
|
+
console.log(` ${result.wrapped ? "+" : "~"} ${id}: ${oldVersion} -> ${result.version}`);
|
|
838
|
+
}
|
|
839
|
+
}
|
|
840
|
+
} catch (error) {
|
|
841
|
+
console.error(` Failed to fetch ${id}: ${error}`);
|
|
842
|
+
}
|
|
843
|
+
}
|
|
844
|
+
|
|
845
|
+
// Save lock file if changed
|
|
846
|
+
if (lockUpdated) {
|
|
847
|
+
await saveLockFile(lockFile);
|
|
848
|
+
}
|
|
849
|
+
|
|
850
|
+
if (!options?.silent && results.length > 0) {
|
|
851
|
+
const updated = results.filter((r) => r.updated).length;
|
|
852
|
+
if (updated > 0) {
|
|
853
|
+
console.log(` Updated ${updated} capability source(s)`);
|
|
854
|
+
} else {
|
|
855
|
+
console.log(` All ${results.length} source(s) up to date`);
|
|
856
|
+
}
|
|
857
|
+
}
|
|
858
|
+
|
|
859
|
+
return results;
|
|
860
|
+
}
|
|
861
|
+
|
|
862
|
+
/**
|
|
863
|
+
* Check for available updates without applying them
|
|
864
|
+
*/
|
|
865
|
+
export async function checkForUpdates(config: OmniConfig): Promise<SourceUpdateInfo[]> {
|
|
866
|
+
const sources = config.capabilities?.sources;
|
|
867
|
+
if (!sources || Object.keys(sources).length === 0) {
|
|
868
|
+
return [];
|
|
869
|
+
}
|
|
870
|
+
|
|
871
|
+
const lockFile = await loadLockFile();
|
|
872
|
+
const updates: SourceUpdateInfo[] = [];
|
|
873
|
+
|
|
874
|
+
for (const [id, source] of Object.entries(sources)) {
|
|
875
|
+
const sourceConfig = parseSourceConfig(source);
|
|
876
|
+
const targetPath = getSourceCapabilityPath(id);
|
|
877
|
+
const existing = lockFile.capabilities[id];
|
|
878
|
+
|
|
879
|
+
// Handle file sources
|
|
880
|
+
if (isFileSourceConfig(sourceConfig)) {
|
|
881
|
+
const sourcePath = parseFilePath(sourceConfig.source);
|
|
882
|
+
|
|
883
|
+
if (!existsSync(sourcePath)) {
|
|
884
|
+
updates.push({
|
|
885
|
+
id,
|
|
886
|
+
source: sourceConfig.source,
|
|
887
|
+
currentVersion: existing?.version || "unknown",
|
|
888
|
+
latestVersion: "source missing",
|
|
889
|
+
hasUpdate: false,
|
|
890
|
+
});
|
|
891
|
+
continue;
|
|
892
|
+
}
|
|
893
|
+
|
|
894
|
+
if (!existsSync(targetPath)) {
|
|
895
|
+
updates.push({
|
|
896
|
+
id,
|
|
897
|
+
source: sourceConfig.source,
|
|
898
|
+
currentVersion: "not installed",
|
|
899
|
+
latestVersion: "available",
|
|
900
|
+
hasUpdate: true,
|
|
901
|
+
});
|
|
902
|
+
continue;
|
|
903
|
+
}
|
|
904
|
+
|
|
905
|
+
// Calculate current hash of source
|
|
906
|
+
const sourceHash = await calculateDirectoryHash(sourcePath);
|
|
907
|
+
const currentHash = existing?.content_hash || "";
|
|
908
|
+
|
|
909
|
+
updates.push({
|
|
910
|
+
id,
|
|
911
|
+
source: sourceConfig.source,
|
|
912
|
+
currentVersion:
|
|
913
|
+
existing?.version || (currentHash ? currentHash.substring(0, 12) : "unknown"),
|
|
914
|
+
latestVersion: sourceHash.substring(0, 12),
|
|
915
|
+
hasUpdate: currentHash !== sourceHash,
|
|
916
|
+
});
|
|
917
|
+
continue;
|
|
918
|
+
}
|
|
919
|
+
|
|
920
|
+
// Handle git sources
|
|
921
|
+
const gitConfig = sourceConfig as GitCapabilitySourceConfig;
|
|
922
|
+
|
|
923
|
+
if (!existsSync(join(targetPath, ".git"))) {
|
|
924
|
+
// Not yet cloned
|
|
925
|
+
updates.push({
|
|
926
|
+
id,
|
|
927
|
+
source: gitConfig.source,
|
|
928
|
+
currentVersion: "not installed",
|
|
929
|
+
latestVersion: "unknown",
|
|
930
|
+
hasUpdate: true,
|
|
931
|
+
});
|
|
932
|
+
continue;
|
|
933
|
+
}
|
|
934
|
+
|
|
935
|
+
// Fetch to check for updates (without pulling)
|
|
936
|
+
const fetchProc = Bun.spawn(["git", "fetch", "--depth", "1", "origin"], {
|
|
937
|
+
cwd: targetPath,
|
|
938
|
+
stdout: "pipe",
|
|
939
|
+
stderr: "pipe",
|
|
940
|
+
});
|
|
941
|
+
await fetchProc.exited;
|
|
942
|
+
|
|
943
|
+
// Get remote commit
|
|
944
|
+
const targetRef = gitConfig.ref || "HEAD";
|
|
945
|
+
const lsProc = Bun.spawn(["git", "ls-remote", "origin", targetRef], {
|
|
946
|
+
cwd: targetPath,
|
|
947
|
+
stdout: "pipe",
|
|
948
|
+
stderr: "pipe",
|
|
949
|
+
});
|
|
950
|
+
const lsOutput = await new Response(lsProc.stdout).text();
|
|
951
|
+
await lsProc.exited;
|
|
952
|
+
|
|
953
|
+
const remoteCommit = lsOutput.split("\t")[0] || "";
|
|
954
|
+
const currentCommit = existing?.commit || "";
|
|
955
|
+
|
|
956
|
+
updates.push({
|
|
957
|
+
id,
|
|
958
|
+
source: gitConfig.source,
|
|
959
|
+
currentVersion: existing?.version || (currentCommit ? shortCommit(currentCommit) : "unknown"),
|
|
960
|
+
latestVersion: remoteCommit ? shortCommit(remoteCommit) : "unknown",
|
|
961
|
+
hasUpdate: currentCommit !== remoteCommit,
|
|
962
|
+
});
|
|
963
|
+
}
|
|
964
|
+
|
|
965
|
+
return updates;
|
|
966
|
+
}
|