skilld 0.15.3 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +7 -5
  2. package/dist/_chunks/{detect-imports.mjs → agent.mjs} +48 -15
  3. package/dist/_chunks/agent.mjs.map +1 -0
  4. package/dist/_chunks/{storage.mjs → cache.mjs} +81 -1
  5. package/dist/_chunks/cache.mjs.map +1 -0
  6. package/dist/_chunks/cache2.mjs +71 -0
  7. package/dist/_chunks/cache2.mjs.map +1 -0
  8. package/dist/_chunks/config.mjs +23 -0
  9. package/dist/_chunks/config.mjs.map +1 -1
  10. package/dist/_chunks/{embedding-cache2.mjs → embedding-cache.mjs} +1 -1
  11. package/dist/_chunks/embedding-cache.mjs.map +1 -0
  12. package/dist/_chunks/formatting.mjs +634 -0
  13. package/dist/_chunks/formatting.mjs.map +1 -0
  14. package/dist/_chunks/{version.d.mts → index.d.mts} +1 -1
  15. package/dist/_chunks/index.d.mts.map +1 -0
  16. package/dist/_chunks/{utils.d.mts → index2.d.mts} +1 -1
  17. package/dist/_chunks/index2.d.mts.map +1 -0
  18. package/dist/_chunks/install.mjs +539 -0
  19. package/dist/_chunks/install.mjs.map +1 -0
  20. package/dist/_chunks/list.mjs +70 -0
  21. package/dist/_chunks/list.mjs.map +1 -0
  22. package/dist/_chunks/markdown.mjs +7 -0
  23. package/dist/_chunks/markdown.mjs.map +1 -1
  24. package/dist/_chunks/pool.mjs +174 -0
  25. package/dist/_chunks/pool.mjs.map +1 -0
  26. package/dist/_chunks/pool2.mjs +1 -6
  27. package/dist/_chunks/pool2.mjs.map +1 -1
  28. package/dist/_chunks/prompts.mjs +234 -2
  29. package/dist/_chunks/prompts.mjs.map +1 -1
  30. package/dist/_chunks/sanitize.mjs +71 -0
  31. package/dist/_chunks/sanitize.mjs.map +1 -1
  32. package/dist/_chunks/search-interactive.mjs +245 -0
  33. package/dist/_chunks/search-interactive.mjs.map +1 -0
  34. package/dist/_chunks/search.mjs +12 -0
  35. package/dist/_chunks/shared.mjs +4 -0
  36. package/dist/_chunks/shared.mjs.map +1 -1
  37. package/dist/_chunks/{npm.mjs → sources.mjs} +401 -4
  38. package/dist/_chunks/sources.mjs.map +1 -0
  39. package/dist/_chunks/sync.mjs +1937 -0
  40. package/dist/_chunks/sync.mjs.map +1 -0
  41. package/dist/_chunks/sync2.mjs +13 -0
  42. package/dist/_chunks/uninstall.mjs +207 -0
  43. package/dist/_chunks/uninstall.mjs.map +1 -0
  44. package/dist/_chunks/validate.mjs +3 -0
  45. package/dist/_chunks/validate.mjs.map +1 -1
  46. package/dist/_chunks/yaml.mjs +19 -0
  47. package/dist/_chunks/yaml.mjs.map +1 -1
  48. package/dist/agent/index.d.mts +1 -1
  49. package/dist/agent/index.mjs +4 -3
  50. package/dist/cache/index.d.mts +2 -2
  51. package/dist/cache/index.mjs +2 -1
  52. package/dist/cli.mjs +146 -3823
  53. package/dist/cli.mjs.map +1 -1
  54. package/dist/index.d.mts +2 -3
  55. package/dist/index.mjs +4 -4
  56. package/dist/retriv/index.mjs +14 -2
  57. package/dist/retriv/index.mjs.map +1 -1
  58. package/dist/retriv/worker.mjs +3 -3
  59. package/dist/sources/index.d.mts +2 -2
  60. package/dist/sources/index.mjs +2 -1
  61. package/dist/types.d.mts +2 -3
  62. package/package.json +9 -9
  63. package/dist/_chunks/detect-imports.mjs.map +0 -1
  64. package/dist/_chunks/embedding-cache2.mjs.map +0 -1
  65. package/dist/_chunks/npm.mjs.map +0 -1
  66. package/dist/_chunks/storage.mjs.map +0 -1
  67. package/dist/_chunks/utils.d.mts.map +0 -1
  68. package/dist/_chunks/version.d.mts.map +0 -1
@@ -0,0 +1,1937 @@
1
+ import { a as getRepoCacheDir, c as getVersionKey, i as getPackageDbPath, o as getCacheDir, t as CACHE_DIR } from "./config.mjs";
2
+ import { n as sanitizeMarkdown } from "./sanitize.mjs";
3
+ import { _ as resolvePkgDir, a as getShippedSkills, b as writeToRepoCache, c as linkCachedDir, d as linkRepoCachedDir, f as linkShippedSkill, h as readCachedDocs, i as getPkgKeyFiles, l as linkPkg, m as listReferenceFiles, n as clearCache, o as hasShippedDocs, r as ensureCacheDir, s as isCached, u as linkPkgNamed, y as writeToCache } from "./cache.mjs";
4
+ import { i as parseFrontmatter } from "./markdown.mjs";
5
+ import { createIndex } from "../retriv/index.mjs";
6
+ import { d as getPrereleaseChangelogRef, n as getSharedSkillsDir, o as getBlogPreset, t as SHARED_SKILLS_DIR } from "./shared.mjs";
7
+ import { $ as fetchGitHubIssues, A as parseGitSkillInput, C as downloadLlmsDocs, D as normalizeLlmsLinks, F as formatDiscussionAsMarkdown, G as $fetch, H as generateReleaseIndex, I as generateDiscussionIndex, L as fetchCrawledDocs, M as resolveEntryFiles, N as generateDocsIndex, P as fetchGitHubDiscussions, R as toCrawlPattern, T as fetchLlmsTxt, U as isPrerelease, V as fetchReleaseNotes, X as parseGitHubUrl, Z as parsePackageSpec, b as isShallowGitDocs, et as formatIssueAsMarkdown, f as resolvePackageDocsWithAttempts, h as fetchGitDocs, i as fetchPkgDist, k as fetchGitSkills, n as fetchNpmPackage, nt as isGhAvailable, p as searchNpmPackages, s as readLocalDependencies, tt as generateIssueIndex, u as resolveLocalPackageDocs, v as fetchReadmeContent, x as resolveGitHubRepo, y as filterFrameworkDocs, z as fetchBlogReleases } from "./sources.mjs";
8
+ import { _ as targets, a as sanitizeName, f as maxItems, i as linkSkillToAgents, n as computeSkillDirName, p as maxLines, t as generateSkillMd } from "./prompts.mjs";
9
+ import { a as getModelName, i as getModelLabel, n as createToolProgress, o as optimizeDocs, r as getAvailableModels, t as detectImportedPackages } from "./agent.mjs";
10
+ import { C as introLine, F as readConfig, I as registerProject, N as hasCompletedWizard, O as resolveAgent, R as updateConfig, T as promptForAgent, g as readLock, h as parsePackages, j as defaultFeatures, k as sharedArgs, l as timedSpinner, n as formatDuration, u as getProjectState, w as isInteractive, x as getInstalledGenerators, y as writeLock } from "./formatting.mjs";
11
+ import { t as runWizard } from "../cli.mjs";
12
+ import { n as shutdownWorker } from "./pool2.mjs";
13
+ import { dirname, join, relative, resolve } from "pathe";
14
+ import { appendFileSync, copyFileSync, existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
15
+ import { isCI } from "std-env";
16
+ import pLimit from "p-limit";
17
+ import * as p from "@clack/prompts";
18
+ import { defineCommand } from "citty";
19
+ import logUpdate from "log-update";
20
// Human-readable labels for each docs-resolution step, keyed by step id.
// NOTE(review): keys presumably match the step names emitted by the package-docs
// resolver in ./sources.mjs — confirm against resolvePackageDocsWithAttempts.
const RESOLVE_STEP_LABELS = {
  "npm": "npm registry",
  "github-docs": "GitHub docs",
  "github-meta": "GitHub meta",
  "github-search": "GitHub search",
  "readme": "README",
  "llms.txt": "llms.txt",
  "crawl": "website crawl",
  "local": "node_modules"
};
30
/**
 * Classify a cached doc path into the right metadata type.
 *
 * Paths shaped like `issues/issue-<n>.md` / `discussions/discussion-<n>.md`
 * yield `{ type, number }`; anything under `releases/` is a release; everything
 * else is a plain doc.
 */
function classifyCachedDoc(path) {
  const issue = /^issues\/issue-(\d+)\.md$/.exec(path);
  if (issue) {
    return { type: "issue", number: Number(issue[1]) };
  }
  const discussion = /^discussions\/discussion-(\d+)\.md$/.exec(path);
  if (discussion) {
    return { type: "discussion", number: Number(discussion[1]) };
  }
  if (path.startsWith("releases/")) {
    return { type: "release" };
  }
  return { type: "doc" };
}
45
/**
 * Find already-installed skills that correspond to direct dependencies of
 * `packageName` (per the npm registry metadata), capped at 5 results.
 *
 * Dependency names are mapped to skill directory names via the lockfile
 * (both `packageName` and the parsed `packages` list of each lock entry).
 */
async function findRelatedSkills(packageName, skillsDir) {
  const matches = [];
  const npmInfo = await fetchNpmPackage(packageName);
  if (!npmInfo?.dependencies) return matches;
  if (!existsSync(skillsDir)) return matches;
  // Build package-name → skill-dir lookup from the lockfile.
  const byPackage = new Map();
  const lock = readLock(skillsDir);
  if (lock) {
    for (const [dirName, info] of Object.entries(lock.skills)) {
      if (info.packageName) byPackage.set(info.packageName, dirName);
      for (const pkg of parsePackages(info.packages)) {
        byPackage.set(pkg.name, dirName);
      }
    }
  }
  const installed = new Set(readdirSync(skillsDir));
  for (const dep of Object.keys(npmInfo.dependencies)) {
    const dirName = byPackage.get(dep);
    if (dirName && installed.has(dirName)) matches.push(dirName);
  }
  return matches.slice(0, 5);
}
65
/**
 * Clear the doc cache and search database for a package (used by --force).
 * When repo info is known, the shared per-repo cache directory is removed too.
 */
function forceClearCache(packageName, version, repoInfo) {
  clearCache(packageName, version);
  const dbPath = getPackageDbPath(packageName, version);
  if (existsSync(dbPath)) {
    rmSync(dbPath, { recursive: true, force: true });
  }
  if (!repoInfo) return;
  const repoDir = getRepoCacheDir(repoInfo.owner, repoInfo.repo);
  if (existsSync(repoDir)) {
    rmSync(repoDir, { recursive: true, force: true });
  }
}
81
/**
 * Link all reference symlinks into a skill dir: pkg, docs, issues,
 * discussions, releases, sections, plus pkg links for extra packages.
 * Feature flags gate issues/discussions/releases; repo-scoped caches are
 * preferred when repoInfo is available. All failures are swallowed
 * (best-effort linking).
 */
function linkAllReferences(skillDir, packageName, cwd, version, docsType, extraPackages, features, repoInfo) {
  const f = features ?? readConfig().features ?? defaultFeatures;
  // Link a feature directory from the repo cache when available,
  // otherwise from the per-package cache.
  const linkFeatureDir = (kind) => {
    if (repoInfo) linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, kind);
    else linkCachedDir(skillDir, packageName, version, kind);
  };
  try {
    linkPkg(skillDir, packageName, cwd, version);
    linkPkgNamed(skillDir, packageName, cwd, version);
    if (!hasShippedDocs(packageName, cwd, version) && docsType !== "readme") {
      linkCachedDir(skillDir, packageName, version, "docs");
    }
    if (f.issues) linkFeatureDir("issues");
    if (f.discussions) linkFeatureDir("discussions");
    if (f.releases) linkFeatureDir("releases");
    linkCachedDir(skillDir, packageName, version, "sections");
    for (const pkg of extraPackages ?? []) {
      if (pkg.name !== packageName) linkPkgNamed(skillDir, pkg.name, cwd, pkg.version);
    }
  } catch {}
}
100
/**
 * Detect docs type from cached directory contents.
 *
 * Priority: real docs tree (index.md or guide/ present) > llms.txt > README.
 * Fix: the original had a redundant `existsSync(docs/README.md)` check whose
 * branch returned the same `{ docsType: "readme" }` as the final fallback —
 * the dead branch is removed without changing any result.
 *
 * @returns {{docsType: string, docSource?: string}}
 */
function detectDocsType(packageName, version, repoUrl, llmsUrl) {
  const cacheDir = getCacheDir(packageName, version);
  const hasDocsTree = existsSync(join(cacheDir, "docs", "index.md")) || existsSync(join(cacheDir, "docs", "guide"));
  if (hasDocsTree) {
    return {
      docsType: "docs",
      docSource: repoUrl ? `${repoUrl}/tree/v${version}/docs` : "git"
    };
  }
  if (existsSync(join(cacheDir, "llms.txt"))) {
    return {
      docsType: "llms.txt",
      docSource: llmsUrl || "llms.txt"
    };
  }
  // Cached README (or nothing) — either way the type is "readme".
  return { docsType: "readme" };
}
114
/**
 * Link skills shipped inside the package itself, write lock entries, and
 * register the project. Returns `{ shipped, baseDir }`, or null when the
 * package ships no skills.
 */
function handleShippedSkills(packageName, version, cwd, agent, global) {
  const shipped = getShippedSkills(packageName, cwd, version);
  if (shipped.length === 0) return null;
  const shared = getSharedSkillsDir(cwd);
  const baseDir = global
    ? join(CACHE_DIR, "skills")
    : shared || join(cwd, targets[agent].skillsDir);
  mkdirSync(baseDir, { recursive: true });
  for (const skill of shipped) {
    linkShippedSkill(baseDir, skill.skillName, skill.skillDir);
    writeLock(baseDir, skill.skillName, {
      packageName,
      version,
      source: "shipped",
      // Lock entries record the sync date only (YYYY-MM-DD).
      syncedAt: new Date().toISOString().split("T")[0],
      generator: "skilld"
    });
  }
  if (!global) registerProject(cwd);
  return { shipped, baseDir };
}
138
/**
 * Resolve the base skills directory for an agent:
 * global cache dir, else the shared skills dir, else the agent's
 * project-local skills dir.
 */
function resolveBaseDir(cwd, agent, global) {
  if (global) return join(CACHE_DIR, "skills");
  return getSharedSkillsDir(cwd) || join(cwd, targets[agent].skillsDir);
}
146
/**
 * Try resolving a `link:` dependency to local package docs.
 * Returns null when there is no package.json, the dependency is not a
 * `link:` spec, or resolution fails.
 */
async function resolveLocalDep(packageName, cwd) {
  const manifestPath = join(cwd, "package.json");
  if (!existsSync(manifestPath)) return null;
  const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
  const spec = { ...manifest.dependencies, ...manifest.devDependencies }[packageName];
  if (!spec?.startsWith("link:")) return null;
  // Strip the "link:" prefix and resolve the target relative to cwd.
  return resolveLocalPackageDocs(resolve(cwd, spec.slice("link:".length)));
}
158
/**
 * Detect CHANGELOG.md in a package directory or in the cached releases dir.
 * Returns the relative reference path (`pkg/...` or `releases/CHANGELOG.md`),
 * or `false` when none exists.
 */
function detectChangelog(pkgDir, cacheDir) {
  if (pkgDir) {
    for (const name of ["CHANGELOG.md", "changelog.md"]) {
      if (existsSync(join(pkgDir, name))) return `pkg/${name}`;
    }
  }
  if (cacheDir && existsSync(join(cacheDir, "releases", "CHANGELOG.md"))) {
    return "releases/CHANGELOG.md";
  }
  return false;
}
167
/**
 * Fetch and cache all resources for a package: the docs cascade
 * (git docs -> website crawl -> llms.txt -> docs-site crawl -> README),
 * plus GitHub issues, discussions, and release notes.
 *
 * Side effects: writes fetched docs into the per-package cache (and the
 * per-repo cache for issues/discussions/releases when repo info is known)
 * via writeToCache / writeToRepoCache, and reports progress via
 * `opts.onProgress`.
 *
 * Returns { docSource, docsType, docsToIndex, hasIssues, hasDiscussions,
 * hasReleases, warnings, repoInfo, usedCache }.
 */
async function fetchAndCacheResources(opts) {
  const { packageName, resolved, version, onProgress } = opts;
  const features = opts.features ?? readConfig().features ?? defaultFeatures;
  // Invalidate the cache when a crawl URL exists but a previous run only
  // managed to cache a README — retry the crawl in that case.
  const cacheInvalidated = opts.useCache && resolved.crawlUrl && detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl).docsType === "readme";
  const useCache = opts.useCache && !cacheInvalidated;
  let docSource = resolved.readmeUrl || "readme";
  let docsType = "readme";
  const docsToIndex = [];
  const warnings = [];
  if (cacheInvalidated) warnings.push(`Retrying crawl for ${resolved.crawlUrl} (previous attempt only cached README)`);
  if (!useCache) {
    // Fresh fetch: run the docs cascade. Each stage only fires if the
    // previous stages left cachedDocs empty (or, for the docs-site crawl,
    // produced no docs/ entries).
    const cachedDocs = [];
    const isFrameworkDoc = (path) => filterFrameworkDocs([path], packageName).length > 0;
    // Stage 1: docs/ tree from the GitHub repo at the version tag.
    if (resolved.gitDocsUrl && resolved.repoUrl) {
      const gh = parseGitHubUrl(resolved.repoUrl);
      if (gh) {
        onProgress("Fetching git docs");
        const gitDocs = await fetchGitDocs(gh.owner, gh.repo, version, packageName);
        if (gitDocs?.fallback) warnings.push(`Docs fetched from ${gitDocs.ref} branch (no tag found for v${version})`);
        if (gitDocs && gitDocs.files.length > 0) {
          // Download raw files in batches of 20; individual failures become null.
          const BATCH_SIZE = 20;
          const results = [];
          for (let i = 0; i < gitDocs.files.length; i += BATCH_SIZE) {
            const batch = gitDocs.files.slice(i, i + BATCH_SIZE);
            onProgress(`Downloading docs ${Math.min(i + BATCH_SIZE, gitDocs.files.length)}/${gitDocs.files.length} from ${gitDocs.ref}`);
            const batchResults = await Promise.all(batch.map(async (file) => {
              const content = await $fetch(`${gitDocs.baseUrl}/${file}`, { responseType: "text" }).catch(() => null);
              if (!content) return null;
              return { file, content };
            }));
            results.push(...batchResults);
          }
          // Normalize every downloaded file under a docs/ cache prefix.
          for (const r of results) if (r) {
            const stripped = gitDocs.docsPrefix ? r.file.replace(gitDocs.docsPrefix, "") : r.file;
            const cachePath = stripped.startsWith("docs/") ? stripped : `docs/${stripped}`;
            cachedDocs.push({
              path: cachePath,
              content: r.content
            });
            docsToIndex.push({
              id: cachePath,
              content: r.content,
              metadata: {
                package: packageName,
                source: cachePath,
                type: "doc"
              }
            });
          }
          const downloaded = results.filter(Boolean).length;
          // NOTE: the `else` below binds to the inner `if` (dangling-else) —
          // it only runs when downloaded > 0 AND the docs are not shallow.
          if (downloaded > 0) if (isShallowGitDocs(downloaded) && resolved.llmsUrl) {
            // Too few files to be useful — discard and fall through to llms.txt.
            onProgress(`Shallow git-docs (${downloaded} files), trying llms.txt`);
            cachedDocs.length = 0;
            docsToIndex.length = 0;
          } else {
            docSource = `${resolved.repoUrl}/tree/${gitDocs.ref}/docs`;
            docsType = "docs";
            writeToCache(packageName, version, cachedDocs);
            // Git docs won: still cache llms.txt (and its linked docs) as
            // supplementary material, without indexing them.
            if (resolved.llmsUrl) {
              onProgress("Caching supplementary llms.txt");
              const llmsContent = await fetchLlmsTxt(resolved.llmsUrl);
              if (llmsContent) {
                const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin;
                const supplementary = [{
                  path: "llms.txt",
                  content: normalizeLlmsLinks(llmsContent.raw, baseUrl)
                }];
                if (llmsContent.links.length > 0) {
                  onProgress(`Downloading ${llmsContent.links.length} supplementary docs`);
                  const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
                    onProgress(`Downloading supplementary doc ${done + 1}/${total}`);
                  });
                  for (const doc of docs) {
                    if (!isFrameworkDoc(doc.url)) continue;
                    const localPath = doc.url.startsWith("/") ? doc.url.slice(1) : doc.url;
                    supplementary.push({
                      path: join("llms-docs", ...localPath.split("/")),
                      content: doc.content
                    });
                  }
                }
                writeToCache(packageName, version, supplementary);
              }
            }
          }
        }
      }
    }
    // Stage 2: explicit website crawl URL.
    if (resolved.crawlUrl && cachedDocs.length === 0) {
      onProgress("Crawling website");
      const crawledDocs = await fetchCrawledDocs(resolved.crawlUrl, onProgress).catch((err) => {
        warnings.push(`Crawl failed for ${resolved.crawlUrl}: ${err?.message || err}`);
        return [];
      });
      if (crawledDocs.length === 0 && resolved.crawlUrl) warnings.push(`Crawl returned 0 docs from ${resolved.crawlUrl}`);
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path)) continue;
          cachedDocs.push(doc);
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: {
              package: packageName,
              source: doc.path,
              type: "doc"
            }
          });
        }
        docSource = resolved.crawlUrl;
        docsType = "docs";
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // Stage 3: llms.txt plus its linked docs.
    if (resolved.llmsUrl && cachedDocs.length === 0) {
      onProgress("Fetching llms.txt");
      const llmsContent = await fetchLlmsTxt(resolved.llmsUrl);
      if (llmsContent) {
        docSource = resolved.llmsUrl;
        docsType = "llms.txt";
        const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin;
        cachedDocs.push({
          path: "llms.txt",
          content: normalizeLlmsLinks(llmsContent.raw, baseUrl)
        });
        if (llmsContent.links.length > 0) {
          onProgress(`Downloading ${llmsContent.links.length} linked docs`);
          const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
            onProgress(`Downloading linked doc ${done + 1}/${total}`);
          });
          for (const doc of docs) {
            if (!isFrameworkDoc(doc.url)) continue;
            const cachePath = join("docs", ...(doc.url.startsWith("/") ? doc.url.slice(1) : doc.url).split("/"));
            cachedDocs.push({
              path: cachePath,
              content: doc.content
            });
            docsToIndex.push({
              id: doc.url,
              content: doc.content,
              metadata: {
                package: packageName,
                source: cachePath,
                type: "doc"
              }
            });
          }
        }
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // Stage 4: crawl the docs site when nothing landed under docs/ yet.
    if (resolved.docsUrl && !cachedDocs.some((d) => d.path.startsWith("docs/"))) {
      const crawlPattern = resolved.crawlUrl || toCrawlPattern(resolved.docsUrl);
      onProgress("Crawling docs site");
      const crawledDocs = await fetchCrawledDocs(crawlPattern, onProgress).catch((err) => {
        warnings.push(`Crawl failed for ${crawlPattern}: ${err?.message || err}`);
        return [];
      });
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path)) continue;
          cachedDocs.push(doc);
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: {
              package: packageName,
              source: doc.path,
              type: "doc"
            }
          });
        }
        docSource = crawlPattern;
        docsType = "docs";
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // Stage 5 (last resort): README only.
    if (resolved.readmeUrl && cachedDocs.length === 0) {
      onProgress("Fetching README");
      const content = await fetchReadmeContent(resolved.readmeUrl);
      if (content) {
        cachedDocs.push({
          path: "docs/README.md",
          content
        });
        docsToIndex.push({
          id: "README.md",
          content,
          metadata: {
            package: packageName,
            source: "docs/README.md",
            type: "doc"
          }
        });
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // Generate docs/_INDEX.md once there is more than one markdown doc.
    if (docsType !== "readme" && cachedDocs.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md")).length > 1) {
      const docsIndex = generateDocsIndex(cachedDocs);
      if (docsIndex) writeToCache(packageName, version, [{
        path: "docs/_INDEX.md",
        content: docsIndex
      }]);
    }
  } else {
    // Cache hit: detect what was cached, and (re)build indexing inputs only
    // if the per-package search database is missing.
    onProgress("Loading cached docs");
    const detected = detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl);
    docsType = detected.docsType;
    if (detected.docSource) docSource = detected.docSource;
    if (!existsSync(getPackageDbPath(packageName, version))) {
      onProgress("Reading cached docs for indexing");
      const cached = readCachedDocs(packageName, version);
      for (const doc of cached) docsToIndex.push({
        id: doc.path,
        content: doc.content,
        metadata: {
          package: packageName,
          source: doc.path,
          ...classifyCachedDoc(doc.path)
        }
      });
    }
    // Backfill docs/_INDEX.md for caches created before index generation existed.
    if (docsType !== "readme" && !existsSync(join(getCacheDir(packageName, version), "docs", "_INDEX.md"))) {
      onProgress("Generating docs index");
      const cached = readCachedDocs(packageName, version);
      if (cached.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md")).length > 1) {
        const docsIndex = generateDocsIndex(cached);
        if (docsIndex) writeToCache(packageName, version, [{
          path: "docs/_INDEX.md",
          content: docsIndex
        }]);
      }
    }
  }
  // Issues/discussions/releases prefer the per-repo cache when a GitHub repo
  // is known, so multiple packages from one repo share them.
  const gh = resolved.repoUrl ? parseGitHubUrl(resolved.repoUrl) : null;
  const repoInfo = gh ? {
    owner: gh.owner,
    repo: gh.repo
  } : void 0;
  const repoCacheDir = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : null;
  const cacheDir = getCacheDir(packageName, version);
  const issuesDir = repoCacheDir ? join(repoCacheDir, "issues") : join(cacheDir, "issues");
  const discussionsDir = repoCacheDir ? join(repoCacheDir, "discussions") : join(cacheDir, "discussions");
  const releasesPath = repoCacheDir ? join(repoCacheDir, "releases") : join(cacheDir, "releases");
  // Issues: fetched only when the feature is on, `gh` CLI is available, and
  // nothing is cached yet.
  if (features.issues && gh && isGhAvailable() && !existsSync(issuesDir)) {
    onProgress("Fetching issues via GitHub API");
    const issues = await fetchGitHubIssues(gh.owner, gh.repo, 30, resolved.releasedAt, opts.from).catch(() => []);
    if (issues.length > 0) {
      onProgress(`Caching ${issues.length} issues`);
      const issueDocs = [...issues.map((issue) => ({
        path: `issues/issue-${issue.number}.md`,
        content: formatIssueAsMarkdown(issue)
      })), {
        path: "issues/_INDEX.md",
        content: generateIssueIndex(issues)
      }];
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, issueDocs);
      else writeToCache(packageName, version, issueDocs);
      for (const issue of issues) docsToIndex.push({
        id: `issue-${issue.number}`,
        content: sanitizeMarkdown(`#${issue.number}: ${issue.title}\n\n${issue.body || ""}`),
        metadata: {
          package: packageName,
          source: `issues/issue-${issue.number}.md`,
          type: "issue",
          number: issue.number
        }
      });
    }
  }
  // Discussions: same pattern as issues, capped at 20.
  if (features.discussions && gh && isGhAvailable() && !existsSync(discussionsDir)) {
    onProgress("Fetching discussions via GitHub API");
    const discussions = await fetchGitHubDiscussions(gh.owner, gh.repo, 20, resolved.releasedAt, opts.from).catch(() => []);
    if (discussions.length > 0) {
      onProgress(`Caching ${discussions.length} discussions`);
      const discussionDocs = [...discussions.map((d) => ({
        path: `discussions/discussion-${d.number}.md`,
        content: formatDiscussionAsMarkdown(d)
      })), {
        path: "discussions/_INDEX.md",
        content: generateDiscussionIndex(discussions)
      }];
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, discussionDocs);
      else writeToCache(packageName, version, discussionDocs);
      for (const d of discussions) docsToIndex.push({
        id: `discussion-${d.number}`,
        content: sanitizeMarkdown(`#${d.number}: ${d.title}\n\n${d.body || ""}`),
        metadata: {
          package: packageName,
          source: `discussions/discussion-${d.number}.md`,
          type: "discussion",
          number: d.number
        }
      });
    }
  }
  // Releases: GitHub release notes plus (for known presets) blog posts,
  // merged into one releases/ cache with a generated index.
  if (features.releases && gh && isGhAvailable() && !existsSync(releasesPath)) {
    onProgress("Fetching releases via GitHub API");
    const changelogRef = isPrerelease(version) ? getPrereleaseChangelogRef(packageName) : void 0;
    const releaseDocs = await fetchReleaseNotes(gh.owner, gh.repo, version, resolved.gitRef, packageName, opts.from, changelogRef).catch(() => []);
    let blogDocs = [];
    if (getBlogPreset(packageName)) {
      onProgress("Fetching blog release notes");
      blogDocs = await fetchBlogReleases(packageName, version).catch(() => []);
    }
    const allDocs = [...releaseDocs, ...blogDocs];
    // Blog entries: parse version from the blog-<version>.md filename and
    // title/date from frontmatter.
    const blogEntries = blogDocs.filter((d) => !d.path.endsWith("_INDEX.md")).map((d) => {
      const versionMatch = d.path.match(/blog-(.+)\.md$/);
      const fm = parseFrontmatter(d.content);
      return {
        version: versionMatch?.[1] ?? "",
        title: fm.title ?? `Release ${versionMatch?.[1]}`,
        date: fm.date ?? ""
      };
    }).filter((b) => b.version);
    // GitHub releases: reconstruct minimal release records from frontmatter
    // for the index (body markdown intentionally left empty).
    const ghReleases = releaseDocs.filter((d) => d.path.startsWith("releases/") && !d.path.endsWith("CHANGELOG.md")).map((d) => {
      const fm = parseFrontmatter(d.content);
      const tag = fm.tag ?? "";
      const name = fm.name ?? tag;
      const published = fm.published ?? "";
      return {
        id: 0,
        tag,
        name,
        prerelease: false,
        createdAt: published,
        publishedAt: published,
        markdown: ""
      };
    }).filter((r) => r.tag);
    const hasChangelog = allDocs.some((d) => d.path === "releases/CHANGELOG.md");
    if (ghReleases.length > 0 || blogEntries.length > 0) allDocs.push({
      path: "releases/_INDEX.md",
      content: generateReleaseIndex({
        releases: ghReleases,
        packageName,
        blogReleases: blogEntries,
        hasChangelog
      })
    });
    if (allDocs.length > 0) {
      onProgress(`Caching ${allDocs.length} releases`);
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, allDocs);
      else writeToCache(packageName, version, allDocs);
      for (const doc of allDocs) docsToIndex.push({
        id: doc.path,
        content: doc.content,
        metadata: {
          package: packageName,
          source: doc.path,
          type: "release"
        }
      });
    }
  }
  return {
    docSource,
    docsType,
    docsToIndex,
    hasIssues: features.issues && existsSync(issuesDir),
    hasDiscussions: features.discussions && existsSync(discussionsDir),
    hasReleases: features.releases && existsSync(releasesPath),
    warnings,
    repoInfo,
    usedCache: useCache
  };
}
538
/**
 * Index all resources into the search database in a single batch.
 * No-op when search is disabled or the database already exists.
 * Package entry files (exports) are appended to the docs before indexing.
 */
async function indexResources(opts) {
  const { packageName, version, cwd, onProgress } = opts;
  const features = opts.features ?? readConfig().features ?? defaultFeatures;
  if (!features.search) return;
  const dbPath = getPackageDbPath(packageName, version);
  if (existsSync(dbPath)) return;
  const allDocs = [...opts.docsToIndex];
  const pkgDir = resolvePkgDir(packageName, cwd, version);
  if (pkgDir) {
    onProgress("Scanning exports");
    const entryFiles = await resolveEntryFiles(pkgDir);
    for (const entry of entryFiles) {
      allDocs.push({
        id: entry.path,
        content: entry.content,
        metadata: {
          package: packageName,
          source: `pkg/${entry.path}`,
          type: entry.type
        }
      });
    }
  }
  if (allDocs.length === 0) return;
  onProgress(`Building search index (${allDocs.length} docs)`);
  await createIndex(allDocs, {
    dbPath,
    onProgress: ({ phase, current, total }) => {
      if (phase === "embedding") {
        onProgress(`Creating embeddings (${current}/${total})`);
        return;
      }
      if (phase !== "storing") return;
      // Report source/types entries as "code"; everything else by its type.
      const doc = allDocs[current - 1];
      const kind = doc?.metadata?.type === "source" || doc?.metadata?.type === "types" ? "code" : doc?.metadata?.type || "doc";
      onProgress(`Storing ${kind} (${current}/${total})`);
    }
  });
}
572
/**
 * Eject references: copy cached files as real files into references/ dir.
 * Used for portable skills (git repos, sharing). Replaces symlinks with copies.
 * Does NOT copy pkg files — those reference node_modules directly.
 */
function ejectReferences(skillDir, packageName, cwd, version, docsType, features, repoInfo) {
  const f = features ?? readConfig().features ?? defaultFeatures;
  const cacheDir = getCacheDir(packageName, version);
  const refsDir = join(skillDir, "references");
  // Issues/discussions/releases live in the repo cache when repo info is known.
  const repoDir = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : cacheDir;
  if (!hasShippedDocs(packageName, cwd, version) && docsType !== "readme") {
    copyCachedSubdir(cacheDir, refsDir, "docs");
  }
  const featureDirs = [
    [f.issues, "issues"],
    [f.discussions, "discussions"],
    [f.releases, "releases"]
  ];
  for (const [enabled, kind] of featureDirs) {
    if (enabled) copyCachedSubdir(repoDir, refsDir, kind);
  }
}
587
/**
 * Recursively copy a cached subdirectory into the references dir.
 * Silently does nothing when the source subdirectory does not exist.
 * Files are copied (dereferencing any symlinked files), directories recreated.
 */
function copyCachedSubdir(cacheDir, refsDir, subdir) {
  const srcRoot = join(cacheDir, subdir);
  if (!existsSync(srcRoot)) return;
  const destRoot = join(refsDir, subdir);
  mkdirSync(destRoot, { recursive: true });
  const copyTree = (dir, rel) => {
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
      const childRel = rel ? `${rel}/${entry.name}` : entry.name;
      const srcPath = join(dir, entry.name);
      const destPath = join(destRoot, childRel);
      if (entry.isDirectory()) {
        mkdirSync(destPath, { recursive: true });
        copyTree(srcPath, childRel);
      } else {
        copyFileSync(srcPath, destPath);
      }
    }
  };
  copyTree(srcRoot, "");
}
605
/**
 * Check if .gitignore has a `.skilld` entry; if missing, add it.
 * Skipped for global installs. Non-interactive runs append silently;
 * interactive runs show git guidance and ask for confirmation first.
 *
 * Fix: the entry string and the append-or-create logic were duplicated
 * verbatim in the non-interactive and interactive paths — now factored
 * into a single local helper so the two paths cannot drift apart.
 */
async function ensureGitignore(skillsDir, cwd, isGlobal) {
  if (isGlobal) return;
  const gitignorePath = join(cwd, ".gitignore");
  const pattern = ".skilld";
  // Already ignored — nothing to do.
  if (existsSync(gitignorePath) && readFileSync(gitignorePath, "utf-8").split("\n").some((line) => line.trim() === pattern)) return;
  // Append the entry (ensuring a trailing newline on the existing file),
  // or create the file if it does not exist yet.
  const appendEntry = () => {
    const entry = `\n# Skilld references (recreated by \`skilld install\`)\n${pattern}\n`;
    if (existsSync(gitignorePath)) appendFileSync(gitignorePath, `${readFileSync(gitignorePath, "utf-8").endsWith("\n") ? "" : "\n"}${entry}`);
    else writeFileSync(gitignorePath, entry);
  };
  if (!isInteractive()) {
    appendEntry();
    return;
  }
  const relSkillsDir = relative(cwd, skillsDir) || ".";
  p.log.info(`\x1B[1mGit guidance:\x1B[0m\n \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/*/SKILL.md\x1B[0m\n \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/skilld-lock.yaml\x1B[0m\n \x1B[31m✗\x1B[0m Ignore: \x1B[36m${pattern}\x1B[0m \x1B[90m(recreated by \`skilld install\`)\x1B[0m`);
  const add = await p.confirm({
    message: `Add \`${pattern}\` to .gitignore?`,
    initialValue: true
  });
  if (p.isCancel(add) || !add) return;
  appendEntry();
  p.log.success("Updated .gitignore");
}
634
// Sentinel markers wrapping the skilld snippet inside agent instruction files;
// the start marker is also used to detect whether the snippet is already present.
const SKILLD_MARKER_START = "<!-- skilld -->";
const SKILLD_MARKER_END = "<!-- /skilld -->";
// Skill-activation snippet appended to agent instruction files by ensureAgentInstructions.
const SKILLD_INSTRUCTIONS = `${SKILLD_MARKER_START}
Before modifying code, evaluate each installed skill against the current task.
For each skill, determine YES/NO relevance and invoke all YES skills before proceeding.
${SKILLD_MARKER_END}`;
640
/**
 * Check if agent instruction file has skilld skill-activation snippet.
 * If missing, add it: silently when non-interactive, otherwise after a
 * preview note and confirmation. Skipped for global installs or agents
 * without an instructionFile.
 *
 * @param {string} agent - key into `targets` for the agent's config
 * @param {string} cwd - project root containing the instruction file
 * @param {boolean} isGlobal - global installs never touch project files
 */
async function ensureAgentInstructions(agent, cwd, isGlobal) {
  if (isGlobal) return;
  const agentConfig = targets[agent];
  if (!agentConfig.instructionFile) return;
  const filePath = join(cwd, agentConfig.instructionFile);
  // Snippet already present (detected via start marker) — nothing to do.
  if (existsSync(filePath) && readFileSync(filePath, "utf-8").includes(SKILLD_MARKER_START)) return;
  // Single writer for both the non-interactive and confirmed paths
  // (previously duplicated verbatim): append with newline padding, or create.
  const writeSnippet = () => {
    if (existsSync(filePath)) {
      const current = readFileSync(filePath, "utf-8");
      appendFileSync(filePath, `${current.endsWith("\n") ? "" : "\n"}\n${SKILLD_INSTRUCTIONS}\n`);
    } else {
      writeFileSync(filePath, `${SKILLD_INSTRUCTIONS}\n`);
    }
  };
  if (!isInteractive()) {
    // Non-interactive (CI, piped): add silently, no prompt and no success log.
    writeSnippet();
    return;
  }
  p.note(SKILLD_INSTRUCTIONS, `Will be added to ${agentConfig.instructionFile}`);
  const add = await p.confirm({
    message: `Add skill activation instructions to ${agentConfig.instructionFile}?`,
    initialValue: true
  });
  if (p.isCancel(add) || !add) return;
  writeSnippet();
  p.log.success(`Updated ${agentConfig.instructionFile}`);
}
667
/** Select LLM model for SKILL.md generation (independent of target agent) */
async function selectModel(skipPrompt) {
  const config = readConfig();
  const models = await getAvailableModels();
  if (models.length === 0) {
    p.log.warn("No LLM CLIs found (claude, gemini, codex)");
    return null;
  }
  // Preferred default: the recommended model, else the first available.
  const fallbackId = models.find((m) => m.recommended)?.id ?? models[0].id;
  if (skipPrompt) {
    // Honor the configured model only if it is still available.
    const configured = config.model;
    if (configured && models.some((m) => m.id === configured)) return configured;
    return fallbackId;
  }
  const picked = await p.select({
    message: "Model for SKILL.md generation",
    options: models.map((m) => ({
      label: m.recommended ? `${m.name} (Recommended)` : m.name,
      value: m.id,
      hint: `${m.agentName} · ${m.hint}`
    })),
    initialValue: fallbackId
  });
  if (p.isCancel(picked)) {
    p.cancel("Cancelled");
    return null;
  }
  // Persist the choice as the new default.
  updateConfig({ model: picked });
  return picked;
}
695
/** Default sections when model is pre-set (non-interactive) */
// Section ids must match the option values in selectSkillSections.
const DEFAULT_SECTIONS = ["best-practices", "api-changes"];
697
/**
 * Interactively pick which SKILL.md sections the LLM should generate.
 *
 * @param {string} [message] - multiselect prompt title
 * @returns {Promise<{sections: string[], customPrompt?: {heading: string, body: string}, cancelled: boolean}>}
 *   `cancelled: true` when any prompt is aborted; `sections` may be empty
 *   when the user deselects everything without cancelling.
 */
async function selectSkillSections(message = "Generate SKILL.md with LLM") {
  p.log.info("Budgets adapt to package release density.");
  const selected = await p.multiselect({
    message,
    options: [
      {
        label: "API changes",
        value: "api-changes",
        hint: "new/deprecated APIs from version history"
      },
      {
        label: "Best practices",
        value: "best-practices",
        hint: "gotchas, pitfalls, patterns"
      },
      {
        label: "Custom section",
        value: "custom",
        hint: "add your own section"
      }
    ],
    initialValues: DEFAULT_SECTIONS,
    required: false
  });
  if (p.isCancel(selected)) return {
    sections: [],
    cancelled: true
  };
  const sections = selected;
  if (sections.length === 0) return {
    sections: [],
    cancelled: false
  };
  // With multiple sections the per-section budget shrinks; show the split.
  if (sections.length > 1) {
    const n = sections.length;
    const budgetLines = [];
    for (const s of sections) switch (s) {
      case "api-changes":
        budgetLines.push(` API changes ${maxItems(6, 12, n)}–${maxItems(6, Math.round(12 * 1.6), n)} items (adapts to release churn)`);
        break;
      case "best-practices":
        budgetLines.push(` Best practices ${maxItems(4, 10, n)}–${maxItems(4, Math.round(10 * 1.3), n)} items`);
        break;
      case "custom":
        budgetLines.push(` Custom ≤${maxLines(50, 80, n)} lines`);
        break;
    }
    p.log.info(`Budget (${n} sections):\n${budgetLines.join("\n")}`);
  }
  // "custom" requires two follow-up prompts: a heading and free-form instructions.
  let customPrompt;
  if (sections.includes("custom")) {
    const heading = await p.text({
      message: "Section heading",
      placeholder: "e.g. \"Migration from v2\" or \"SSR Patterns\""
    });
    if (p.isCancel(heading)) return {
      sections: [],
      cancelled: true
    };
    const body = await p.text({
      message: "Instructions for this section",
      placeholder: "e.g. \"Document breaking changes and migration steps from v2 to v3\""
    });
    if (p.isCancel(body)) return {
      sections: [],
      cancelled: true
    };
    customPrompt = {
      heading,
      body
    };
  }
  return {
    sections,
    customPrompt,
    cancelled: false
  };
}
775
/**
 * Resolve sections + model for LLM enhancement.
 * If presetModel is provided, uses DEFAULT_SECTIONS without prompting.
 * Returns null if cancelled or no sections/model selected.
 *
 * @param {string} [presetModel] - model id to use without prompting
 * @param {string} [message] - optional override for the sections prompt title
 * @returns {Promise<{model: string, sections: string[], customPrompt?: object} | null>}
 */
async function selectLlmConfig(presetModel, message) {
  if (presetModel) return {
    model: presetModel,
    sections: DEFAULT_SECTIONS
  };
  // No preset and no TTY: nothing to ask, so skip enhancement entirely.
  if (!isInteractive()) return null;
  // Resolve the configured/recommended default without prompting (skipPrompt=true).
  const defaultModel = await selectModel(true);
  if (!defaultModel) return null;
  const defaultModelName = getModelName(defaultModel);
  const choice = await p.select({
    message: "Generate enhanced SKILL.md?",
    options: [
      {
        label: defaultModelName,
        value: "default",
        hint: "configured default"
      },
      {
        label: "Different model",
        value: "pick",
        hint: "choose another model"
      },
      {
        label: "Skip",
        value: "skip",
        hint: "base skill only"
      }
    ]
  });
  if (p.isCancel(choice)) return null;
  if (choice === "skip") return null;
  // "pick" re-runs the model selector with the full prompt flow.
  const model = choice === "pick" ? await selectModel(false) : defaultModel;
  if (!model) return null;
  const modelName = getModelName(model);
  const { sections, customPrompt, cancelled } = await selectSkillSections(message ? `${message} (${modelName})` : `Generate SKILL.md with ${modelName}`);
  if (cancelled || sections.length === 0) return null;
  return {
    model,
    sections,
    customPrompt
  };
}
822
/**
 * Run LLM optimization over a skill's reference docs and, on success,
 * regenerate SKILL.md with the optimized body. Logs token/cost stats,
 * warnings, and partial failures; logs an error line if optimization fails.
 *
 * @param {object} opts - package identity, skill dir, model, resolved metadata,
 *   resource flags (issues/discussions/releases/changelog/docs), generation
 *   sections, and feature toggles — all forwarded to optimizeDocs/generateSkillMd.
 */
async function enhanceSkillWithLLM(opts) {
  const { packageName, version, skillDir, dirName, model, resolved, relatedSkills, hasIssues, hasDiscussions, hasReleases, hasChangelog, docsType, hasShippedDocs: shippedDocs, pkgFiles, force, debug, sections, customPrompt, packages, features, eject } = opts;
  // NOTE(review): straight alias of `features` — presumably a leftover from a
  // removed override; verify before simplifying.
  const effectiveFeatures = features;
  const llmLog = p.taskLog({ title: `Agent exploring ${packageName}` });
  const docFiles = listReferenceFiles(skillDir);
  const { optimized, wasOptimized, usage, cost, warnings, error, debugLogsDir } = await optimizeDocs({
    packageName,
    skillDir,
    model,
    version,
    hasGithub: hasIssues || hasDiscussions,
    hasReleases,
    hasChangelog,
    docFiles,
    docsType,
    hasShippedDocs: shippedDocs,
    noCache: force, // --force bypasses the optimization cache
    debug,
    sections,
    customPrompt,
    features: effectiveFeatures,
    pkgFiles,
    onProgress: createToolProgress(llmLog)
  });
  if (wasOptimized) {
    // Summarize token usage and dollar cost when the backend reports them.
    const costParts = [];
    if (usage) {
      const totalK = Math.round(usage.totalTokens / 1e3);
      costParts.push(`${totalK}k tokens`);
    }
    if (cost) costParts.push(`$${cost.toFixed(2)}`);
    const costSuffix = costParts.length > 0 ? ` (${costParts.join(", ")})` : "";
    llmLog.success(`Generated best practices${costSuffix}`);
    if (debugLogsDir) p.log.info(`Debug logs: ${relative(process.cwd(), debugLogsDir)}`);
    // `error` alongside wasOptimized means some sections failed but output exists.
    if (error) p.log.warn(`\x1B[33mPartial failure: ${error}\x1B[0m`);
    if (warnings?.length) for (const w of warnings) p.log.warn(`\x1B[33m${w}\x1B[0m`);
    // Rebuild SKILL.md with the LLM-optimized body and full metadata.
    const skillMd = generateSkillMd({
      name: packageName,
      version,
      releasedAt: resolved.releasedAt,
      dependencies: resolved.dependencies,
      distTags: resolved.distTags,
      body: optimized,
      relatedSkills,
      hasIssues,
      hasDiscussions,
      hasReleases,
      hasChangelog,
      docsType,
      hasShippedDocs: shippedDocs,
      pkgFiles,
      generatedBy: getModelLabel(model),
      dirName,
      packages,
      repoUrl: resolved.repoUrl,
      features,
      eject
    });
    writeFileSync(join(skillDir, "SKILL.md"), skillMd);
  } else llmLog.error(`LLM optimization failed${error ? `: ${error}` : ""}`);
}
883
+ /**
884
+ * Anonymous telemetry — fire-and-forget GET to add-skill.vercel.sh/t
885
+ *
886
+ * Opt-out: set DISABLE_TELEMETRY=1 or DO_NOT_TRACK=1
887
+ * Auto-disabled in CI environments.
888
+ */
889
// Telemetry beacon endpoint (fire-and-forget GET; see track()).
const TELEMETRY_URL = "https://add-skill.vercel.sh/t";
// Version string reported in the `v` telemetry param.
// NOTE(review): hard-coded and distinct from the package version — confirm it is bumped intentionally.
const SKILLS_VERSION = "1.3.9";
891
/** Telemetry is on unless the user opted out via DISABLE_TELEMETRY or DO_NOT_TRACK. */
function isEnabled() {
  const { DISABLE_TELEMETRY, DO_NOT_TRACK } = process.env;
  // De Morgan of the original `!a && !b`; empty-string values still count as unset.
  return !(DISABLE_TELEMETRY || DO_NOT_TRACK);
}
894
/**
 * Fire-and-forget telemetry beacon. Serializes `data` into query params
 * (skipping null/undefined values) and GETs the telemetry URL; every
 * failure — including fetch rejection — is swallowed silently.
 */
function track(data) {
  if (!isEnabled()) return;
  try {
    const query = new URLSearchParams();
    query.set("v", SKILLS_VERSION);
    if (isCI) query.set("ci", "1");
    for (const [key, value] of Object.entries(data)) {
      if (value != null) query.set(key, String(value));
    }
    fetch(`${TELEMETRY_URL}?${query.toString()}`).catch(() => {});
  } catch {}
}
904
/**
 * Install pre-authored skills from a git/local source into the skills dir.
 * Falls back to syncGitHubRepo (doc generation) when a GitHub source has no
 * pre-authored skills. Supports filtering via opts.skillFilter and an
 * interactive multiselect when several skills are found.
 *
 * @param {object} opts - { source, agent, global, yes, skillFilter? }
 */
async function syncGitSkills(opts) {
  const { source, agent, global: isGlobal, yes } = opts;
  const cwd = process.cwd();
  const agentConfig = targets[agent];
  // Global installs land in the shared cache; project installs in the agent's skills dir.
  const baseDir = isGlobal ? join(CACHE_DIR, "skills") : join(cwd, agentConfig.skillsDir);
  const label = source.type === "local" ? source.localPath : `${source.owner}/${source.repo}`;
  const spin = timedSpinner();
  spin.start(`Fetching skills from ${label}`);
  const { skills } = await fetchGitSkills(source, (msg) => spin.message(msg));
  if (skills.length === 0) {
    // GitHub repo without authored skills: generate a skill from its docs instead.
    if (source.type === "github" && source.owner && source.repo) {
      spin.stop(`No pre-authored skills in ${label}, generating from repo docs...`);
      return syncGitHubRepo(opts);
    }
    spin.stop(`No skills found in ${label}`);
    return;
  }
  spin.stop(`Found ${skills.length} skill(s) in ${label}`);
  let selected = skills;
  if (opts.skillFilter?.length) {
    // Match filter names case-insensitively, ignoring a `-skilld` suffix on either side.
    const filterSet = new Set(opts.skillFilter.map((s) => s.toLowerCase().replace(/-skilld$/, "")));
    selected = skills.filter((s) => filterSet.has(s.name.toLowerCase().replace(/-skilld$/, "")));
    if (selected.length === 0) {
      p.log.warn(`No skills matched: ${opts.skillFilter.join(", ")}`);
      p.log.message(`Available: ${skills.map((s) => s.name).join(", ")}`);
      return;
    }
  } else if (source.skillPath) selected = skills;
  else if (skills.length > 1 && !yes) {
    // Multiple skills and no --yes: let the user pick interactively.
    const choices = await p.autocompleteMultiselect({
      message: `Select skills to install from ${label}`,
      options: skills.map((s) => ({
        label: s.name.replace(/-skilld$/, ""),
        value: s.name,
        hint: s.description || s.path
      })),
      initialValues: []
    });
    if (p.isCancel(choices)) return;
    const selectedNames = new Set(choices);
    selected = skills.filter((s) => selectedNames.has(s.name));
    if (selected.length === 0) return;
  }
  mkdirSync(baseDir, { recursive: true });
  // Write each skill: sanitized SKILL.md, any bundled files, and a lock entry.
  for (const skill of selected) {
    const skillDir = join(baseDir, skill.name);
    mkdirSync(skillDir, { recursive: true });
    writeFileSync(join(skillDir, "SKILL.md"), sanitizeMarkdown(skill.content));
    if (skill.files.length > 0) for (const f of skill.files) {
      const filePath = join(skillDir, f.path);
      mkdirSync(dirname(filePath), { recursive: true });
      writeFileSync(filePath, f.content);
    }
    // NOTE(review): both ternary branches yield source.type when type === "local";
    // this is equivalent to `source.type` — verify before simplifying.
    const sourceType = source.type === "local" ? "local" : source.type;
    writeLock(baseDir, skill.name, {
      source: sourceType,
      repo: source.type === "local" ? source.localPath : `${source.owner}/${source.repo}`,
      path: skill.path || void 0,
      ref: source.ref || "main",
      syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0], // date only (YYYY-MM-DD)
      generator: "external"
    });
  }
  if (!isGlobal) registerProject(cwd);
  // Telemetry only for remote sources (local paths are never reported).
  if (source.type !== "local" && source.owner && source.repo) track({
    event: "install",
    source: `${source.owner}/${source.repo}`,
    skills: selected.map((s) => s.name).join(","),
    agents: agent,
    ...isGlobal && { global: "1" },
    sourceType: source.type
  });
  const names = selected.map((s) => `\x1B[36m${s.name}\x1B[0m`).join(", ");
  p.log.success(`Installed ${names}`);
}
979
/**
 * Generate a skill from a GitHub repo's docs (no npm package required).
 * Uses the same pipeline as npm packages: resolve → fetch → cache → generate → LLM enhance.
 *
 * @param {object} opts - { source: {owner, repo, ...}, agent, global, yes, model?, force?, debug?, from? }
 */
async function syncGitHubRepo(opts) {
  const { source, agent, global: isGlobal, yes } = opts;
  const owner = source.owner;
  const repo = source.repo;
  const cwd = process.cwd();
  const spin = timedSpinner();
  spin.start(`Resolving ${owner}/${repo}`);
  const resolved = await resolveGitHubRepo(owner, repo, (msg) => spin.message(msg));
  if (!resolved) {
    spin.stop(`Could not find docs for ${owner}/${repo}`);
    return;
  }
  const repoUrl = `https://github.com/${owner}/${repo}`;
  // Synthetic package name for repos that have no npm package.
  const packageName = `${owner}-${repo}`;
  const version = resolved.version || "main";
  const versionKey = getVersionKey(version);
  const useCache = isCached(packageName, version);
  spin.stop(`Resolved ${owner}/${repo}@${useCache ? versionKey : version}${useCache ? " (cached)" : ""}`);
  ensureCacheDir();
  const baseDir = resolveBaseDir(cwd, agent, isGlobal);
  const skillDirName = sanitizeName(`${owner}-${repo}`);
  const skillDir = join(baseDir, skillDirName);
  mkdirSync(skillDir, { recursive: true });
  const features = readConfig().features ?? defaultFeatures;
  // Fetch docs/issues/discussions/releases into the cache.
  const resSpin = timedSpinner();
  resSpin.start("Finding resources");
  const resources = await fetchAndCacheResources({
    packageName,
    resolved,
    version,
    useCache,
    features,
    from: opts.from,
    onProgress: (msg) => resSpin.message(msg)
  });
  // Human-readable summary of what was fetched.
  const resParts = [];
  if (resources.docsToIndex.length > 0) {
    const docCount = resources.docsToIndex.filter((d) => d.metadata?.type === "doc").length;
    if (docCount > 0) resParts.push(`${docCount} docs`);
  }
  if (resources.hasIssues) resParts.push("issues");
  if (resources.hasDiscussions) resParts.push("discussions");
  if (resources.hasReleases) resParts.push("releases");
  resSpin.stop(`Fetched ${resParts.length > 0 ? resParts.join(", ") : "resources"}`);
  for (const w of resources.warnings) p.log.warn(`\x1B[33m${w}\x1B[0m`);
  linkAllReferences(skillDir, packageName, cwd, version, resources.docsType, void 0, features);
  // Optional semantic search index over the fetched docs.
  if (features.search) {
    const idxSpin = timedSpinner();
    idxSpin.start("Creating search index");
    await indexResources({
      packageName,
      version,
      cwd,
      docsToIndex: resources.docsToIndex,
      features,
      onProgress: (msg) => idxSpin.message(msg)
    });
    idxSpin.stop("Search index ready");
  }
  const hasChangelog = detectChangelog(resolvePkgDir(packageName, cwd, version), getCacheDir(packageName, version));
  const shippedDocs = hasShippedDocs(packageName, cwd, version);
  const pkgFiles = getPkgKeyFiles(packageName, cwd, version);
  // Record provenance in the lockfile before generating SKILL.md.
  writeLock(baseDir, skillDirName, {
    packageName,
    version,
    repo: `${owner}/${repo}`,
    source: resources.docSource,
    syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
    generator: "skilld"
  });
  // Base (non-LLM) SKILL.md — always written, may be replaced by the LLM pass.
  const baseSkillMd = generateSkillMd({
    name: packageName,
    version,
    releasedAt: resolved.releasedAt,
    description: resolved.description,
    relatedSkills: [],
    hasIssues: resources.hasIssues,
    hasDiscussions: resources.hasDiscussions,
    hasReleases: resources.hasReleases,
    hasChangelog,
    docsType: resources.docsType,
    hasShippedDocs: shippedDocs,
    pkgFiles,
    dirName: skillDirName,
    repoUrl,
    features
  });
  writeFileSync(join(skillDir, "SKILL.md"), baseSkillMd);
  p.log.success(`Created base skill: ${relative(cwd, skillDir)}`);
  // LLM enhancement: skipped by config, or when --yes was given without an explicit model.
  if (!readConfig().skipLlm && (!yes || opts.model)) {
    const llmConfig = await selectLlmConfig(opts.model);
    if (llmConfig) {
      p.log.step(getModelLabel(llmConfig.model));
      await enhanceSkillWithLLM({
        packageName,
        version,
        skillDir,
        dirName: skillDirName,
        model: llmConfig.model,
        resolved,
        relatedSkills: [],
        hasIssues: resources.hasIssues,
        hasDiscussions: resources.hasDiscussions,
        hasReleases: resources.hasReleases,
        hasChangelog,
        docsType: resources.docsType,
        hasShippedDocs: shippedDocs,
        pkgFiles,
        force: opts.force,
        debug: opts.debug,
        sections: llmConfig.sections,
        customPrompt: llmConfig.customPrompt,
        features
      });
    }
  }
  // Project-local bookkeeping: shared-dir links, registry, .gitignore, agent instructions.
  const shared = !isGlobal && getSharedSkillsDir(cwd);
  if (shared) linkSkillToAgents(skillDirName, shared, cwd);
  if (!isGlobal) {
    registerProject(cwd);
    await ensureGitignore(shared || targets[agent].skillsDir, cwd, isGlobal);
    await ensureAgentInstructions(agent, cwd, isGlobal);
  }
  await shutdownWorker();
  track({
    event: "install",
    source: `${owner}/${repo}`,
    skills: skillDirName,
    agents: agent,
    ...isGlobal && { global: "1" },
    sourceType: "github-generated"
  });
  p.outro(`Synced ${owner}/${repo} to ${relative(cwd, skillDir)}`);
}
1117
// Per-status glyphs for the parallel sync progress renderer (see syncPackagesParallel).
const STATUS_ICONS = {
  pending: "○",
  resolving: "◐",
  downloading: "◒",
  embedding: "◓",
  exploring: "◔",
  thinking: "◔",
  generating: "◑",
  done: "✓",
  error: "✗"
};
// Matching ANSI color codes for each status; the renderer appends the reset code.
const STATUS_COLORS = {
  pending: "\x1B[90m",
  resolving: "\x1B[36m",
  downloading: "\x1B[36m",
  embedding: "\x1B[36m",
  exploring: "\x1B[34m",
  thinking: "\x1B[35m",
  generating: "\x1B[33m",
  done: "\x1B[32m",
  error: "\x1B[31m"
};
1139
/**
 * Sync many packages concurrently with a live per-package status display.
 * Phase 1 builds base skills in parallel (bounded by `concurrency`);
 * phase 2 optionally enhances the successful ones with an LLM.
 *
 * @param {object} config - { packages, agent, global, model?, yes?, force?, debug?, mode?, concurrency? }
 */
async function syncPackagesParallel(config) {
  const { packages, concurrency = 5 } = config;
  const agent = targets[config.agent];
  // Per-package render state, keyed by the package spec.
  const states = /* @__PURE__ */ new Map();
  const cwd = process.cwd();
  for (const pkg of packages) states.set(pkg, {
    name: pkg,
    status: "pending",
    message: "Waiting..."
  });
  // Repaint the whole status board in place via logUpdate.
  function render() {
    const maxNameLen = Math.max(...packages.map((p) => p.length), 20);
    const lines = [...states.values()].map((s) => {
      const icon = STATUS_ICONS[s.status];
      const color = STATUS_COLORS[s.status];
      const reset = "\x1B[0m";
      const dim = "\x1B[90m";
      const name = s.name.padEnd(maxNameLen);
      const version = s.version ? `${dim}${s.version}${reset} ` : "";
      // Show elapsed time only for finished (done/error) entries.
      const elapsed = (s.status === "done" || s.status === "error") && s.startedAt && s.completedAt ? ` ${dim}(${formatDuration(s.completedAt - s.startedAt)})${reset}` : "";
      const preview = s.streamPreview ? ` ${dim}${s.streamPreview}${reset}` : "";
      return ` ${color}${icon}${reset} ${name} ${version}${s.message}${elapsed}${preview}`;
    });
    const doneCount = [...states.values()].filter((s) => s.status === "done").length;
    const errorCount = [...states.values()].filter((s) => s.status === "error").length;
    logUpdate(`\x1B[1m${config.mode === "update" ? "Updating" : "Syncing"} ${packages.length} packages\x1B[0m (${doneCount} done${errorCount > 0 ? `, ${errorCount} failed` : ""})\n` + lines.join("\n"));
  }
  // Progress callback handed to the workers; stamps start/end times and re-renders.
  function update(pkg, status, message, version) {
    const state = states.get(pkg);
    if (!state.startedAt && status !== "pending") state.startedAt = performance.now();
    if ((status === "done" || status === "error") && !state.completedAt) state.completedAt = performance.now();
    state.status = status;
    state.message = message;
    state.streamPreview = void 0; // any stale stream preview is invalidated
    if (version) state.version = version;
    render();
  }
  ensureCacheDir();
  render();
  const limit = pLimit(concurrency);
  // Base-skill results, keyed by package, for the later LLM phase.
  const skillData = /* @__PURE__ */ new Map();
  const baseResults = await Promise.allSettled(packages.map((pkg) => limit(() => syncBaseSkill(pkg, config, cwd, update))));
  logUpdate.done();
  // Partition results: generated, shipped (package bundles its own skill), failed.
  const successfulPkgs = [];
  const shippedPkgs = [];
  const errors = [];
  for (let i = 0; i < baseResults.length; i++) {
    const r = baseResults[i];
    if (r.status === "fulfilled" && r.value !== "shipped") {
      successfulPkgs.push(packages[i]);
      skillData.set(packages[i], r.value);
    } else if (r.status === "fulfilled" && r.value === "shipped") shippedPkgs.push(packages[i]);
    else if (r.status === "rejected") {
      const err = r.reason;
      const reason = err instanceof Error ? `${err.message}\n${err.stack}` : String(err);
      errors.push({
        pkg: packages[i],
        reason
      });
    }
  }
  const pastVerb = config.mode === "update" ? "Updated" : "Created";
  // NOTE(review): `> 1` hides the "(Skipping N)" suffix when exactly one package
  // shipped its own skill — looks like it should be `> 0`; confirm intent.
  const skillMsg = `${pastVerb} ${successfulPkgs.length} base skills${shippedPkgs.length > 1 ? ` (Skipping ${shippedPkgs.length})` : ""}`;
  p.log.success(skillMsg);
  for (const [, data] of skillData) for (const w of data.warnings) p.log.warn(`\x1B[33m${w}\x1B[0m`);
  if (errors.length > 0) for (const { pkg, reason } of errors) p.log.error(` ${pkg}: ${reason}`);
  const globalConfig = readConfig();
  // Phase 2: LLM enhancement — skipped by config, or when --yes without an explicit model.
  if (successfulPkgs.length > 0 && !globalConfig.skipLlm && !(config.yes && !config.model)) {
    const llmConfig = await selectLlmConfig(config.model);
    if (llmConfig) {
      p.log.step(getModelLabel(llmConfig.model));
      // Reset the status board for the enhancement pass.
      for (const pkg of successfulPkgs) states.set(pkg, {
        name: pkg,
        status: "pending",
        message: "Waiting..."
      });
      render();
      const llmResults = await Promise.allSettled(successfulPkgs.map((pkg) => limit(() => enhanceWithLLM(pkg, skillData.get(pkg), {
        ...config,
        model: llmConfig.model
      }, cwd, update, llmConfig.sections, llmConfig.customPrompt))));
      logUpdate.done();
      const llmSucceeded = llmResults.filter((r) => r.status === "fulfilled").length;
      p.log.success(`Enhanced ${llmSucceeded}/${successfulPkgs.length} skills with LLM`);
    }
  }
  await ensureGitignore(getSharedSkillsDir(cwd) ? SHARED_SKILLS_DIR : agent.skillsDir, cwd, config.global);
  await ensureAgentInstructions(config.agent, cwd, config.global);
  await shutdownWorker();
  p.outro(`${pastVerb} ${successfulPkgs.length}/${packages.length} packages`);
}
1230
/** Phase 1: Generate base skill (no LLM). Returns 'shipped' if shipped skill was linked, or BaseSkillData. */
async function syncBaseSkill(packageSpec, config, cwd, update) {
  const { name: packageName, tag: requestedTag } = parsePackageSpec(packageSpec);
  // Prefer the version already installed in this project, if any.
  const localVersion = (await readLocalDependencies(cwd).catch(() => [])).find((d) => d.name === packageName)?.version;
  const { package: resolvedPkg, attempts } = await resolvePackageDocsWithAttempts(requestedTag ? packageSpec : packageName, {
    version: localVersion,
    cwd,
    onProgress: (step) => update(packageName, "resolving", RESOLVE_STEP_LABELS[step])
  });
  let resolved = resolvedPkg;
  // Fallback: resolve from a locally linked/installed dependency.
  if (!resolved) {
    update(packageName, "resolving", "Local package...");
    resolved = await resolveLocalDep(packageName, cwd);
  }
  if (!resolved) {
    // Build the most helpful failure message: npm not-found gets name suggestions,
    // otherwise concatenate all failed attempt messages.
    const npmAttempt = attempts.find((a) => a.source === "npm");
    let reason;
    if (npmAttempt?.status === "not-found") {
      const suggestions = await searchNpmPackages(packageName, 3);
      const hint = suggestions.length > 0 ? ` (try: ${suggestions.map((s) => s.name).join(", ")})` : "";
      reason = (npmAttempt.message || "Not on npm") + hint;
    } else reason = attempts.filter((a) => a.status !== "success").map((a) => a.message || a.source).join("; ") || "No docs found";
    update(packageName, "error", reason);
    throw new Error(`Could not find docs for: ${packageName}`);
  }
  const version = localVersion || resolved.version || "latest";
  const versionKey = getVersionKey(version);
  // Not installed locally: fetch the published dist so key files can be inspected.
  if (!existsSync(join(cwd, "node_modules", packageName))) {
    update(packageName, "downloading", "Downloading dist...", versionKey);
    await fetchPkgDist(packageName, version);
  }
  // Packages that ship their own SKILL.md short-circuit generation entirely.
  const shippedResult = handleShippedSkills(packageName, version, cwd, config.agent, config.global);
  if (shippedResult) {
    const shared = !config.global && getSharedSkillsDir(cwd);
    if (shared) for (const shipped of shippedResult.shipped) linkSkillToAgents(shipped.skillName, shared, cwd);
    update(packageName, "done", "Published SKILL.md", versionKey);
    return "shipped";
  }
  if (config.force) forceClearCache(packageName, version);
  const useCache = isCached(packageName, version);
  if (useCache) update(packageName, "downloading", "Using cache", versionKey);
  else update(packageName, "downloading", config.force ? "Re-fetching docs..." : "Fetching docs...", versionKey);
  const baseDir = resolveBaseDir(cwd, config.agent, config.global);
  const skillDirName = computeSkillDirName(packageName);
  const skillDir = join(baseDir, skillDirName);
  mkdirSync(skillDir, { recursive: true });
  const features = readConfig().features ?? defaultFeatures;
  // Fetch docs/issues/discussions/releases into the cache.
  const resources = await fetchAndCacheResources({
    packageName,
    resolved,
    version,
    useCache,
    features,
    onProgress: (msg) => update(packageName, "downloading", msg, versionKey)
  });
  update(packageName, "downloading", "Linking references...", versionKey);
  linkAllReferences(skillDir, packageName, cwd, version, resources.docsType, void 0, features, resources.repoInfo);
  // Optional semantic search index over the fetched docs.
  if (features.search) {
    update(packageName, "embedding", "Indexing docs", versionKey);
    await indexResources({
      packageName,
      version,
      cwd,
      docsToIndex: resources.docsToIndex,
      features,
      onProgress: (msg) => update(packageName, "embedding", msg, versionKey)
    });
  }
  const hasChangelog = detectChangelog(resolvePkgDir(packageName, cwd, version), getCacheDir(packageName, version));
  const relatedSkills = await findRelatedSkills(packageName, baseDir);
  const shippedDocs = hasShippedDocs(packageName, cwd, version);
  const pkgFiles = getPkgKeyFiles(packageName, cwd, version);
  // Extract "owner/repo" from the repository URL, tolerating .git suffix and paths.
  const repoSlug = resolved.repoUrl?.match(/github\.com\/([^/]+\/[^/]+?)(?:\.git)?(?:[/#]|$)/)?.[1];
  linkPkgNamed(skillDir, packageName, cwd, version);
  writeLock(baseDir, skillDirName, {
    packageName,
    version,
    repo: repoSlug,
    source: resources.docSource,
    syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
    generator: "skilld"
  });
  // Re-read the lock: writeLock may have merged this skill with other packages.
  const updatedLock = readLock(baseDir)?.skills[skillDirName];
  const allPackages = parsePackages(updatedLock?.packages).map((p) => ({ name: p.name }));
  const skillMd = generateSkillMd({
    name: packageName,
    version,
    releasedAt: resolved.releasedAt,
    description: resolved.description,
    dependencies: resolved.dependencies,
    distTags: resolved.distTags,
    relatedSkills,
    hasIssues: resources.hasIssues,
    hasDiscussions: resources.hasDiscussions,
    hasReleases: resources.hasReleases,
    hasChangelog,
    docsType: resources.docsType,
    hasShippedDocs: shippedDocs,
    pkgFiles,
    dirName: skillDirName,
    packages: allPackages.length > 1 ? allPackages : void 0, // only listed for multi-package skills
    repoUrl: resolved.repoUrl,
    features
  });
  writeFileSync(join(skillDir, "SKILL.md"), skillMd);
  const shared = !config.global && getSharedSkillsDir(cwd);
  if (shared) linkSkillToAgents(skillDirName, shared, cwd);
  if (!config.global) registerProject(cwd);
  update(packageName, "done", config.mode === "update" ? "Skill updated" : "Base skill created", versionKey);
  // Everything the LLM phase (enhanceWithLLM) needs to run without re-resolving.
  return {
    resolved,
    version,
    skillDirName,
    docsType: resources.docsType,
    hasIssues: resources.hasIssues,
    hasDiscussions: resources.hasDiscussions,
    hasReleases: resources.hasReleases,
    hasChangelog,
    shippedDocs,
    pkgFiles,
    relatedSkills,
    packages: allPackages.length > 1 ? allPackages : void 0,
    warnings: resources.warnings,
    features,
    usedCache: resources.usedCache
  };
}
1357
/** Phase 2: Enhance skill with LLM */
// Takes the BaseSkillData produced by syncBaseSkill, runs optimizeDocs, and on
// success rewrites SKILL.md with the optimized body. Throws (after reporting
// via `update`) when optimization returns an error.
async function enhanceWithLLM(packageName, data, config, cwd, update, sections, customPrompt) {
  const versionKey = getVersionKey(data.version);
  const skillDir = join(resolveBaseDir(cwd, config.agent, config.global), data.skillDirName);
  const hasGithub = data.hasIssues || data.hasDiscussions;
  const docFiles = listReferenceFiles(skillDir);
  update(packageName, "generating", config.model, versionKey);
  const { optimized, wasOptimized, error } = await optimizeDocs({
    packageName,
    skillDir,
    model: config.model,
    version: data.version,
    hasGithub,
    hasReleases: data.hasReleases,
    hasChangelog: data.hasChangelog,
    docFiles,
    docsType: data.docsType,
    hasShippedDocs: data.shippedDocs,
    noCache: config.force, // --force bypasses the optimization cache
    debug: config.debug,
    sections,
    customPrompt,
    features: data.features,
    pkgFiles: data.pkgFiles,
    // Stream progress into the shared status board; reasoning chunks show as
    // "exploring", output chunks as "generating".
    onProgress: (progress) => {
      const status = progress.type === "reasoning" ? "exploring" : "generating";
      const sectionPrefix = progress.section ? `[${progress.section}] ` : "";
      update(packageName, status, progress.chunk.startsWith("[") ? `${sectionPrefix}${progress.chunk}` : `${sectionPrefix}${config.model}`, versionKey);
    }
  });
  if (error) {
    update(packageName, "error", error, versionKey);
    throw new Error(error);
  }
  if (wasOptimized) {
    // Regenerate SKILL.md with the optimized body and the metadata captured in phase 1.
    const skillMd = generateSkillMd({
      name: packageName,
      version: data.version,
      releasedAt: data.resolved.releasedAt,
      dependencies: data.resolved.dependencies,
      distTags: data.resolved.distTags,
      body: optimized,
      relatedSkills: data.relatedSkills,
      hasIssues: data.hasIssues,
      hasDiscussions: data.hasDiscussions,
      hasReleases: data.hasReleases,
      hasChangelog: data.hasChangelog,
      docsType: data.docsType,
      hasShippedDocs: data.shippedDocs,
      pkgFiles: data.pkgFiles,
      dirName: data.skillDirName,
      packages: data.packages,
      repoUrl: data.resolved.repoUrl,
      features: data.features
    });
    writeFileSync(join(skillDir, "SKILL.md"), skillMd);
  }
  update(packageName, "done", "Skill optimized", versionKey);
}
1416
/**
 * Print the package-resolution attempt trail in dim ANSI colors.
 * Each attempt shows a green check (success) or dim cross, the source
 * tried, and an optional message. No-op when there were no attempts.
 */
function showResolveAttempts(attempts) {
  if (attempts.length === 0) return;
  p.log.message("\x1B[90mResolution attempts:\x1B[0m");
  for (const { status, source, message } of attempts) {
    const mark = status === "success" ? "\x1B[32m✓\x1B[0m" : "\x1B[90m✗\x1B[0m";
    const suffix = message ? ` - ${message}` : "";
    p.log.message(` ${mark} \x1B[90m${source}\x1B[0m${suffix}`);
  }
}
1426
/**
 * Top-level sync dispatcher.
 *
 * With explicit packages in `opts.packages`, syncs them (parallel runner for
 * more than one, single-package flow otherwise). Without explicit packages,
 * prompts the user via the interactive picker and then dispatches the same way.
 *
 * @param {object} state - Project state (deps, missing, outdated skills).
 * @param {object} opts - Sync options (packages, global, agent, model, yes,
 *   force, debug, mode, and single-package extras like eject/name/from).
 */
async function syncCommand(state, opts) {
  // Options forwarded to the parallel runner. Single-package syncs instead
  // receive the full `opts`, which may carry extras (eject/name/from) that
  // the parallel runner does not take. Previously this object was duplicated
  // verbatim at both call sites.
  const parallelOpts = (packages) => ({
    packages,
    global: opts.global,
    agent: opts.agent,
    model: opts.model,
    yes: opts.yes,
    force: opts.force,
    debug: opts.debug,
    mode: opts.mode
  });
  if (opts.packages && opts.packages.length > 0) {
    if (opts.packages.length > 1) return syncPackagesParallel(parallelOpts(opts.packages));
    await syncSinglePackage(opts.packages[0], opts);
    return;
  }
  // No explicit packages: let the user pick interactively.
  const packages = await interactivePicker(state);
  if (!packages || packages.length === 0) {
    p.outro("No packages selected");
    return;
  }
  if (packages.length > 1) return syncPackagesParallel(parallelOpts(packages));
  await syncSinglePackage(packages[0], opts);
}
1458
/**
 * Build the interactive package list and hand it to the picker.
 *
 * Tries to detect imported packages from the project source; when detection
 * fails or finds nothing, falls back to the declared dependencies from
 * `state.deps`. Returns the picker's selection, or null when there is
 * nothing to offer.
 */
async function interactivePicker(state) {
  const spin = timedSpinner();
  spin.start("Detecting imports...");
  const { packages: detected, error } = await detectImportedPackages(process.cwd());
  const declaredMap = state.deps;
  if (error || detected.length === 0) {
    spin.stop(error ? `Detection failed: ${error}` : "No imports detected");
    if (declaredMap.size === 0) {
      p.log.warn("No dependencies found");
      return null;
    }
    // Fall back to declared dependencies; no import counts are available here.
    const declaredEntries = [];
    for (const [name, version] of declaredMap) {
      declaredEntries.push({
        name,
        version: maskPatch(version),
        count: 0,
        inPkgJson: true
      });
    }
    return pickFromList(declaredEntries, state);
  }
  spin.stop(`Loaded ${detected.length} project skills`);
  // Detected imports, annotated with declared-version info where available.
  const candidates = detected.map((pkg) => ({
    name: pkg.name,
    version: declaredMap.get(pkg.name),
    count: pkg.count,
    inPkgJson: declaredMap.has(pkg.name)
  }));
  return pickFromList(candidates, state);
}
1484
/**
 * Mask the patch segment of a semver-ish string: "1.2.3" → "1.2.x".
 * Strings with fewer than three dot-segments are returned unchanged;
 * falsy input yields undefined. Anything past the third segment
 * (including prerelease suffixes) is dropped.
 */
function maskPatch(version) {
  if (!version) return void 0;
  const segments = version.split(".");
  if (segments.length < 3) return version;
  return `${segments[0]}.${segments[1]}.x`;
}
1493
/**
 * Show a multiselect of packages and return the chosen names.
 *
 * Packages declared in package.json get a ★ suffix; hints combine the masked
 * version and import count when present. Packages that are missing a skill
 * or whose skill is outdated are preselected. Returns null on cancel.
 */
async function pickFromList(packages, state) {
  // Names that should start out selected: missing skills plus outdated ones.
  const needsSync = new Set(state.missing);
  for (const s of state.outdated) needsSync.add(s.name);
  const options = [];
  const initialValues = [];
  for (const pkg of packages) {
    const hintParts = [];
    const masked = maskPatch(pkg.version);
    if (masked) hintParts.push(masked);
    if (pkg.count > 0) hintParts.push(`${pkg.count} imports`);
    options.push({
      label: pkg.inPkgJson ? `${pkg.name} ★` : pkg.name,
      value: pkg.name,
      hint: hintParts.length > 0 ? hintParts.join(" · ") : void 0
    });
    if (needsSync.has(pkg.name)) initialValues.push(pkg.name);
  }
  const selected = await p.multiselect({
    message: "Select packages to sync",
    options,
    required: false,
    initialValues
  });
  if (p.isCancel(selected)) {
    p.cancel("Cancelled");
    return null;
  }
  return selected;
}
1514
/**
 * Full sync pipeline for a single package.
 *
 * Resolves the package (registry attempts, then local dep, then npm search
 * suggestions), fetches/caches its resources, links references, optionally
 * builds a search index, writes SKILL.md plus the lock entry, optionally
 * enhances the skill with an LLM, and handles the eject and shared-skills
 * wiring. Several branches return early: shipped SKILL.md, merge into an
 * existing skill dir, or resolution failure.
 *
 * @param {string} packageSpec - Package name, optionally with a tag ("pkg@beta").
 * @param {object} config - Sync options (agent, global, model, yes, force,
 *   debug, mode, eject, name, from).
 */
async function syncSinglePackage(packageSpec, config) {
  const { name: packageName, tag: requestedTag } = parsePackageSpec(packageSpec);
  const spin = timedSpinner();
  spin.start(`Resolving ${packageSpec}`);
  const cwd = process.cwd();
  // Version pinned by the project's own dependency list, if declared.
  // readLocalDependencies failures are treated as "no local deps".
  const localVersion = (await readLocalDependencies(cwd).catch(() => [])).find((d) => d.name === packageName)?.version;
  // When a tag was requested, pass the full spec through so the tag is honored.
  const resolveResult = await resolvePackageDocsWithAttempts(requestedTag ? packageSpec : packageName, {
    version: localVersion,
    cwd,
    onProgress: (step) => spin.message(`${packageName}: ${RESOLVE_STEP_LABELS[step]}`)
  });
  let resolved = resolveResult.package;
  if (!resolved) {
    // Fallback 1: resolve from the local dependency tree.
    spin.message(`Resolving local package: ${packageName}`);
    resolved = await resolveLocalDep(packageName, cwd);
  }
  if (!resolved) {
    // Fallback 2: npm search, offering did-you-mean suggestions.
    spin.message(`Searching npm for "${packageName}"...`);
    const suggestions = await searchNpmPackages(packageName);
    if (suggestions.length > 0) {
      spin.stop(`Package "${packageName}" not found on npm`);
      showResolveAttempts(resolveResult.attempts);
      const selected = await p.select({
        message: "Did you mean one of these?",
        options: [...suggestions.map((s) => ({
          label: s.name,
          value: s.name,
          hint: s.description
        })), {
          label: "None of these",
          value: "_none_"
        }]
      });
      // Restart the pipeline with the chosen suggestion.
      if (!p.isCancel(selected) && selected !== "_none_") return syncSinglePackage(selected, config);
      return;
    }
    spin.stop(`Could not find docs for: ${packageName}`);
    showResolveAttempts(resolveResult.attempts);
    return;
  }
  // Prefer the locally declared version, then the resolved one.
  const version = localVersion || resolved.version || "latest";
  const versionKey = getVersionKey(version);
  if (config.force) forceClearCache(packageName, version);
  const useCache = isCached(packageName, version);
  if (!existsSync(join(cwd, "node_modules", packageName))) {
    // Package not installed locally: pull its dist so docs can be read.
    spin.message(`Downloading ${packageName}@${version} dist`);
    await fetchPkgDist(packageName, version);
  }
  // Packages that ship their own SKILL.md short-circuit the generation flow.
  const shippedResult = handleShippedSkills(packageName, version, cwd, config.agent, config.global);
  if (shippedResult) {
    const shared = !config.global && getSharedSkillsDir(cwd);
    for (const shipped of shippedResult.shipped) {
      if (shared) linkSkillToAgents(shipped.skillName, shared, cwd);
      p.log.success(`Using published SKILL.md: ${shipped.skillName} → ${relative(cwd, shipped.skillDir)}`);
    }
    spin.stop(`Using published SKILL.md(s) from ${packageName}`);
    return;
  }
  spin.stop(`Resolved ${packageName}@${useCache ? versionKey : version}${config.force ? " (force)" : useCache ? " (cached)" : ""}`);
  if (!localVersion && !requestedTag && !isPrerelease(version)) {
    // No local pin and no explicit tag: hint at available prerelease channels.
    const nextTag = resolved.distTags?.next ?? resolved.distTags?.beta ?? resolved.distTags?.alpha;
    if (nextTag) p.log.warn(`\x1B[33mNo local dependency found — using latest stable (${version}). Prerelease ${nextTag.version} available: skilld add ${packageName}@beta\x1B[0m`);
  }
  ensureCacheDir();
  const baseDir = resolveBaseDir(cwd, config.agent, config.global);
  const skillDirName = config.name ? sanitizeName(config.name) : computeSkillDirName(packageName);
  // Eject targets either the explicit out-dir or ./skills; otherwise the agent base dir.
  const skillDir = config.eject ? typeof config.eject === "string" ? join(resolve(cwd, config.eject), skillDirName) : join(cwd, "skills", skillDirName) : join(baseDir, skillDirName);
  mkdirSync(skillDir, { recursive: true });
  const existingLock = config.eject ? void 0 : readLock(baseDir)?.skills[skillDirName];
  // Merge branch: the skill dir is already owned by a DIFFERENT package
  // (e.g. a custom --name pointing at an existing skill). Link this package
  // in alongside and regenerate SKILL.md for the existing owner.
  if (existingLock && existingLock.packageName && existingLock.packageName !== packageName) {
    spin.stop(`Merging ${packageName} into ${skillDirName}`);
    linkPkgNamed(skillDir, packageName, cwd, version);
    // Extract "owner/repo" from a GitHub URL, tolerating .git suffixes and paths.
    const repoSlug = resolved.repoUrl?.match(/github\.com\/([^/]+\/[^/]+?)(?:\.git)?(?:[/#]|$)/)?.[1];
    writeLock(baseDir, skillDirName, {
      packageName,
      version,
      repo: repoSlug,
      source: existingLock.source,
      // Date-only timestamp (YYYY-MM-DD).
      syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
      generator: "skilld"
    });
    // Re-read the lock so the merged package list is current.
    const updatedLock = readLock(baseDir)?.skills[skillDirName];
    const allPackages = parsePackages(updatedLock?.packages).map((p) => ({ name: p.name }));
    const relatedSkills = await findRelatedSkills(packageName, baseDir);
    const pkgFiles = getPkgKeyFiles(existingLock.packageName, cwd, existingLock.version);
    const shippedDocs = hasShippedDocs(existingLock.packageName, cwd, existingLock.version);
    const mergeFeatures = readConfig().features ?? defaultFeatures;
    // SKILL.md is regenerated for the ORIGINAL owner of the dir, with
    // resource flags derived from what actually exists on disk.
    const skillMd = generateSkillMd({
      name: existingLock.packageName,
      version: existingLock.version,
      relatedSkills,
      hasIssues: mergeFeatures.issues && existsSync(join(skillDir, ".skilld", "issues")),
      hasDiscussions: mergeFeatures.discussions && existsSync(join(skillDir, ".skilld", "discussions")),
      hasReleases: mergeFeatures.releases && existsSync(join(skillDir, ".skilld", "releases")),
      docsType: existingLock.source?.includes("llms.txt") ? "llms.txt" : "docs",
      hasShippedDocs: shippedDocs,
      pkgFiles,
      dirName: skillDirName,
      packages: allPackages,
      features: mergeFeatures
    });
    writeFileSync(join(skillDir, "SKILL.md"), skillMd);
    const mergeShared = !config.global && getSharedSkillsDir(cwd);
    if (mergeShared) linkSkillToAgents(skillDirName, mergeShared, cwd);
    if (!config.global) registerProject(cwd);
    p.outro(`Merged ${packageName} into ${skillDirName}`);
    return;
  }
  // Normal (non-merge) path: fetch resources, index, and generate.
  const features = readConfig().features ?? defaultFeatures;
  const resSpin = timedSpinner();
  resSpin.start("Finding resources");
  const resources = await fetchAndCacheResources({
    packageName,
    resolved,
    version,
    useCache,
    features,
    from: config.from,
    onProgress: (msg) => resSpin.message(msg)
  });
  // Build a human-readable summary of what was fetched/loaded.
  const resParts = [];
  if (resources.docsToIndex.length > 0) {
    const docCount = resources.docsToIndex.filter((d) => d.metadata?.type === "doc").length;
    if (docCount > 0) resParts.push(`${docCount} docs`);
  }
  if (resources.hasIssues) resParts.push("issues");
  if (resources.hasDiscussions) resParts.push("discussions");
  if (resources.hasReleases) resParts.push("releases");
  resSpin.stop(resources.usedCache ? `Loaded ${resParts.length > 0 ? resParts.join(", ") : "resources"} (cached)` : `Fetched ${resParts.length > 0 ? resParts.join(", ") : "resources"}`);
  for (const w of resources.warnings) p.log.warn(`\x1B[33m${w}\x1B[0m`);
  linkAllReferences(skillDir, packageName, cwd, version, resources.docsType, void 0, features, resources.repoInfo);
  if (features.search) {
    const idxSpin = timedSpinner();
    idxSpin.start("Creating search index");
    await indexResources({
      packageName,
      version,
      cwd,
      docsToIndex: resources.docsToIndex,
      features,
      onProgress: (msg) => idxSpin.message(msg)
    });
    idxSpin.stop("Search index ready");
  }
  const hasChangelog = detectChangelog(resolvePkgDir(packageName, cwd, version), getCacheDir(packageName, version));
  const relatedSkills = await findRelatedSkills(packageName, baseDir);
  const shippedDocs = hasShippedDocs(packageName, cwd, version);
  const pkgFiles = getPkgKeyFiles(packageName, cwd, version);
  // Extract "owner/repo" from a GitHub URL, tolerating .git suffixes and paths.
  const repoSlug = resolved.repoUrl?.match(/github\.com\/([^/]+\/[^/]+?)(?:\.git)?(?:[/#]|$)/)?.[1];
  // Ejected skills are standalone: no package link and no lock entry.
  if (!config.eject) linkPkgNamed(skillDir, packageName, cwd, version);
  if (!config.eject) writeLock(baseDir, skillDirName, {
    packageName,
    version,
    repo: repoSlug,
    source: resources.docSource,
    syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
    generator: "skilld"
  });
  const allPackages = parsePackages((config.eject ? void 0 : readLock(baseDir)?.skills[skillDirName])?.packages).map((p) => ({ name: p.name }));
  const isEject = !!config.eject;
  // Write the base (pre-LLM) SKILL.md so the skill is usable even if the
  // enhancement step is skipped or fails.
  const baseSkillMd = generateSkillMd({
    name: packageName,
    version,
    releasedAt: resolved.releasedAt,
    description: resolved.description,
    dependencies: resolved.dependencies,
    distTags: resolved.distTags,
    relatedSkills,
    hasIssues: resources.hasIssues,
    hasDiscussions: resources.hasDiscussions,
    hasReleases: resources.hasReleases,
    hasChangelog,
    docsType: resources.docsType,
    hasShippedDocs: shippedDocs,
    pkgFiles,
    dirName: skillDirName,
    packages: allPackages.length > 1 ? allPackages : void 0,
    repoUrl: resolved.repoUrl,
    features,
    eject: isEject
  });
  writeFileSync(join(skillDir, "SKILL.md"), baseSkillMd);
  p.log.success(config.mode === "update" ? `Updated skill: ${relative(cwd, skillDir)}` : `Created base skill: ${relative(cwd, skillDir)}`);
  // LLM enhancement runs unless globally disabled; with --yes it only runs
  // when a model was explicitly given (avoids prompting in unattended mode).
  if (!readConfig().skipLlm && (!config.yes || config.model)) {
    const llmConfig = await selectLlmConfig(config.model);
    if (llmConfig) {
      p.log.step(getModelLabel(llmConfig.model));
      await enhanceSkillWithLLM({
        packageName,
        version,
        skillDir,
        dirName: skillDirName,
        model: llmConfig.model,
        resolved,
        relatedSkills,
        hasIssues: resources.hasIssues,
        hasDiscussions: resources.hasDiscussions,
        hasReleases: resources.hasReleases,
        hasChangelog,
        docsType: resources.docsType,
        hasShippedDocs: shippedDocs,
        pkgFiles,
        force: config.force,
        debug: config.debug,
        sections: llmConfig.sections,
        customPrompt: llmConfig.customPrompt,
        packages: allPackages.length > 1 ? allPackages : void 0,
        features,
        eject: isEject
      });
    }
  }
  if (isEject) {
    // Drop internal working files (kept in debug mode) and materialize
    // references as real files instead of symlinks.
    const skilldDir = join(skillDir, ".skilld");
    if (existsSync(skilldDir) && !config.debug) rmSync(skilldDir, {
      recursive: true,
      force: true
    });
    ejectReferences(skillDir, packageName, cwd, version, resources.docsType, features, resources.repoInfo);
  }
  if (!isEject) {
    // Standard install wiring: shared-skills links, project registration,
    // gitignore and agent-instruction upkeep.
    const shared = !config.global && getSharedSkillsDir(cwd);
    if (shared) linkSkillToAgents(skillDirName, shared, cwd);
    if (!config.global) registerProject(cwd);
    await ensureGitignore(shared ? SHARED_SKILLS_DIR : targets[config.agent].skillsDir, cwd, config.global);
    await ensureAgentInstructions(config.agent, cwd, config.global);
  }
  await shutdownWorker();
  const ejectMsg = isEject ? " (ejected)" : "";
  p.outro(config.mode === "update" ? `Updated ${packageName}${ejectMsg}` : `Synced ${packageName} to ${relative(cwd, skillDir)}${ejectMsg}`);
}
1745
/**
 * `skilld add` command: add skills for one or more packages.
 *
 * Inputs may mix npm package specs and git-skill sources; git sources are
 * synced first (one by one), then all npm tokens go through syncCommand.
 *
 * Fixes over the previous revision: the `--skill` filter was parsed inside
 * the per-source loop although it does not depend on the loop variable
 * (loop-invariant), and the loop was wrapped in a redundant length guard
 * (`for...of` over an empty array already does nothing).
 */
const addCommandDef = defineCommand({
  meta: {
    name: "add",
    description: "Add skills for package(s)"
  },
  args: {
    package: {
      type: "positional",
      description: "Package(s) to sync (space or comma-separated, e.g., vue nuxt pinia)",
      required: true
    },
    skill: {
      type: "string",
      alias: "s",
      description: "Select specific skills from a git repo (comma-separated)",
      valueHint: "name"
    },
    ...sharedArgs
  },
  async run({ args }) {
    const cwd = process.cwd();
    let agent = resolveAgent(args.agent);
    if (!agent) {
      agent = await promptForAgent();
      if (!agent) return;
    }
    if (!hasCompletedWizard()) await runWizard();
    // Dedupe the positional inputs (first positional + any extras).
    const rawInputs = [...new Set([args.package, ...args._ || []].map((s) => s.trim()).filter(Boolean))];
    // Partition inputs into git-skill sources and npm package tokens.
    const gitSources = [];
    const npmTokens = [];
    for (const input of rawInputs) {
      const git = parseGitSkillInput(input);
      if (git) gitSources.push(git);
      else npmTokens.push(input);
    }
    // --skill filter is independent of the source; parse it once.
    const skillFilter = args.skill ? args.skill.split(/[,\s]+/).map((s) => s.trim()).filter(Boolean) : void 0;
    for (const source of gitSources) {
      await syncGitSkills({
        source,
        global: args.global,
        agent,
        yes: args.yes,
        model: args.model,
        force: args.force,
        debug: args.debug,
        skillFilter
      });
    }
    if (npmTokens.length > 0) {
      // Tokens may themselves be comma/space-separated lists; flatten and dedupe.
      const packages = [...new Set(npmTokens.flatMap((s) => s.split(/[,\s]+/)).map((s) => s.trim()).filter(Boolean))];
      const state = await getProjectState(cwd);
      p.intro(introLine({ state }));
      return syncCommand(state, {
        packages,
        global: args.global,
        agent,
        model: args.model,
        yes: args.yes,
        force: args.force,
        debug: args.debug
      });
    }
  }
});
1809
/**
 * `skilld eject` command: write a skill with its references as plain files
 * (portable, no symlinks) by routing through syncCommand with `eject` set.
 */
const ejectCommandDef = defineCommand({
  meta: {
    name: "eject",
    description: "Eject skill with references as real files (portable, no symlinks)"
  },
  args: {
    package: {
      type: "positional",
      description: "Package to eject",
      required: true
    },
    name: {
      type: "string",
      alias: "n",
      description: "Custom skill directory name (default: derived from package)"
    },
    out: {
      type: "string",
      alias: "o",
      description: "Output directory path override"
    },
    from: {
      type: "string",
      description: "Collect releases/issues/discussions from this date onward (YYYY-MM-DD)"
    },
    ...sharedArgs
  },
  async run({ args }) {
    const cwd = process.cwd();
    // Eject never prompts for an agent; default to claude-code.
    const agent = resolveAgent(args.agent) || "claude-code";
    if (!hasCompletedWizard()) await runWizard();
    const state = await getProjectState(cwd);
    p.intro(introLine({ state }));
    // `eject` carries the output path when given, otherwise just `true`.
    const syncOpts = {
      packages: [args.package],
      global: args.global,
      agent,
      model: args.model,
      yes: args.yes,
      force: args.force,
      debug: args.debug,
      eject: args.out || true,
      name: args.name,
      from: args.from
    };
    return syncCommand(state, syncOpts);
  }
});
1856
/**
 * `skilld update` command: re-sync named packages, or all outdated skills
 * when called without arguments.
 *
 * `--background` re-spawns the CLI detached and returns immediately.
 * In non-interactive (silent) sessions, prompts are skipped and the
 * configured model / --yes behavior is applied automatically.
 *
 * Fix over the previous revision: the syncCommand options object was
 * duplicated verbatim at both call sites; it is now built by one helper.
 */
const updateCommandDef = defineCommand({
  meta: {
    name: "update",
    description: "Update outdated skills"
  },
  args: {
    package: {
      type: "positional",
      description: "Package(s) to update (space or comma-separated). Without args, syncs all outdated.",
      required: false
    },
    background: {
      type: "boolean",
      alias: "b",
      description: "Run in background (detached process, non-interactive)",
      default: false
    },
    ...sharedArgs
  },
  async run({ args }) {
    const cwd = process.cwd();
    if (args.background) {
      // Re-invoke this CLI detached so the terminal returns immediately.
      const { spawn } = await import("node:child_process");
      const updateArgs = [
        "update",
        ...args.package ? [args.package] : [],
        ...args.agent ? ["--agent", args.agent] : [],
        ...args.model ? ["--model", args.model] : []
      ];
      spawn(process.execPath, [process.argv[1], ...updateArgs], {
        cwd,
        detached: true,
        stdio: "ignore"
      }).unref();
      return;
    }
    const silent = !isInteractive();
    let agent = resolveAgent(args.agent);
    if (!agent) {
      // Can't prompt without a TTY; bail quietly.
      if (silent) return;
      agent = await promptForAgent();
      if (!agent) return;
    }
    const config = readConfig();
    const state = await getProjectState(cwd);
    if (!silent) {
      const generators = getInstalledGenerators();
      p.intro(introLine({
        state,
        generators,
        modelId: config.model
      }));
    }
    // Shared option construction for both sync paths (explicit packages and
    // all-outdated). Silent runs fall back to the configured model and
    // behave as if --yes was passed.
    const syncOpts = (packages) => ({
      packages,
      global: args.global,
      agent,
      model: args.model || (silent ? config.model : void 0),
      yes: args.yes || silent,
      force: args.force,
      debug: args.debug,
      mode: "update"
    });
    if (args.package) {
      // Positionals may be comma/space-separated lists; flatten and dedupe.
      const packages = [...new Set([args.package, ...args._ || []].flatMap((s) => s.split(/[,\s]+/)).map((s) => s.trim()).filter(Boolean))];
      return syncCommand(state, syncOpts(packages));
    }
    if (state.outdated.length === 0) {
      p.log.success("All skills up to date");
      return;
    }
    return syncCommand(state, syncOpts(state.outdated.map((s) => s.packageName || s.name)));
  }
});
1935
// Public chunk exports. The single-letter aliases are assigned by the bundler
// and are imported under these names by sibling chunks — do not rename.
export { DEFAULT_SECTIONS as a, classifyCachedDoc as c, ensureGitignore as d, indexResources as f, selectSkillSections as h, updateCommandDef as i, enhanceSkillWithLLM as l, selectModel as m, ejectCommandDef as n, SKILLD_MARKER_END as o, selectLlmConfig as p, syncCommand as r, SKILLD_MARKER_START as s, addCommandDef as t, ensureAgentInstructions as u };

//# sourceMappingURL=sync.mjs.map