skilld 1.4.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -4
- package/dist/_chunks/agent.mjs +2 -1
- package/dist/_chunks/agent.mjs.map +1 -1
- package/dist/_chunks/assemble.mjs +1 -0
- package/dist/_chunks/assemble.mjs.map +1 -1
- package/dist/_chunks/author.mjs +478 -0
- package/dist/_chunks/author.mjs.map +1 -0
- package/dist/_chunks/cli-helpers.mjs +133 -2
- package/dist/_chunks/cli-helpers.mjs.map +1 -1
- package/dist/_chunks/detect.mjs.map +1 -1
- package/dist/_chunks/index2.d.mts +2 -0
- package/dist/_chunks/index2.d.mts.map +1 -1
- package/dist/_chunks/install.mjs +7 -17
- package/dist/_chunks/install.mjs.map +1 -1
- package/dist/_chunks/list.mjs +2 -1
- package/dist/_chunks/list.mjs.map +1 -1
- package/dist/_chunks/lockfile.mjs +140 -0
- package/dist/_chunks/lockfile.mjs.map +1 -0
- package/dist/_chunks/prepare.mjs +94 -0
- package/dist/_chunks/prepare.mjs.map +1 -0
- package/dist/_chunks/prompts.mjs +32 -43
- package/dist/_chunks/prompts.mjs.map +1 -1
- package/dist/_chunks/sanitize.mjs.map +1 -1
- package/dist/_chunks/search-interactive.mjs +1 -0
- package/dist/_chunks/search-interactive.mjs.map +1 -1
- package/dist/_chunks/search.mjs +146 -9
- package/dist/_chunks/search.mjs.map +1 -1
- package/dist/_chunks/setup.mjs +1 -1
- package/dist/_chunks/skills.mjs +28 -142
- package/dist/_chunks/skills.mjs.map +1 -1
- package/dist/_chunks/sources.mjs +4 -2
- package/dist/_chunks/sources.mjs.map +1 -1
- package/dist/_chunks/sync-shared.mjs +14 -0
- package/dist/_chunks/sync-shared2.mjs +1054 -0
- package/dist/_chunks/sync-shared2.mjs.map +1 -0
- package/dist/_chunks/sync.mjs +72 -1065
- package/dist/_chunks/sync.mjs.map +1 -1
- package/dist/_chunks/uninstall.mjs +5 -3
- package/dist/_chunks/uninstall.mjs.map +1 -1
- package/dist/agent/index.d.mts +4 -2
- package/dist/agent/index.d.mts.map +1 -1
- package/dist/cli.mjs +76 -10
- package/dist/cli.mjs.map +1 -1
- package/package.json +7 -6
|
@@ -0,0 +1,1054 @@
|
|
|
1
|
+
import { a as getModelLabel, i as getAvailableModels, o as getModelName, r as createToolProgress, s as optimizeDocs } from "./agent.mjs";
|
|
2
|
+
import { a as getRepoCacheDir, i as getPackageDbPath, o as getCacheDir } from "./config.mjs";
|
|
3
|
+
import { n as sanitizeMarkdown } from "./sanitize.mjs";
|
|
4
|
+
import { _ as resolvePkgDir, a as getShippedSkills, b as writeToRepoCache, c as linkCachedDir, d as linkRepoCachedDir, f as linkShippedSkill, h as readCachedDocs, l as linkPkg, m as listReferenceFiles, n as clearCache, o as hasShippedDocs, u as linkPkgNamed, y as writeToCache } from "./cache.mjs";
|
|
5
|
+
import { i as parseFrontmatter } from "./markdown.mjs";
|
|
6
|
+
import { SearchDepsUnavailableError, createIndex, listIndexIds } from "../retriv/index.mjs";
|
|
7
|
+
import { a as semverDiff, c as getBlogPreset, n as getSharedSkillsDir, p as getPrereleaseChangelogRef } from "./shared.mjs";
|
|
8
|
+
import { B as isGhAvailable, C as downloadLlmsDocs, D as normalizeLlmsLinks, F as formatDiscussionAsMarkdown, G as fetchReleaseNotes, H as toCrawlPattern, I as generateDiscussionIndex, K as generateReleaseIndex, L as fetchGitHubIssues, M as resolveEntryFiles, N as generateDocsIndex, P as fetchGitHubDiscussions, R as formatIssueAsMarkdown, T as fetchLlmsTxt, U as fetchBlogReleases, V as fetchCrawledDocs, Z as fetchGitHubRaw, b as isShallowGitDocs, h as fetchGitDocs, n as fetchNpmPackage, q as isPrerelease, tt as parseGitHubUrl, u as resolveLocalPackageDocs, v as fetchReadmeContent, y as filterFrameworkDocs, z as generateIssueIndex } from "./sources.mjs";
|
|
9
|
+
import { a as targets } from "./detect.mjs";
|
|
10
|
+
import { c as SECTION_OUTPUT_FILES, g as maxLines, h as maxItems, l as buildAllSectionPrompts, t as generateSkillMd } from "./prompts.mjs";
|
|
11
|
+
import { D as registerProject, E as readConfig, S as defaultFeatures, f as pickModel, k as updateConfig, l as isInteractive, t as NO_MODELS_MESSAGE } from "./cli-helpers.mjs";
|
|
12
|
+
import { i as readLock, n as parsePackages, s as writeLock } from "./lockfile.mjs";
|
|
13
|
+
import { join, relative, resolve } from "pathe";
|
|
14
|
+
import { appendFileSync, copyFileSync, existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
|
|
15
|
+
import * as p from "@clack/prompts";
|
|
16
|
+
//#region src/commands/sync-shared.ts
/** Max docs sent to the embedding pipeline to prevent oversized indexes */
const MAX_INDEX_DOCS = 250;
/** Human-readable progress labels keyed by docs-resolution step id. */
const RESOLVE_STEP_LABELS = {
  "npm": "npm registry",
  "github-docs": "GitHub docs",
  "github-meta": "GitHub meta",
  "github-search": "GitHub search",
  "readme": "README",
  "llms.txt": "llms.txt",
  "crawl": "website crawl",
  "local": "node_modules"
};
|
|
29
|
+
/**
 * Classify a cached doc path into the right metadata type.
 * Issue/discussion files carry their number in the filename; everything
 * under releases/ is a release note; anything else is a plain doc.
 */
function classifyCachedDoc(path) {
  // Numbered entry kinds, checked in order.
  const numberedKinds = [
    ["issue", /^issues\/issue-(\d+)\.md$/],
    ["discussion", /^discussions\/discussion-(\d+)\.md$/]
  ];
  for (const [type, pattern] of numberedKinds) {
    const match = pattern.exec(path);
    if (match) return {
      type,
      number: Number(match[1])
    };
  }
  return { type: path.startsWith("releases/") ? "release" : "doc" };
}
|
|
44
|
+
/**
 * Find installed skills related to a package via its npm dependencies.
 * Looks up the package on npm, maps each dependency to an installed skill
 * directory using the lockfile, and returns at most five matches.
 */
async function findRelatedSkills(packageName, skillsDir) {
  const npmInfo = await fetchNpmPackage(packageName);
  if (!npmInfo?.dependencies) return [];
  if (!existsSync(skillsDir)) return [];
  // Map every package name recorded in the lockfile to its skill directory.
  const dirByPackage = new Map();
  const lock = readLock(skillsDir);
  if (lock) {
    for (const [dirName, info] of Object.entries(lock.skills)) {
      if (info.packageName) dirByPackage.set(info.packageName, dirName);
      for (const pkg of parsePackages(info.packages)) dirByPackage.set(pkg.name, dirName);
    }
  }
  const installed = new Set(readdirSync(skillsDir));
  const related = [];
  const deps = new Set(Object.keys(npmInfo.dependencies));
  for (const dep of deps) {
    const dirName = dirByPackage.get(dep);
    if (dirName && installed.has(dirName)) related.push(dirName);
  }
  // Cap at 5 related skills.
  return related.slice(0, 5);
}
|
|
64
|
+
/**
 * Clear cache + db for --force flag.
 * Removes the package doc cache, its search database, and (when repo info
 * is known) the shared repo-level cache directory.
 */
function forceClearCache(packageName, version, repoInfo) {
  clearCache(packageName, version);
  // Best-effort recursive removal of a directory if present.
  const removeDir = (dir) => {
    if (existsSync(dir)) rmSync(dir, {
      recursive: true,
      force: true
    });
  };
  removeDir(getPackageDbPath(packageName, version));
  if (repoInfo) removeDir(getRepoCacheDir(repoInfo.owner, repoInfo.repo));
}
|
|
80
|
+
/**
 * Link all reference symlinks (pkg, docs, issues, discussions, releases).
 * Feature-gated dirs (issues/discussions/releases) come from the repo-level
 * cache when repoInfo is known, otherwise from the per-package cache.
 * All link failures are swallowed: linking is best-effort.
 */
function linkAllReferences(skillDir, packageName, cwd, version, docsType, extraPackages, features, repoInfo) {
  const enabled = features ?? readConfig().features ?? defaultFeatures;
  // Prefer the shared repo cache over the per-package cache for these dirs.
  const linkFeatureDir = (kind) => {
    if (repoInfo) {
      linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, kind);
    } else {
      linkCachedDir(skillDir, packageName, version, kind);
    }
  };
  try {
    linkPkg(skillDir, packageName, cwd, version);
    linkPkgNamed(skillDir, packageName, cwd, version);
    // Skip the docs link when the package ships its own docs or only a README was cached.
    if (!hasShippedDocs(packageName, cwd, version) && docsType !== "readme") {
      linkCachedDir(skillDir, packageName, version, "docs");
    }
    if (enabled.issues) linkFeatureDir("issues");
    if (enabled.discussions) linkFeatureDir("discussions");
    if (enabled.releases) linkFeatureDir("releases");
    linkCachedDir(skillDir, packageName, version, "sections");
    if (extraPackages) {
      for (const pkg of extraPackages) {
        if (pkg.name !== packageName) linkPkgNamed(skillDir, pkg.name, cwd, pkg.version);
      }
    }
  } catch {
    // Symlink failures (permissions, missing cache) are intentionally non-fatal.
  }
}
|
|
99
|
+
/**
 * Detect docs type from cached directory contents.
 *
 * Checks the package's cache dir in priority order:
 *  1. real docs (docs/index.md or docs/guide) -> "docs", sourced from the repo tag when known
 *  2. llms.txt -> "llms.txt"
 *  3. anything else (including a cached README) -> "readme"
 *
 * Fix: the original had an explicit `docs/README.md` branch that returned the
 * exact same value as the final fallback — a duplicate, dead branch. Collapsed
 * into the single default return.
 */
function detectDocsType(packageName, version, repoUrl, llmsUrl) {
  const cacheDir = getCacheDir(packageName, version);
  const hasRealDocs = existsSync(join(cacheDir, "docs", "index.md")) || existsSync(join(cacheDir, "docs", "guide"));
  if (hasRealDocs) return {
    docsType: "docs",
    docSource: repoUrl ? `${repoUrl}/tree/v${version}/docs` : "git"
  };
  if (existsSync(join(cacheDir, "llms.txt"))) return {
    docsType: "llms.txt",
    docSource: llmsUrl || "llms.txt"
  };
  // Default: README-only cache (whether or not docs/README.md exists).
  return { docsType: "readme" };
}
|
|
113
|
+
/**
 * Link shipped skills, write lock entries, register project.
 * Returns { shipped, baseDir } or null if the package ships no skills.
 */
function handleShippedSkills(packageName, version, cwd, agent, global) {
  const shipped = getShippedSkills(packageName, cwd, version);
  if (shipped.length === 0) return null;
  const baseDir = resolveBaseDir(cwd, agent, global);
  mkdirSync(baseDir, { recursive: true });
  for (const skill of shipped) {
    linkShippedSkill(baseDir, skill.skillName, skill.skillDir);
    // Lock entry marks the skill as shipped-by-package (not generated).
    writeLock(baseDir, skill.skillName, {
      packageName,
      version,
      source: "shipped",
      syncedAt: new Date().toISOString().split("T")[0],
      generator: "skilld"
    });
  }
  // Only project-local installs are tracked in the registry.
  if (!global) registerProject(cwd);
  return {
    shipped,
    baseDir
  };
}
|
|
135
|
+
/**
 * Resolve the base skills directory for an agent.
 * Global installs use the agent's global dir; project installs prefer a
 * shared skills dir, falling back to the agent's per-project dir.
 */
function resolveBaseDir(cwd, agent, global) {
  if (global) return targets[agent].globalSkillsDir;
  return getSharedSkillsDir(cwd) || join(cwd, targets[agent].skillsDir);
}
|
|
143
|
+
/**
 * Try resolving a `link:` dependency to local package docs.
 * Returns null if not a link dep or resolution fails.
 */
async function resolveLocalDep(packageName, cwd) {
  const manifestPath = join(cwd, "package.json");
  if (!existsSync(manifestPath)) return null;
  const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
  // devDependencies win over dependencies on a name collision (spread order).
  const spec = {
    ...manifest.dependencies,
    ...manifest.devDependencies
  }[packageName];
  if (!spec?.startsWith("link:")) return null;
  const linkTarget = spec.slice("link:".length);
  return resolveLocalPackageDocs(resolve(cwd, linkTarget));
}
|
|
155
|
+
/**
 * Detect CHANGELOG.md in a package directory or cached releases.
 * Returns a skill-relative path ("pkg/..." or "releases/CHANGELOG.md"),
 * or false when no changelog is found.
 */
function detectChangelog(pkgDir, cacheDir) {
  if (pkgDir) {
    // The package's own changelog wins; check both common casings.
    for (const candidate of ["CHANGELOG.md", "changelog.md"]) {
      if (existsSync(join(pkgDir, candidate))) return `pkg/${candidate}`;
    }
  }
  const cachedChangelog = cacheDir ? join(cacheDir, "releases", "CHANGELOG.md") : null;
  return cachedChangelog && existsSync(cachedChangelog) ? "releases/CHANGELOG.md" : false;
}
|
|
164
|
+
/**
 * Fetch and cache all resources for a package (docs cascade + issues + discussions + releases).
 *
 * Docs resolution cascade (first non-empty source wins, each writes to the package cache):
 *   git docs -> website crawl -> llms.txt -> docs-site crawl -> README.
 * When `opts.useCache` is set and the cache already holds non-README docs, the fetch
 * phase is skipped and cached docs are re-read for indexing instead.
 *
 * Issues/discussions/releases are fetched via the GitHub CLI (when available and the
 * feature is enabled) and cached at repo level when the repo is known, otherwise per
 * package/version.
 *
 * Returns { docSource, docsType, docsToIndex, hasIssues, hasDiscussions, hasReleases,
 * warnings, repoInfo, usedCache }.
 */
async function fetchAndCacheResources(opts) {
  const { packageName, resolved, version, onProgress } = opts;
  const features = opts.features ?? readConfig().features ?? defaultFeatures;
  // A README-only cache for a package that has a crawl URL means the previous
  // sync never got real docs — invalidate and retry the crawl.
  const cacheInvalidated = opts.useCache && resolved.crawlUrl && detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl).docsType === "readme";
  const useCache = opts.useCache && !cacheInvalidated;
  let docSource = resolved.readmeUrl || "readme";
  let docsType = "readme";
  // Docs accumulated for the search index; shared by all branches below.
  const docsToIndex = [];
  const warnings = [];
  if (cacheInvalidated) warnings.push(`Retrying crawl for ${resolved.crawlUrl} (previous attempt only cached README)`);
  if (!useCache) {
    const cachedDocs = [];
    // Keep only docs that belong to this package's framework (filters vendor noise).
    const isFrameworkDoc = (path) => filterFrameworkDocs([path], packageName).length > 0;
    // --- Step 1: docs/ directory from the git repo, batched raw downloads ---
    if (resolved.gitDocsUrl && resolved.repoUrl) {
      const gh = parseGitHubUrl(resolved.repoUrl);
      if (gh) {
        onProgress("Fetching git docs");
        const gitDocs = await fetchGitDocs(gh.owner, gh.repo, version, packageName);
        if (gitDocs?.fallback) warnings.push(`Docs fetched from ${gitDocs.ref} branch (no tag found for v${version})`);
        if (gitDocs && gitDocs.files.length > 0) {
          // Download in batches of 20 to bound concurrent requests.
          const BATCH_SIZE = 20;
          const results = [];
          for (let i = 0; i < gitDocs.files.length; i += BATCH_SIZE) {
            const batch = gitDocs.files.slice(i, i + BATCH_SIZE);
            onProgress(`Downloading docs ${Math.min(i + BATCH_SIZE, gitDocs.files.length)}/${gitDocs.files.length} from ${gitDocs.ref}`);
            const batchResults = await Promise.all(batch.map(async (file) => {
              const content = await fetchGitHubRaw(`${gitDocs.baseUrl}/${file}`);
              if (!content) return null;
              return {
                file,
                content
              };
            }));
            results.push(...batchResults);
          }
          for (const r of results) if (r) {
            // Normalize every cached file under docs/ regardless of repo layout.
            const stripped = gitDocs.docsPrefix ? r.file.replace(gitDocs.docsPrefix, "") : r.file;
            const cachePath = stripped.startsWith("docs/") ? stripped : `docs/${stripped}`;
            cachedDocs.push({
              path: cachePath,
              content: r.content
            });
            docsToIndex.push({
              id: cachePath,
              content: r.content,
              metadata: {
                package: packageName,
                source: cachePath,
                type: "doc"
              }
            });
          }
          const downloaded = results.filter(Boolean).length;
          // Shallow git docs (too few files) are discarded in favor of llms.txt.
          if (downloaded > 0) if (isShallowGitDocs(downloaded) && resolved.llmsUrl) {
            onProgress(`Shallow git-docs (${downloaded} files), trying llms.txt`);
            cachedDocs.length = 0;
            docsToIndex.length = 0;
          } else {
            docSource = `${resolved.repoUrl}/tree/${gitDocs.ref}/docs`;
            docsType = "docs";
            writeToCache(packageName, version, cachedDocs);
            // Git docs succeeded: llms.txt is cached as supplementary material only
            // (written to cache, intentionally NOT added to docsToIndex).
            if (resolved.llmsUrl) {
              onProgress("Caching supplementary llms.txt");
              const llmsContent = await fetchLlmsTxt(resolved.llmsUrl);
              if (llmsContent) {
                const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin;
                const supplementary = [{
                  path: "llms.txt",
                  content: normalizeLlmsLinks(llmsContent.raw, baseUrl)
                }];
                if (llmsContent.links.length > 0) {
                  onProgress(`Downloading ${llmsContent.links.length} supplementary docs`);
                  const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
                    onProgress(`Downloading supplementary doc ${done + 1}/${total}`);
                  });
                  for (const doc of docs) {
                    if (!isFrameworkDoc(doc.url)) continue;
                    const localPath = doc.url.startsWith("/") ? doc.url.slice(1) : doc.url;
                    supplementary.push({
                      path: join("llms-docs", ...localPath.split("/")),
                      content: doc.content
                    });
                  }
                }
                writeToCache(packageName, version, supplementary);
              }
            }
          }
        }
      }
    }
    // --- Step 2: explicit crawl URL (only if git docs produced nothing) ---
    if (resolved.crawlUrl && cachedDocs.length === 0) {
      onProgress("Crawling website");
      const crawledDocs = await fetchCrawledDocs(resolved.crawlUrl, onProgress).catch((err) => {
        warnings.push(`Crawl failed for ${resolved.crawlUrl}: ${err?.message || err}`);
        return [];
      });
      if (crawledDocs.length === 0 && resolved.crawlUrl) warnings.push(`Crawl returned 0 docs from ${resolved.crawlUrl}`);
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path)) continue;
          cachedDocs.push(doc);
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: {
              package: packageName,
              source: doc.path,
              type: "doc"
            }
          });
        }
        docSource = resolved.crawlUrl;
        docsType = "docs";
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // --- Step 3: llms.txt as the primary docs source ---
    if (resolved.llmsUrl && cachedDocs.length === 0) {
      onProgress("Fetching llms.txt");
      const llmsContent = await fetchLlmsTxt(resolved.llmsUrl);
      if (llmsContent) {
        docSource = resolved.llmsUrl;
        docsType = "llms.txt";
        const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin;
        cachedDocs.push({
          path: "llms.txt",
          content: normalizeLlmsLinks(llmsContent.raw, baseUrl)
        });
        if (llmsContent.links.length > 0) {
          onProgress(`Downloading ${llmsContent.links.length} linked docs`);
          const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
            onProgress(`Downloading linked doc ${done + 1}/${total}`);
          });
          for (const doc of docs) {
            if (!isFrameworkDoc(doc.url)) continue;
            const cachePath = join("docs", ...(doc.url.startsWith("/") ? doc.url.slice(1) : doc.url).split("/"));
            cachedDocs.push({
              path: cachePath,
              content: doc.content
            });
            docsToIndex.push({
              id: doc.url,
              content: doc.content,
              metadata: {
                package: packageName,
                source: cachePath,
                type: "doc"
              }
            });
          }
          // Linked docs were downloaded: upgrade from bare llms.txt to full docs.
          if (docs.length > 0) docsType = "docs";
        }
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // --- Step 4: crawl the docs site when nothing landed under docs/ yet ---
    if (resolved.docsUrl && !cachedDocs.some((d) => d.path.startsWith("docs/"))) {
      const crawlPattern = resolved.crawlUrl || toCrawlPattern(resolved.docsUrl);
      onProgress("Crawling docs site");
      // Lower page budget (200) for an explicit crawl URL, higher (400) for a derived pattern.
      const crawledDocs = await fetchCrawledDocs(crawlPattern, onProgress, resolved.crawlUrl ? 200 : 400).catch((err) => {
        warnings.push(`Crawl failed for ${crawlPattern}: ${err?.message || err}`);
        return [];
      });
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path)) continue;
          cachedDocs.push(doc);
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: {
              package: packageName,
              source: doc.path,
              type: "doc"
            }
          });
        }
        docSource = crawlPattern;
        docsType = "docs";
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // --- Step 5: last resort, the README ---
    if (resolved.readmeUrl && cachedDocs.length === 0) {
      onProgress("Fetching README");
      const content = await fetchReadmeContent(resolved.readmeUrl);
      if (content) {
        cachedDocs.push({
          path: "docs/README.md",
          content
        });
        docsToIndex.push({
          id: "README.md",
          content,
          metadata: {
            package: packageName,
            source: "docs/README.md",
            type: "doc"
          }
        });
        writeToCache(packageName, version, cachedDocs);
      }
    }
    // Generate docs/_INDEX.md when there is more than one real markdown doc.
    if (docsType !== "readme" && cachedDocs.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md")).length > 1) {
      const docsIndex = generateDocsIndex(cachedDocs);
      if (docsIndex) writeToCache(packageName, version, [{
        path: "docs/_INDEX.md",
        content: docsIndex
      }]);
    }
  } else {
    // Cache hit: reuse cached docs, re-index only if the search db is missing.
    onProgress("Loading cached docs");
    const detected = detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl);
    docsType = detected.docsType;
    if (detected.docSource) docSource = detected.docSource;
    if (!existsSync(getPackageDbPath(packageName, version))) {
      onProgress("Reading cached docs for indexing");
      const cached = readCachedDocs(packageName, version);
      for (const doc of cached) docsToIndex.push({
        id: doc.path,
        content: doc.content,
        metadata: {
          package: packageName,
          source: doc.path,
          // classifyCachedDoc supplies type (+ number for issues/discussions).
          ...classifyCachedDoc(doc.path)
        }
      });
    }
    // Backfill docs/_INDEX.md for caches created before index generation existed.
    if (docsType !== "readme" && !existsSync(join(getCacheDir(packageName, version), "docs", "_INDEX.md"))) {
      onProgress("Generating docs index");
      const cached = readCachedDocs(packageName, version);
      if (cached.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md")).length > 1) {
        const docsIndex = generateDocsIndex(cached);
        if (docsIndex) writeToCache(packageName, version, [{
          path: "docs/_INDEX.md",
          content: docsIndex
        }]);
      }
    }
  }
  // Repo-level cache dirs (shared across versions) when the GitHub repo is known.
  const gh = resolved.repoUrl ? parseGitHubUrl(resolved.repoUrl) : null;
  const repoInfo = gh ? {
    owner: gh.owner,
    repo: gh.repo
  } : void 0;
  const repoCacheDir = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : null;
  const cacheDir = getCacheDir(packageName, version);
  const issuesDir = repoCacheDir ? join(repoCacheDir, "issues") : join(cacheDir, "issues");
  const discussionsDir = repoCacheDir ? join(repoCacheDir, "discussions") : join(cacheDir, "discussions");
  const releasesPath = repoCacheDir ? join(repoCacheDir, "releases") : join(cacheDir, "releases");
  // --- Issues (feature-gated, needs gh CLI, skipped when already cached) ---
  if (features.issues && gh && isGhAvailable() && !existsSync(issuesDir)) {
    onProgress("Fetching issues via GitHub API");
    const issues = await fetchGitHubIssues(gh.owner, gh.repo, 30, resolved.releasedAt, opts.from).catch(() => []);
    if (issues.length > 0) {
      onProgress(`Caching ${issues.length} issues`);
      const issueDocs = [...issues.map((issue) => ({
        path: `issues/issue-${issue.number}.md`,
        content: formatIssueAsMarkdown(issue)
      })), {
        path: "issues/_INDEX.md",
        content: generateIssueIndex(issues)
      }];
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, issueDocs);
      else writeToCache(packageName, version, issueDocs);
      for (const issue of issues) docsToIndex.push({
        id: `issue-${issue.number}`,
        content: sanitizeMarkdown(`#${issue.number}: ${issue.title}\n\n${issue.body || ""}`),
        metadata: {
          package: packageName,
          source: `issues/issue-${issue.number}.md`,
          type: "issue",
          number: issue.number
        }
      });
    }
  }
  // --- Discussions (same shape as issues, smaller fetch budget) ---
  if (features.discussions && gh && isGhAvailable() && !existsSync(discussionsDir)) {
    onProgress("Fetching discussions via GitHub API");
    const discussions = await fetchGitHubDiscussions(gh.owner, gh.repo, 20, resolved.releasedAt, opts.from).catch(() => []);
    if (discussions.length > 0) {
      onProgress(`Caching ${discussions.length} discussions`);
      const discussionDocs = [...discussions.map((d) => ({
        path: `discussions/discussion-${d.number}.md`,
        content: formatDiscussionAsMarkdown(d)
      })), {
        path: "discussions/_INDEX.md",
        content: generateDiscussionIndex(discussions)
      }];
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, discussionDocs);
      else writeToCache(packageName, version, discussionDocs);
      for (const d of discussions) docsToIndex.push({
        id: `discussion-${d.number}`,
        content: sanitizeMarkdown(`#${d.number}: ${d.title}\n\n${d.body || ""}`),
        metadata: {
          package: packageName,
          source: `discussions/discussion-${d.number}.md`,
          type: "discussion",
          number: d.number
        }
      });
    }
  }
  // --- Releases: GitHub release notes + optional blog posts + generated index ---
  if (features.releases && gh && isGhAvailable() && !existsSync(releasesPath)) {
    onProgress("Fetching releases via GitHub API");
    const changelogRef = isPrerelease(version) ? getPrereleaseChangelogRef(packageName) : void 0;
    const releaseDocs = await fetchReleaseNotes(gh.owner, gh.repo, version, resolved.gitRef, packageName, opts.from, changelogRef).catch(() => []);
    let blogDocs = [];
    if (getBlogPreset(packageName)) {
      onProgress("Fetching blog release notes");
      blogDocs = await fetchBlogReleases(packageName, version).catch(() => []);
    }
    const allDocs = [...releaseDocs, ...blogDocs];
    // Blog entries carry their version in the filename, title/date in frontmatter.
    const blogEntries = blogDocs.filter((d) => !d.path.endsWith("_INDEX.md")).map((d) => {
      const versionMatch = d.path.match(/blog-(.+)\.md$/);
      const fm = parseFrontmatter(d.content);
      return {
        version: versionMatch?.[1] ?? "",
        title: fm.title ?? `Release ${versionMatch?.[1]}`,
        date: fm.date ?? ""
      };
    }).filter((b) => b.version);
    // Reconstruct minimal release records from cached frontmatter for the index.
    const ghReleases = releaseDocs.filter((d) => d.path.startsWith("releases/") && !d.path.endsWith("CHANGELOG.md")).map((d) => {
      const fm = parseFrontmatter(d.content);
      const tag = fm.tag ?? "";
      const name = fm.name ?? tag;
      const published = fm.published ?? "";
      return {
        id: 0,
        tag,
        name,
        prerelease: false,
        createdAt: published,
        publishedAt: published,
        markdown: ""
      };
    }).filter((r) => r.tag);
    const hasChangelog = allDocs.some((d) => d.path === "releases/CHANGELOG.md");
    if (ghReleases.length > 0 || blogEntries.length > 0) allDocs.push({
      path: "releases/_INDEX.md",
      content: generateReleaseIndex({
        releases: ghReleases,
        packageName,
        blogReleases: blogEntries,
        hasChangelog
      })
    });
    if (allDocs.length > 0) {
      onProgress(`Caching ${allDocs.length} releases`);
      if (repoInfo) writeToRepoCache(repoInfo.owner, repoInfo.repo, allDocs);
      else writeToCache(packageName, version, allDocs);
      for (const doc of allDocs) docsToIndex.push({
        id: doc.path,
        content: doc.content,
        metadata: {
          package: packageName,
          source: doc.path,
          type: "release"
        }
      });
    }
  }
  return {
    docSource,
    docsType,
    docsToIndex,
    hasIssues: features.issues && existsSync(issuesDir),
    hasDiscussions: features.discussions && existsSync(discussionsDir),
    hasReleases: features.releases && existsSync(releasesPath),
    warnings,
    repoInfo,
    usedCache: useCache
  };
}
|
|
536
|
+
/**
 * Extract the parent document ID from a chunk ID.
 * Chunk IDs have the form "docId#chunk-N"; non-chunk IDs return as-is.
 */
function parentDocId(id) {
  // Take everything before the first "#chunk-" marker (whole id when absent).
  return id.split("#chunk-", 1)[0];
}
|
|
544
|
+
/**
 * Cap and sort docs by type priority; mutates and truncates allDocs in place.
 * Docs rank highest, then issues, discussions, releases, and finally code
 * (source/types). Ties break on id for a deterministic order. No-op when the
 * list already fits within `max`.
 */
function capDocs(allDocs, max, onProgress) {
  if (allDocs.length <= max) return;
  const RANK = {
    doc: 0,
    issue: 1,
    discussion: 2,
    release: 3,
    source: 4,
    types: 5
  };
  // Unknown types fall back to release-level priority (3).
  const rankOf = (entry) => RANK[entry.metadata?.type || "doc"] ?? 3;
  allDocs.sort((a, b) => rankOf(a) - rankOf(b) || a.id.localeCompare(b.id));
  // Report before truncating so the message shows the original count.
  onProgress(`Indexing capped at ${max}/${allDocs.length} docs (prioritized by type)`);
  allDocs.length = max;
}
|
|
564
|
+
/**
 * Index all resources into the search database, with incremental support.
 *
 * Fresh build when no db exists; otherwise diffs incoming doc ids against the
 * stored index (via parentDocId) and only adds new / removes stale entries.
 * Skips gracefully when the search feature is off or native search deps are
 * unavailable.
 *
 * Fixes vs. original: removed the redundant `features.search &&` re-check after
 * the early return, and deduplicated the createIndex progress handler and the
 * SearchDepsUnavailableError try/catch that were repeated at both call sites.
 */
async function indexResources(opts) {
  const { packageName, version, cwd, onProgress } = opts;
  const features = opts.features ?? readConfig().features ?? defaultFeatures;
  if (!features.search) return;
  const dbPath = getPackageDbPath(packageName, version);
  const dbExists = existsSync(dbPath);
  const allDocs = [...opts.docsToIndex];
  const pkgDir = resolvePkgDir(packageName, cwd, version);
  // Progress label for a doc: code exports ("source"/"types") read as "code".
  const docLabel = (d) => d?.metadata?.type === "source" || d?.metadata?.type === "types" ? "code" : d?.metadata?.type || "doc";
  // Shared createIndex progress handler over a given doc list.
  const indexProgress = (docs) => ({ phase, current, total }) => {
    if (phase === "storing") onProgress(`Storing ${docLabel(docs[current - 1])} (${current}/${total})`);
    else if (phase === "embedding") onProgress(`Creating embeddings (${current}/${total})`);
  };
  // createIndex wrapper: missing native search deps downgrade to a skip message.
  const buildIndex = async (docs, extra) => {
    try {
      await createIndex(docs, {
        dbPath,
        ...extra,
        onProgress: indexProgress(docs)
      });
    } catch (err) {
      if (err instanceof SearchDepsUnavailableError) onProgress("Search indexing skipped (native deps unavailable)");
      else throw err;
    }
  };
  // Add the package's entry files (exports) so code is searchable too.
  if (pkgDir) {
    onProgress("Scanning exports");
    const entryFiles = await resolveEntryFiles(pkgDir);
    for (const e of entryFiles) allDocs.push({
      id: e.path,
      content: e.content,
      metadata: {
        package: packageName,
        source: `pkg/${e.path}`,
        type: e.type
      }
    });
  }
  if (allDocs.length === 0) return;
  capDocs(allDocs, MAX_INDEX_DOCS, onProgress);
  if (!dbExists) {
    onProgress(`Building search index (${allDocs.length} docs)`);
    await buildIndex(allDocs);
    return;
  }
  // Incremental path: diff stored ids (chunk ids map to parent doc ids) against
  // incoming doc ids.
  let existingIds;
  try {
    existingIds = await listIndexIds({ dbPath });
  } catch (err) {
    if (err instanceof SearchDepsUnavailableError) {
      onProgress("Search indexing skipped (native deps unavailable)");
      return;
    }
    throw err;
  }
  const existingParentIds = new Set(existingIds.map(parentDocId));
  const incomingIds = new Set(allDocs.map((d) => d.id));
  const newDocs = allDocs.filter((d) => !existingParentIds.has(d.id));
  const removeIds = existingIds.filter((id) => !incomingIds.has(parentDocId(id)));
  if (newDocs.length === 0 && removeIds.length === 0) {
    onProgress("Search index up to date");
    return;
  }
  const parts = [];
  if (newDocs.length > 0) parts.push(`+${newDocs.length} new`);
  if (removeIds.length > 0) parts.push(`-${removeIds.length} stale`);
  onProgress(`Updating search index (${parts.join(", ")})`);
  await buildIndex(newDocs, { removeIds });
}
|
|
644
|
+
/**
 * Eject references: copy cached files as real files into references/ dir.
 * Used for portable skills (git repos, sharing). Replaces symlinks with copies.
 * Does NOT copy pkg files — those reference node_modules directly.
 */
function ejectReferences(skillDir, packageName, cwd, version, docsType, features, repoInfo) {
	const flags = features ?? readConfig().features ?? defaultFeatures;
	const pkgCache = getCacheDir(packageName, version);
	const refsDir = join(skillDir, "references");
	// GitHub-derived resources live in the repo cache when repo info is known;
	// otherwise they fall back to the package cache.
	const repoCache = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : pkgCache;
	// Docs are only ejected when they are not shipped with the package and
	// were not sourced from the README.
	const shouldCopyDocs = !hasShippedDocs(packageName, cwd, version) && docsType !== "readme";
	if (shouldCopyDocs) copyCachedSubdir(pkgCache, refsDir, "docs");
	// Feature-gated repo subdirs share the same copy step; the feature flag
	// name matches the cached subdirectory name.
	for (const kind of ["issues", "discussions", "releases"]) {
		if (flags[kind]) copyCachedSubdir(repoCache, refsDir, kind);
	}
}
|
|
659
|
+
/** Recursively copy a cached subdirectory into the references dir.
 * No-op when the source subdir does not exist; directory structure is
 * mirrored and files are copied byte-for-byte. */
function copyCachedSubdir(cacheDir, refsDir, subdir) {
	const sourceRoot = join(cacheDir, subdir);
	if (!existsSync(sourceRoot)) return;
	const targetRoot = join(refsDir, subdir);
	mkdirSync(targetRoot, { recursive: true });
	// Iterative traversal with an explicit worklist of source-relative paths
	// ("" denotes the root of the subdir).
	const pending = [""];
	while (pending.length > 0) {
		const rel = pending.pop();
		const currentDir = rel ? join(sourceRoot, rel) : sourceRoot;
		for (const entry of readdirSync(currentDir, { withFileTypes: true })) {
			const childRel = rel ? `${rel}/${entry.name}` : entry.name;
			const from = join(currentDir, entry.name);
			const to = join(targetRoot, childRel);
			if (entry.isDirectory()) {
				mkdirSync(to, { recursive: true });
				pending.push(childRel);
			} else copyFileSync(from, to);
		}
	}
}
|
|
677
|
+
/**
 * Check if .gitignore has `.skilld` entry.
 * If missing, prompt to add it (non-interactive runs add it silently).
 * Skipped for global installs.
 */
async function ensureGitignore(skillsDir, cwd, isGlobal) {
	if (isGlobal) return;
	const gitignorePath = join(cwd, ".gitignore");
	const pattern = ".skilld";
	// Already ignored (exact trimmed-line match) — nothing to do.
	if (existsSync(gitignorePath)) {
		if (readFileSync(gitignorePath, "utf-8").split("\n").some((line) => line.trim() === pattern)) return;
	}
	// Shared writer for both the interactive and non-interactive paths:
	// append to an existing .gitignore (ensuring a newline boundary) or create it.
	const writeEntry = () => {
		const entry = `\n# Skilld references (recreated by \`skilld install\`)\n${pattern}\n`;
		if (existsSync(gitignorePath)) appendFileSync(gitignorePath, `${readFileSync(gitignorePath, "utf-8").endsWith("\n") ? "" : "\n"}${entry}`);
		else writeFileSync(gitignorePath, entry);
	};
	if (!isInteractive()) {
		writeEntry();
		return;
	}
	const relSkillsDir = relative(cwd, skillsDir) || ".";
	p.log.info(`\x1B[1mGit guidance:\x1B[0m\n  \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/*/SKILL.md\x1B[0m\n  \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/skilld-lock.yaml\x1B[0m\n  \x1B[31m✗\x1B[0m Ignore: \x1B[36m${pattern}\x1B[0m \x1B[90m(recreated by \`skilld install\`)\x1B[0m`);
	const add = await p.confirm({
		message: `Add \`${pattern}\` to .gitignore?`,
		initialValue: true
	});
	if (p.isCancel(add) || !add) return;
	writeEntry();
	p.log.success("Updated .gitignore");
}
|
|
706
|
+
// Markers wrapping the skilld-managed snippet inside agent instruction files,
// so later runs can detect an existing snippet instead of duplicating it.
const SKILLD_MARKER_START = "<!-- skilld -->";
const SKILLD_MARKER_END = "<!-- /skilld -->";
// Fallback activation hint used when an agent target does not define its own
// `skillActivationHint` (see getSkillInstructions / getMdcSkillInstructions).
const DEFAULT_SKILL_HINT = "Before modifying code, evaluate each installed skill against the current task.\nFor each skill, determine YES/NO relevance and invoke all YES skills before proceeding.";
|
|
709
|
+
/** Build the marker-wrapped skill-activation snippet for a markdown instruction file. */
function getSkillInstructions(agent) {
	// Prefer the agent-specific hint; fall back to the shared default.
	const hint = targets[agent].skillActivationHint || DEFAULT_SKILL_HINT;
	return [SKILLD_MARKER_START, hint, SKILLD_MARKER_END].join("\n");
}
|
|
712
|
+
/** Build a standalone .mdc (Cursor-style) rule file body: always-apply
 * frontmatter followed by the skill-activation hint. */
function getMdcSkillInstructions(agent) {
	const hint = targets[agent].skillActivationHint || DEFAULT_SKILL_HINT;
	const frontmatter = `---\ndescription: "Activates installed skilld skills before code changes"\nalwaysApply: true\n---`;
	return `${frontmatter}\n\n${hint}`;
}
|
|
715
|
+
/**
 * Check if agent instruction file has skilld skill-activation snippet.
 * If missing, prompt to add it (non-interactive runs write it silently).
 * Skipped for global installs or agents without an instructionFile.
 */
async function ensureAgentInstructions(agent, cwd, isGlobal) {
	if (isGlobal) return;
	const agentConfig = targets[agent];
	if (!agentConfig.instructionFile) return;
	const filePath = join(cwd, agentConfig.instructionFile);
	// .mdc targets get a standalone frontmatter rule file instead of a
	// marker-wrapped append; never overwrite an existing one.
	if (agentConfig.instructionFile.endsWith(".mdc")) {
		if (existsSync(filePath)) return;
		const content = `${getMdcSkillInstructions(agent)}\n`;
		const writeMdc = () => {
			mkdirSync(join(filePath, ".."), { recursive: true });
			writeFileSync(filePath, content);
		};
		if (!isInteractive()) {
			writeMdc();
			return;
		}
		p.note(`This tells your agent to check installed skills before making
code changes. Without it, skills are available but may not
activate automatically.

\x1B[90m${getMdcSkillInstructions(agent)}\x1B[0m`, `Create ${agentConfig.instructionFile}`);
		const add = await p.confirm({
			message: `Create ${agentConfig.instructionFile} with skill activation instructions?`,
			initialValue: true
		});
		if (p.isCancel(add) || !add) return;
		writeMdc();
		p.log.success(`Created ${agentConfig.instructionFile}`);
		return;
	}
	// Markdown targets: skip when the snippet marker is already present
	// (was a hard-coded literal; use the shared constant for consistency).
	if (existsSync(filePath)) {
		if (readFileSync(filePath, "utf-8").includes(SKILLD_MARKER_START)) return;
	}
	// Shared writer: append the snippet (ensuring a newline boundary) or
	// create the file with just the snippet.
	const writeSnippet = () => {
		if (existsSync(filePath)) appendFileSync(filePath, `${readFileSync(filePath, "utf-8").endsWith("\n") ? "" : "\n"}\n${getSkillInstructions(agent)}\n`);
		else writeFileSync(filePath, `${getSkillInstructions(agent)}\n`);
	};
	if (!isInteractive()) {
		writeSnippet();
		return;
	}
	const action = existsSync(filePath) ? "Append to" : "Create";
	p.note(`This tells your agent to check installed skills before making
code changes. Without it, skills are available but may not
activate automatically.

\x1B[90m${getSkillInstructions(agent)}\x1B[0m`, `${action} ${agentConfig.instructionFile}`);
	const add = await p.confirm({
		message: `${action} ${agentConfig.instructionFile} with skill activation instructions?`,
		initialValue: true
	});
	if (p.isCancel(add) || !add) return;
	writeSnippet();
	p.log.success(`Updated ${agentConfig.instructionFile}`);
}
|
|
770
|
+
/** Default sections when model is pre-set (non-interactive).
 * Values correspond to the `value` fields offered by selectSkillSections. */
const DEFAULT_SECTIONS = ["best-practices", "api-changes"];
|
|
772
|
+
/** Interactively choose which SKILL.md sections to generate.
 * Returns { sections, customPrompt?, cancelled }: `cancelled` is true when the
 * user aborts any prompt; an empty non-cancelled selection is also possible.
 * When "custom" is selected, also collects a heading and instructions. */
async function selectSkillSections(message = "Enhance SKILL.md") {
	p.log.info("Budgets adapt to package release density.");
	const selected = await p.multiselect({
		message,
		options: [
			{
				label: "API changes",
				value: "api-changes",
				hint: "new/deprecated APIs from version history"
			},
			{
				label: "Best practices",
				value: "best-practices",
				hint: "gotchas, pitfalls, patterns"
			},
			{
				label: "Custom section",
				value: "custom",
				hint: "add your own section"
			}
		],
		initialValues: DEFAULT_SECTIONS,
		required: false
	});
	// Ctrl-C / escape during the multiselect.
	if (p.isCancel(selected)) return {
		sections: [],
		cancelled: true
	};
	const sections = selected;
	if (sections.length === 0) return {
		sections: [],
		cancelled: false
	};
	// With multiple sections selected, show how the line/item budget is split
	// across them (maxItems/maxLines scale per-section limits by count n).
	if (sections.length > 1) {
		const n = sections.length;
		const budgetLines = [];
		for (const s of sections) switch (s) {
			case "api-changes":
				budgetLines.push(` API changes ${maxItems(6, 12, n)}–${maxItems(6, Math.round(12 * 1.6), n)} items (adapts to release churn)`);
				break;
			case "best-practices":
				budgetLines.push(` Best practices ${maxItems(4, 10, n)}–${maxItems(4, Math.round(10 * 1.3), n)} items`);
				break;
			case "custom":
				budgetLines.push(` Custom ≤${maxLines(50, 80, n)} lines`);
				break;
		}
		p.log.info(`Budget (${n} sections):\n${budgetLines.join("\n")}`);
	}
	// Custom section needs a heading and body collected from the user.
	let customPrompt;
	if (sections.includes("custom")) {
		const heading = await p.text({
			message: "Section heading",
			placeholder: "e.g. \"Migration from v2\" or \"SSR Patterns\""
		});
		if (p.isCancel(heading)) return {
			sections: [],
			cancelled: true
		};
		const body = await p.text({
			message: "Instructions for this section",
			placeholder: "e.g. \"Document breaking changes and migration steps from v2 to v3\""
		});
		if (p.isCancel(body)) return {
			sections: [],
			cancelled: true
		};
		customPrompt = {
			heading,
			body
		};
	}
	return {
		sections,
		customPrompt,
		cancelled: false
	};
}
|
|
850
|
+
/**
 * Resolve sections + model for LLM enhancement.
 * If presetModel is provided and available, uses DEFAULT_SECTIONS without prompting.
 * Returns null if cancelled, skipped, or no sections/model selected.
 * `updateCtx` (optional) tailors the prompt for skill updates: it shows a
 * version/age hint and defaults to "skip" for small bumps of already-enhanced skills.
 */
async function selectLlmConfig(presetModel, message, updateCtx) {
	// A valid preset model short-circuits all prompting.
	if (presetModel) {
		if ((await getAvailableModels()).some((m) => m.id === presetModel)) return {
			model: presetModel,
			sections: DEFAULT_SECTIONS
		};
		// Preset was invalid; only continue if we can ask the user.
		// NOTE(review): this check duplicates the one immediately below.
		if (!isInteractive()) return null;
	}
	if (!isInteractive()) return null;
	const config = readConfig();
	const available = await getAvailableModels();
	if (available.length === 0) {
		p.log.warn(NO_MODELS_MESSAGE);
		return null;
	}
	// Pick the default model: configured (if still available), else the
	// recommended one, else the first available.
	let defaultModel;
	if (config.model && available.some((m) => m.id === config.model)) defaultModel = config.model;
	else {
		if (config.model) p.log.warn(`Configured model \x1B[36m${config.model}\x1B[0m is unavailable — using auto-selected fallback`);
		defaultModel = available.find((m) => m.recommended)?.id ?? available[0].id;
	}
	const defaultModelName = getModelName(defaultModel);
	const providerHint = available.find((m) => m.id === defaultModel)?.providerName ?? "";
	const sourceHint = config.model === defaultModel ? "configured" : "recommended";
	const defaultHint = providerHint ? `${providerHint} · ${sourceHint}` : sourceHint;
	let enhanceMessage = "Enhance SKILL.md?";
	let defaultToSkip = false;
	// For updates, enrich the prompt with version diff / sync age / prior
	// enhancement state, and default to "skip" for small bumps of
	// already-enhanced skills.
	if (updateCtx) {
		const diff = updateCtx.bumpType ?? (updateCtx.oldVersion && updateCtx.newVersion ? semverDiff(updateCtx.oldVersion, updateCtx.newVersion) : null);
		const isSmallBump = diff === "patch" || diff === "prerelease" || diff === "prepatch" || diff === "preminor" || diff === "premajor";
		const ageParts = [];
		if (diff) ageParts.push(diff);
		if (updateCtx.syncedAt) {
			const syncedAtMs = new Date(updateCtx.syncedAt).getTime();
			if (Number.isFinite(syncedAtMs)) {
				// 864e5 ms = 1 day.
				const days = Math.floor((Date.now() - syncedAtMs) / 864e5);
				ageParts.push(days === 0 ? "today" : days === 1 ? "1d ago" : `${days}d ago`);
			}
		}
		if (updateCtx.wasEnhanced) ageParts.push("LLM-enhanced");
		const hint = [updateCtx.oldVersion && updateCtx.newVersion ? `${updateCtx.oldVersion} → ${updateCtx.newVersion}` : null, ...ageParts].filter(Boolean).join(" · ");
		if (hint) enhanceMessage = `Enhance SKILL.md? \x1B[90m(${hint})\x1B[0m`;
		if (updateCtx.wasEnhanced && isSmallBump) defaultToSkip = true;
	}
	const choice = await p.select({
		message: enhanceMessage,
		options: [
			{
				label: defaultModelName,
				value: "default",
				hint: defaultHint
			},
			{
				label: "Different model",
				value: "pick",
				hint: "choose another enhancement model"
			},
			{
				label: "Prompt only",
				value: "prompt",
				hint: "write prompts for manual use"
			},
			{
				label: "Skip",
				value: "skip",
				hint: "base skill with docs, issues, and types"
			}
		],
		...defaultToSkip ? { initialValue: "skip" } : {}
	});
	if (p.isCancel(choice)) return null;
	if (choice === "skip") return null;
	// "Prompt only": user will run the prompts manually; no model call is made
	// here, but the default model id is still carried in the result.
	if (choice === "prompt") {
		const { sections, customPrompt, cancelled } = await selectSkillSections(message ? `${message} (prompt only)` : "Select sections for prompt generation");
		if (cancelled || sections.length === 0) return null;
		return {
			model: defaultModel,
			sections,
			customPrompt,
			promptOnly: true
		};
	}
	let model;
	if (choice === "pick") {
		const picked = await pickModel(available);
		if (!picked) return null;
		// Persist the explicit choice for future runs.
		updateConfig({ model: picked });
		model = picked;
	} else model = defaultModel;
	if (!model) return null;
	const modelName = getModelName(model);
	const { sections, customPrompt, cancelled } = await selectSkillSections(message ? `${message} (${modelName})` : `Enhance SKILL.md with ${modelName}`);
	if (cancelled || sections.length === 0) return null;
	return {
		model,
		sections,
		customPrompt
	};
}
|
|
954
|
+
/** Run the LLM enhancement pass for one skill: let the agent explore the
 * package via optimizeDocs, then regenerate SKILL.md from the optimized body.
 * On failure (wasOptimized false), logs an error and leaves SKILL.md untouched.
 * Progress is streamed through a clack taskLog. */
async function enhanceSkillWithLLM(opts) {
	const { packageName, version, skillDir, dirName, model, resolved, relatedSkills, hasIssues, hasDiscussions, hasReleases, hasChangelog, docsType, hasShippedDocs: shippedDocs, pkgFiles, force, debug, sections, customPrompt, packages, features, eject, overheadLines } = opts;
	const effectiveFeatures = features;
	const llmLog = p.taskLog({ title: `Agent exploring ${packageName}` });
	const docFiles = listReferenceFiles(skillDir);
	const { optimized, wasOptimized, usage, cost, warnings, error, debugLogsDir } = await optimizeDocs({
		packageName,
		skillDir,
		model,
		version,
		// GitHub resources are "available" if either issues or discussions are.
		hasGithub: hasIssues || hasDiscussions,
		hasReleases,
		hasChangelog,
		docFiles,
		docsType,
		hasShippedDocs: shippedDocs,
		// `force` bypasses any cached enhancement result.
		noCache: force,
		debug,
		sections,
		customPrompt,
		features: effectiveFeatures,
		pkgFiles,
		overheadLines,
		onProgress: createToolProgress(llmLog)
	});
	if (wasOptimized) {
		// Build a "(Nk tokens, $X.XX)" suffix from whatever usage/cost data came back.
		const costParts = [];
		if (usage) {
			const totalK = Math.round(usage.totalTokens / 1e3);
			costParts.push(`${totalK}k tokens`);
		}
		// NOTE(review): a cost of exactly 0 is omitted by this truthiness check.
		if (cost) costParts.push(`$${cost.toFixed(2)}`);
		const costSuffix = costParts.length > 0 ? ` (${costParts.join(", ")})` : "";
		llmLog.success(`Generated best practices${costSuffix}`);
		if (debugLogsDir) p.log.info(`Debug logs: ${relative(process.cwd(), debugLogsDir)}`);
		// A partial failure still produces output; surface it as a warning.
		if (error) p.log.warn(`\x1B[33mPartial failure: ${error}\x1B[0m`);
		if (warnings?.length) for (const w of warnings) p.log.warn(`\x1B[33m${w}\x1B[0m`);
		const skillMd = generateSkillMd({
			name: packageName,
			version,
			releasedAt: resolved.releasedAt,
			distTags: resolved.distTags,
			body: optimized,
			relatedSkills,
			hasIssues,
			hasDiscussions,
			hasReleases,
			hasChangelog,
			docsType,
			hasShippedDocs: shippedDocs,
			pkgFiles,
			generatedBy: getModelLabel(model),
			dirName,
			packages,
			repoUrl: resolved.repoUrl,
			features,
			eject
		});
		writeFileSync(join(skillDir, "SKILL.md"), skillMd);
	} else llmLog.error(`Enhancement failed${error ? `: ${error}` : ""}`);
}
|
|
1015
|
+
/**
 * Build and write PROMPT_*.md files for manual LLM use.
 * Returns the list of sections that had prompts written.
 */
function writePromptFiles(opts) {
	const { skillDir, sections, customPrompt, features } = opts;
	const prompts = buildAllSectionPrompts({
		packageName: opts.packageName,
		skillDir,
		version: opts.version,
		hasIssues: opts.hasIssues,
		hasDiscussions: opts.hasDiscussions,
		hasReleases: opts.hasReleases,
		hasChangelog: opts.hasChangelog,
		docFiles: listReferenceFiles(skillDir),
		docsType: opts.docsType,
		hasShippedDocs: opts.hasShippedDocs,
		pkgFiles: opts.pkgFiles,
		customPrompt,
		features,
		overheadLines: opts.overheadLines,
		sections
	});
	const outDir = join(skillDir, ".skilld");
	mkdirSync(outDir, { recursive: true });
	// Write one PROMPT_<section>.md per generated prompt, collecting the
	// section names as we go.
	const written = [];
	for (const [section, prompt] of prompts) {
		writeFileSync(join(outDir, `PROMPT_${section}.md`), prompt);
		written.push(section);
	}
	if (written.length > 0) {
		const relDir = relative(process.cwd(), skillDir);
		const promptFiles = written.map((s) => `PROMPT_${s}.md`).join(", ");
		const outputFileList = written.map((s) => SECTION_OUTPUT_FILES[s]).join(", ");
		p.log.info(`Prompt files written to ${relDir}/.skilld/\n\x1B[2m\x1B[3m Read each prompt file (${promptFiles}) in ${relDir}/.skilld/, read the\n referenced files, then write your output to the matching file (${outputFileList}).\n When done, run: skilld assemble\x1B[0m`);
	}
	return written;
}
|
|
1051
|
+
//#endregion
|
|
1052
|
+
export { writePromptFiles as S, linkAllReferences as _, classifyCachedDoc as a, selectLlmConfig as b, ejectReferences as c, ensureGitignore as d, fetchAndCacheResources as f, indexResources as g, handleShippedSkills as h, SKILLD_MARKER_START as i, enhanceSkillWithLLM as l, forceClearCache as m, RESOLVE_STEP_LABELS as n, detectChangelog as o, findRelatedSkills as p, SKILLD_MARKER_END as r, detectDocsType as s, DEFAULT_SECTIONS as t, ensureAgentInstructions as u, resolveBaseDir as v, selectSkillSections as x, resolveLocalDep as y };
|
|
1053
|
+
|
|
1054
|
+
//# sourceMappingURL=sync-shared2.mjs.map
|