skilld 1.5.2 → 1.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/THIRD-PARTY-LICENSES.md +38 -0
- package/dist/_chunks/agent.mjs +281 -147
- package/dist/_chunks/agent.mjs.map +1 -1
- package/dist/_chunks/assemble.mjs +2 -1
- package/dist/_chunks/assemble.mjs.map +1 -1
- package/dist/_chunks/author.mjs +11 -2
- package/dist/_chunks/author.mjs.map +1 -1
- package/dist/_chunks/cli-helpers.mjs +22 -5
- package/dist/_chunks/cli-helpers.mjs.map +1 -1
- package/dist/_chunks/cli-helpers2.mjs +2 -1
- package/dist/_chunks/index3.d.mts.map +1 -1
- package/dist/_chunks/install.mjs +6 -2
- package/dist/_chunks/install.mjs.map +1 -1
- package/dist/_chunks/libs/@sinclair/typebox.mjs +2748 -0
- package/dist/_chunks/libs/@sinclair/typebox.mjs.map +1 -0
- package/dist/_chunks/list.mjs +2 -1
- package/dist/_chunks/list.mjs.map +1 -1
- package/dist/_chunks/prepare2.mjs +2 -1
- package/dist/_chunks/prepare2.mjs.map +1 -1
- package/dist/_chunks/prompts.mjs +10 -15
- package/dist/_chunks/prompts.mjs.map +1 -1
- package/dist/_chunks/rolldown-runtime.mjs +13 -0
- package/dist/_chunks/sanitize.mjs +3 -0
- package/dist/_chunks/sanitize.mjs.map +1 -1
- package/dist/_chunks/search-interactive.mjs +2 -1
- package/dist/_chunks/search-interactive.mjs.map +1 -1
- package/dist/_chunks/search.mjs +2 -1
- package/dist/_chunks/setup.mjs +2 -1
- package/dist/_chunks/setup.mjs.map +1 -1
- package/dist/_chunks/sources.mjs +4 -3
- package/dist/_chunks/sources.mjs.map +1 -1
- package/dist/_chunks/sync-shared.mjs +2 -1
- package/dist/_chunks/sync-shared2.mjs +8 -4
- package/dist/_chunks/sync-shared2.mjs.map +1 -1
- package/dist/_chunks/sync.mjs +1 -1
- package/dist/_chunks/sync2.mjs +2 -1
- package/dist/_chunks/uninstall.mjs +2 -1
- package/dist/_chunks/uninstall.mjs.map +1 -1
- package/dist/_chunks/wizard.mjs +1 -1
- package/dist/agent/index.d.mts.map +1 -1
- package/dist/agent/index.mjs +2 -1
- package/dist/cli.mjs +2 -1
- package/dist/cli.mjs.map +1 -1
- package/package.json +1 -1
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import "./agent.mjs";
|
|
2
1
|
import "./config.mjs";
|
|
3
2
|
import "./package-json.mjs";
|
|
4
3
|
import "./prepare.mjs";
|
|
@@ -11,6 +10,8 @@ import "./shared.mjs";
|
|
|
11
10
|
import "./sources.mjs";
|
|
12
11
|
import "./detect.mjs";
|
|
13
12
|
import "./prompts.mjs";
|
|
13
|
+
import "./agent.mjs";
|
|
14
|
+
import "./libs/@sinclair/typebox.mjs";
|
|
14
15
|
import "./cli-helpers.mjs";
|
|
15
16
|
import "./lockfile.mjs";
|
|
16
17
|
import { h as handleShippedSkills } from "./sync-shared2.mjs";
|
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { a as getModelLabel, i as getAvailableModels, o as getModelName, r as createToolProgress, s as optimizeDocs } from "./agent.mjs";
|
|
2
1
|
import { a as getRepoCacheDir, i as getPackageDbPath, o as getCacheDir } from "./config.mjs";
|
|
3
2
|
import { i as readPackageJsonSafe } from "./package-json.mjs";
|
|
4
3
|
import { n as linkShippedSkill, r as resolvePkgDir, t as getShippedSkills } from "./prepare.mjs";
|
|
@@ -10,6 +9,7 @@ import { a as semverDiff, c as getBlogPreset, n as getSharedSkillsDir, p as getP
|
|
|
10
9
|
import { B as isGhAvailable, C as downloadLlmsDocs, D as normalizeLlmsLinks, F as formatDiscussionAsMarkdown, G as fetchReleaseNotes, H as toCrawlPattern, I as generateDiscussionIndex, K as generateReleaseIndex, L as fetchGitHubIssues, M as resolveEntryFiles, N as generateDocsIndex, P as fetchGitHubDiscussions, R as formatIssueAsMarkdown, T as fetchLlmsTxt, U as fetchBlogReleases, V as fetchCrawledDocs, Z as fetchGitHubRaw, b as isShallowGitDocs, h as fetchGitDocs, n as fetchNpmPackage, q as isPrerelease, tt as parseGitHubUrl, u as resolveLocalPackageDocs, v as fetchReadmeContent, y as filterFrameworkDocs, z as generateIssueIndex } from "./sources.mjs";
|
|
11
10
|
import { a as targets } from "./detect.mjs";
|
|
12
11
|
import { c as SECTION_OUTPUT_FILES, g as maxLines, h as maxItems, l as buildAllSectionPrompts, t as generateSkillMd } from "./prompts.mjs";
|
|
12
|
+
import { a as getModelLabel, i as getAvailableModels, o as getModelName, r as createToolProgress, s as optimizeDocs } from "./agent.mjs";
|
|
13
13
|
import { O as readConfig, g as pickModel, j as updateConfig, k as registerProject, n as NO_MODELS_MESSAGE, p as isInteractive, w as defaultFeatures } from "./cli-helpers.mjs";
|
|
14
14
|
import { i as readLock, n as parsePackages, s as writeLock } from "./lockfile.mjs";
|
|
15
15
|
import { join, relative, resolve } from "pathe";
|
|
@@ -879,7 +879,7 @@ async function selectLlmConfig(presetModel, message, updateCtx) {
|
|
|
879
879
|
const providerHint = available.find((m) => m.id === defaultModel)?.providerName ?? "";
|
|
880
880
|
const sourceHint = config.model === defaultModel ? "configured" : "recommended";
|
|
881
881
|
const defaultHint = providerHint ? `${providerHint} · ${sourceHint}` : sourceHint;
|
|
882
|
-
let enhanceMessage = "Enhance SKILL.md?";
|
|
882
|
+
let enhanceMessage = message ? `${message}?` : "Enhance SKILL.md?";
|
|
883
883
|
let defaultToSkip = false;
|
|
884
884
|
if (updateCtx) {
|
|
885
885
|
const diff = updateCtx.bumpType ?? (updateCtx.oldVersion && updateCtx.newVersion ? semverDiff(updateCtx.oldVersion, updateCtx.newVersion) : null);
|
|
@@ -956,7 +956,10 @@ async function selectLlmConfig(presetModel, message, updateCtx) {
|
|
|
956
956
|
async function enhanceSkillWithLLM(opts) {
|
|
957
957
|
const { packageName, version, skillDir, dirName, model, resolved, relatedSkills, hasIssues, hasDiscussions, hasReleases, hasChangelog, docsType, hasShippedDocs: shippedDocs, pkgFiles, force, debug, sections, customPrompt, packages, features, eject, overheadLines } = opts;
|
|
958
958
|
const effectiveFeatures = features;
|
|
959
|
-
const llmLog = p.taskLog({
|
|
959
|
+
const llmLog = p.taskLog({
|
|
960
|
+
title: `Agent exploring ${packageName}`,
|
|
961
|
+
limit: 3
|
|
962
|
+
});
|
|
960
963
|
const docFiles = listReferenceFiles(skillDir);
|
|
961
964
|
const { optimized, wasOptimized, usage, cost, warnings, error, debugLogsDir } = await optimizeDocs({
|
|
962
965
|
packageName,
|
|
@@ -1012,7 +1015,8 @@ async function enhanceSkillWithLLM(opts) {
|
|
|
1012
1015
|
eject
|
|
1013
1016
|
});
|
|
1014
1017
|
writeFileSync(join(skillDir, "SKILL.md"), skillMd);
|
|
1015
|
-
} else llmLog.error(`
|
|
1018
|
+
} else if (error && /\b429\b|rate.?limit|exhausted.*capacity|quota.*reset/i.test(error)) llmLog.error(`Rate limited by LLM provider. Try again shortly or use a different model via \`skilld config\``);
|
|
1019
|
+
else llmLog.error(`Enhancement failed${error ? `: ${error}` : ""}`);
|
|
1016
1020
|
}
|
|
1017
1021
|
/**
|
|
1018
1022
|
* Build and write PROMPT_*.md files for manual LLM use.
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"sync-shared2.mjs","names":["agents"],"sources":["../../src/commands/sync-shared.ts"],"sourcesContent":["import type { AgentType, CustomPrompt, OptimizeModel, SkillSection } from '../agent/index.ts'\nimport type { FeaturesConfig } from '../core/config.ts'\nimport type { ResolvedPackage, ResolveStep } from '../sources/index.ts'\nimport { appendFileSync, copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs'\nimport * as p from '@clack/prompts'\nimport { join, relative, resolve } from 'pathe'\nimport {\n agents,\n buildAllSectionPrompts,\n createToolProgress,\n generateSkillMd,\n getAvailableModels,\n getModelLabel,\n getModelName,\n optimizeDocs,\n SECTION_OUTPUT_FILES,\n} from '../agent/index.ts'\nimport { maxItems, maxLines } from '../agent/prompts/optional/budget.ts'\nimport {\n clearCache,\n getCacheDir,\n getPackageDbPath,\n getRepoCacheDir,\n getShippedSkills,\n hasShippedDocs,\n linkCachedDir,\n linkPkg,\n linkPkgNamed,\n linkRepoCachedDir,\n linkShippedSkill,\n listReferenceFiles,\n readCachedDocs,\n resolvePkgDir,\n writeToCache,\n writeToRepoCache,\n} from '../cache/index.ts'\nimport { isInteractive, NO_MODELS_MESSAGE, pickModel } from '../cli-helpers.ts'\nimport { defaultFeatures, readConfig, registerProject, updateConfig } from '../core/config.ts'\nimport { parsePackages, readLock, writeLock } from '../core/lockfile.ts'\nimport { parseFrontmatter } from '../core/markdown.ts'\nimport { readPackageJsonSafe } from '../core/package-json.ts'\nimport { sanitizeMarkdown } from '../core/sanitize.ts'\nimport { getSharedSkillsDir, semverDiff } from '../core/shared.ts'\nimport { createIndex, listIndexIds, SearchDepsUnavailableError } from '../retriv/index.ts'\nimport {\n downloadLlmsDocs,\n fetchBlogReleases,\n fetchCrawledDocs,\n fetchGitDocs,\n fetchGitHubDiscussions,\n fetchGitHubIssues,\n fetchGitHubRaw,\n fetchLlmsTxt,\n fetchNpmPackage,\n fetchReadmeContent,\n fetchReleaseNotes,\n 
filterFrameworkDocs,\n formatDiscussionAsMarkdown,\n formatIssueAsMarkdown,\n generateDiscussionIndex,\n generateDocsIndex,\n generateIssueIndex,\n generateReleaseIndex,\n getBlogPreset,\n getPrereleaseChangelogRef,\n isGhAvailable,\n isPrerelease,\n isShallowGitDocs,\n normalizeLlmsLinks,\n parseGitHubUrl,\n resolveEntryFiles,\n resolveLocalPackageDocs,\n toCrawlPattern,\n} from '../sources/index.ts'\n\n/** Max docs sent to the embedding pipeline to prevent oversized indexes */\nconst MAX_INDEX_DOCS = 250\n\nexport const RESOLVE_STEP_LABELS: Record<ResolveStep, string> = {\n 'npm': 'npm registry',\n 'github-docs': 'GitHub docs',\n 'github-meta': 'GitHub meta',\n 'github-search': 'GitHub search',\n 'readme': 'README',\n 'llms.txt': 'llms.txt',\n 'crawl': 'website crawl',\n 'local': 'node_modules',\n}\n\n/** Classify a cached doc path into the right metadata type */\nexport function classifyCachedDoc(path: string): { type: string, number?: number } {\n const issueMatch = path.match(/^issues\\/issue-(\\d+)\\.md$/)\n if (issueMatch)\n return { type: 'issue', number: Number(issueMatch[1]) }\n const discussionMatch = path.match(/^discussions\\/discussion-(\\d+)\\.md$/)\n if (discussionMatch)\n return { type: 'discussion', number: Number(discussionMatch[1]) }\n if (path.startsWith('releases/'))\n return { type: 'release' }\n return { type: 'doc' }\n}\n\nexport async function findRelatedSkills(packageName: string, skillsDir: string): Promise<string[]> {\n const related: string[] = []\n\n const npmInfo = await fetchNpmPackage(packageName)\n if (!npmInfo?.dependencies)\n return related\n\n const deps = new Set(Object.keys(npmInfo.dependencies))\n\n if (!existsSync(skillsDir))\n return related\n\n // Build packageName → dirName map from lockfile for accurate matching\n const lock = readLock(skillsDir)\n const pkgToDirName = new Map<string, string>()\n if (lock) {\n for (const [dirName, info] of Object.entries(lock.skills)) {\n if (info.packageName)\n 
pkgToDirName.set(info.packageName, dirName)\n for (const pkg of parsePackages(info.packages))\n pkgToDirName.set(pkg.name, dirName)\n }\n }\n\n const installedSkills = readdirSync(skillsDir)\n const installedSet = new Set(installedSkills)\n\n for (const dep of deps) {\n const dirName = pkgToDirName.get(dep)\n if (dirName && installedSet.has(dirName))\n related.push(dirName)\n }\n\n return related.slice(0, 5)\n}\n\n/** Clear cache + db for --force flag */\nexport function forceClearCache(packageName: string, version: string, repoInfo?: { owner: string, repo: string }): void {\n clearCache(packageName, version)\n const forcedDbPath = getPackageDbPath(packageName, version)\n if (existsSync(forcedDbPath))\n rmSync(forcedDbPath, { recursive: true, force: true })\n // Also clear repo-level cache when force is used\n if (repoInfo) {\n const repoDir = getRepoCacheDir(repoInfo.owner, repoInfo.repo)\n if (existsSync(repoDir))\n rmSync(repoDir, { recursive: true, force: true })\n }\n}\n\n/** Link all reference symlinks (pkg, docs, issues, discussions, releases) */\nexport function linkAllReferences(skillDir: string, packageName: string, cwd: string, version: string, docsType: string, extraPackages?: Array<{ name: string, version?: string }>, features?: FeaturesConfig, repoInfo?: { owner: string, repo: string }): void {\n const f = features ?? readConfig().features ?? 
defaultFeatures\n try {\n linkPkg(skillDir, packageName, cwd, version)\n linkPkgNamed(skillDir, packageName, cwd, version)\n if (!hasShippedDocs(packageName, cwd, version) && docsType !== 'readme') {\n linkCachedDir(skillDir, packageName, version, 'docs')\n }\n // Issues/discussions/releases: use repo cache when available, else package cache\n if (f.issues) {\n if (repoInfo)\n linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'issues')\n else\n linkCachedDir(skillDir, packageName, version, 'issues')\n }\n if (f.discussions) {\n if (repoInfo)\n linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'discussions')\n else\n linkCachedDir(skillDir, packageName, version, 'discussions')\n }\n if (f.releases) {\n if (repoInfo)\n linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'releases')\n else\n linkCachedDir(skillDir, packageName, version, 'releases')\n }\n linkCachedDir(skillDir, packageName, version, 'sections')\n // Create named symlinks for additional packages in multi-package skills\n if (extraPackages) {\n for (const pkg of extraPackages) {\n if (pkg.name !== packageName)\n linkPkgNamed(skillDir, pkg.name, cwd, pkg.version)\n }\n }\n }\n catch {\n // Symlink may fail on some systems\n }\n}\n\n/** Detect docs type from cached directory contents */\nexport function detectDocsType(packageName: string, version: string, repoUrl?: string, llmsUrl?: string): { docsType: 'docs' | 'llms.txt' | 'readme', docSource?: string } {\n const cacheDir = getCacheDir(packageName, version)\n if (existsSync(join(cacheDir, 'docs', 'index.md')) || existsSync(join(cacheDir, 'docs', 'guide'))) {\n return {\n docsType: 'docs',\n docSource: repoUrl ? 
`${repoUrl}/tree/v${version}/docs` : 'git',\n }\n }\n if (existsSync(join(cacheDir, 'llms.txt'))) {\n return {\n docsType: 'llms.txt',\n docSource: llmsUrl || 'llms.txt',\n }\n }\n if (existsSync(join(cacheDir, 'docs', 'README.md'))) {\n return { docsType: 'readme' }\n }\n return { docsType: 'readme' }\n}\n\nexport interface HandleShippedResult {\n shipped: Array<{ skillName: string, skillDir: string }>\n baseDir: string\n}\n\n/** Link shipped skills, write lock entries, register project. Returns result or null if no shipped skills. */\nexport function handleShippedSkills(\n packageName: string,\n version: string,\n cwd: string,\n agent: AgentType,\n global: boolean,\n): HandleShippedResult | null {\n const shippedSkills = getShippedSkills(packageName, cwd, version)\n if (shippedSkills.length === 0)\n return null\n\n const baseDir = resolveBaseDir(cwd, agent, global)\n mkdirSync(baseDir, { recursive: true })\n\n for (const shipped of shippedSkills) {\n linkShippedSkill(baseDir, shipped.skillName, shipped.skillDir)\n writeLock(baseDir, shipped.skillName, {\n packageName,\n version,\n source: 'shipped',\n syncedAt: new Date().toISOString().split('T')[0],\n generator: 'skilld',\n })\n }\n\n if (!global)\n registerProject(cwd)\n\n return { shipped: shippedSkills, baseDir }\n}\n\n/** Resolve the base skills directory for an agent */\nexport function resolveBaseDir(cwd: string, agent: AgentType, global: boolean): string {\n if (global) {\n const agentConfig = agents[agent]\n return agentConfig.globalSkillsDir\n }\n const shared = getSharedSkillsDir(cwd)\n if (shared)\n return shared\n const agentConfig = agents[agent]\n return join(cwd, agentConfig.skillsDir)\n}\n\n/** Try resolving a `link:` dependency to local package docs. Returns null if not a link dep or resolution fails. 
*/\nexport async function resolveLocalDep(packageName: string, cwd: string): Promise<ResolvedPackage | null> {\n const result = readPackageJsonSafe(join(cwd, 'package.json'))\n if (!result)\n return null\n\n const pkg = result.parsed\n const deps = { ...pkg.dependencies as Record<string, string>, ...pkg.devDependencies as Record<string, string> }\n const depVersion = deps[packageName]\n\n if (!depVersion?.startsWith('link:'))\n return null\n\n const localPath = resolve(cwd, depVersion.slice(5))\n return resolveLocalPackageDocs(localPath)\n}\n\n/** Detect CHANGELOG.md in a package directory or cached releases */\nexport function detectChangelog(pkgDir: string | null, cacheDir?: string): string | false {\n if (pkgDir) {\n const found = ['CHANGELOG.md', 'changelog.md'].find(f => existsSync(join(pkgDir, f)))\n if (found)\n return `pkg/${found}`\n }\n // Also check cached releases/CHANGELOG.md (fetched from GitHub)\n if (cacheDir && existsSync(join(cacheDir, 'releases', 'CHANGELOG.md')))\n return 'releases/CHANGELOG.md'\n return false\n}\n\n// ── Shared pipeline functions ──\n\nexport interface IndexDoc {\n id: string\n content: string\n metadata: Record<string, any>\n}\n\nexport interface FetchResult {\n docSource: string\n docsType: 'llms.txt' | 'readme' | 'docs'\n docsToIndex: IndexDoc[]\n hasIssues: boolean\n hasDiscussions: boolean\n hasReleases: boolean\n warnings: string[]\n /** Parsed GitHub owner/repo for repo-level cache */\n repoInfo?: { owner: string, repo: string }\n /** Whether this result was served from cache (no fresh fetches) */\n usedCache: boolean\n}\n\n/** Fetch and cache all resources for a package (docs cascade + issues + discussions + releases) */\nexport async function fetchAndCacheResources(opts: {\n packageName: string\n resolved: ResolvedPackage\n version: string\n useCache: boolean\n features?: FeaturesConfig\n /** Lower-bound date for release/issue/discussion collection (ISO date) */\n from?: string\n onProgress: (message: string) => 
void\n}): Promise<FetchResult> {\n const { packageName, resolved, version, onProgress } = opts\n const features = opts.features ?? readConfig().features ?? defaultFeatures\n\n // Retry fetch if cache is README-only but richer sources exist (likely transient failure)\n const cacheInvalidated = opts.useCache\n && resolved.crawlUrl\n && detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl).docsType === 'readme'\n const useCache = opts.useCache && !cacheInvalidated\n let docSource: string = resolved.readmeUrl || 'readme'\n let docsType: 'llms.txt' | 'readme' | 'docs' = 'readme'\n const docsToIndex: IndexDoc[] = []\n const warnings: string[] = []\n if (cacheInvalidated)\n warnings.push(`Retrying crawl for ${resolved.crawlUrl} (previous attempt only cached README)`)\n\n if (!useCache) {\n const cachedDocs: Array<{ path: string, content: string }> = []\n const isFrameworkDoc = (path: string) => filterFrameworkDocs([path], packageName).length > 0\n\n // Try versioned git docs first\n if (resolved.gitDocsUrl && resolved.repoUrl) {\n const gh = parseGitHubUrl(resolved.repoUrl)\n if (gh) {\n onProgress('Fetching git docs')\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, version, packageName)\n if (gitDocs?.fallback) {\n warnings.push(`Docs fetched from ${gitDocs.ref} branch (no tag found for v${version})`)\n }\n if (gitDocs && gitDocs.files.length > 0) {\n const BATCH_SIZE = 20\n const results: Array<{ file: string, content: string } | null> = []\n\n for (let i = 0; i < gitDocs.files.length; i += BATCH_SIZE) {\n const batch = gitDocs.files.slice(i, i + BATCH_SIZE)\n onProgress(`Downloading docs ${Math.min(i + BATCH_SIZE, gitDocs.files.length)}/${gitDocs.files.length} from ${gitDocs.ref}`)\n const batchResults = await Promise.all(\n batch.map(async (file) => {\n const url = `${gitDocs.baseUrl}/${file}`\n const content = await fetchGitHubRaw(url)\n if (!content)\n return null\n return { file, content }\n }),\n )\n results.push(...batchResults)\n }\n\n 
for (const r of results) {\n if (r) {\n const stripped = gitDocs.docsPrefix ? r.file.replace(gitDocs.docsPrefix, '') : r.file\n const cachePath = stripped.startsWith('docs/') ? stripped : `docs/${stripped}`\n cachedDocs.push({ path: cachePath, content: r.content })\n docsToIndex.push({\n id: cachePath,\n content: r.content,\n metadata: { package: packageName, source: cachePath, type: 'doc' },\n })\n }\n }\n\n const downloaded = results.filter(Boolean).length\n if (downloaded > 0) {\n // Shallow git-docs: if < threshold and llms.txt exists, discard and fall through\n if (isShallowGitDocs(downloaded) && resolved.llmsUrl) {\n onProgress(`Shallow git-docs (${downloaded} files), trying llms.txt`)\n cachedDocs.length = 0\n docsToIndex.length = 0\n }\n else {\n docSource = `${resolved.repoUrl}/tree/${gitDocs.ref}/docs`\n docsType = 'docs'\n writeToCache(packageName, version, cachedDocs)\n\n // Always cache llms.txt alongside good git-docs as supplementary reference\n if (resolved.llmsUrl) {\n onProgress('Caching supplementary llms.txt')\n const llmsContent = await fetchLlmsTxt(resolved.llmsUrl)\n if (llmsContent) {\n const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin\n const supplementary: Array<{ path: string, content: string }> = [\n { path: 'llms.txt', content: normalizeLlmsLinks(llmsContent.raw, baseUrl) },\n ]\n if (llmsContent.links.length > 0) {\n onProgress(`Downloading ${llmsContent.links.length} supplementary docs`)\n const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {\n onProgress(`Downloading supplementary doc ${done + 1}/${total}`)\n })\n for (const doc of docs) {\n if (!isFrameworkDoc(doc.url))\n continue\n const localPath = doc.url.startsWith('/') ? 
doc.url.slice(1) : doc.url\n supplementary.push({ path: join('llms-docs', ...localPath.split('/')), content: doc.content })\n }\n }\n writeToCache(packageName, version, supplementary)\n }\n }\n }\n }\n }\n }\n }\n\n // Try website crawl\n if (resolved.crawlUrl && cachedDocs.length === 0) {\n onProgress('Crawling website')\n const crawledDocs = await fetchCrawledDocs(resolved.crawlUrl, onProgress).catch((err) => {\n warnings.push(`Crawl failed for ${resolved.crawlUrl}: ${err?.message || err}`)\n return []\n })\n if (crawledDocs.length === 0 && resolved.crawlUrl) {\n warnings.push(`Crawl returned 0 docs from ${resolved.crawlUrl}`)\n }\n if (crawledDocs.length > 0) {\n for (const doc of crawledDocs) {\n if (!isFrameworkDoc(doc.path))\n continue\n cachedDocs.push(doc)\n docsToIndex.push({\n id: doc.path,\n content: doc.content,\n metadata: { package: packageName, source: doc.path, type: 'doc' },\n })\n }\n docSource = resolved.crawlUrl\n docsType = 'docs'\n writeToCache(packageName, version, cachedDocs)\n }\n }\n\n // Try llms.txt\n if (resolved.llmsUrl && cachedDocs.length === 0) {\n onProgress('Fetching llms.txt')\n const llmsContent = await fetchLlmsTxt(resolved.llmsUrl)\n if (llmsContent) {\n docSource = resolved.llmsUrl!\n docsType = 'llms.txt'\n const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin\n cachedDocs.push({ path: 'llms.txt', content: normalizeLlmsLinks(llmsContent.raw, baseUrl) })\n\n if (llmsContent.links.length > 0) {\n onProgress(`Downloading ${llmsContent.links.length} linked docs`)\n const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {\n onProgress(`Downloading linked doc ${done + 1}/${total}`)\n })\n\n for (const doc of docs) {\n if (!isFrameworkDoc(doc.url))\n continue\n const localPath = doc.url.startsWith('/') ? 
doc.url.slice(1) : doc.url\n const cachePath = join('docs', ...localPath.split('/'))\n cachedDocs.push({ path: cachePath, content: doc.content })\n docsToIndex.push({\n id: doc.url,\n content: doc.content,\n metadata: { package: packageName, source: cachePath, type: 'doc' },\n })\n }\n if (docs.length > 0)\n docsType = 'docs'\n }\n\n writeToCache(packageName, version, cachedDocs)\n }\n }\n\n // Try crawling docsUrl as fallback (when no actual doc files from git/crawl/llms.txt)\n if (resolved.docsUrl && !cachedDocs.some(d => d.path.startsWith('docs/'))) {\n const crawlPattern = resolved.crawlUrl || toCrawlPattern(resolved.docsUrl)\n onProgress('Crawling docs site')\n const crawlMaxPages = resolved.crawlUrl ? 200 : 400\n const crawledDocs = await fetchCrawledDocs(crawlPattern, onProgress, crawlMaxPages).catch((err) => {\n warnings.push(`Crawl failed for ${crawlPattern}: ${err?.message || err}`)\n return []\n })\n if (crawledDocs.length > 0) {\n for (const doc of crawledDocs) {\n if (!isFrameworkDoc(doc.path))\n continue\n cachedDocs.push(doc)\n docsToIndex.push({\n id: doc.path,\n content: doc.content,\n metadata: { package: packageName, source: doc.path, type: 'doc' },\n })\n }\n docSource = crawlPattern\n docsType = 'docs'\n writeToCache(packageName, version, cachedDocs)\n }\n }\n\n // Fallback to README\n if (resolved.readmeUrl && cachedDocs.length === 0) {\n onProgress('Fetching README')\n const content = await fetchReadmeContent(resolved.readmeUrl)\n if (content) {\n cachedDocs.push({ path: 'docs/README.md', content })\n docsToIndex.push({\n id: 'README.md',\n content,\n metadata: { package: packageName, source: 'docs/README.md', type: 'doc' },\n })\n writeToCache(packageName, version, cachedDocs)\n }\n }\n\n // Generate docs index if we have multiple doc files\n if (docsType !== 'readme' && cachedDocs.filter(d => d.path.startsWith('docs/') && d.path.endsWith('.md')).length > 1) {\n const docsIndex = generateDocsIndex(cachedDocs)\n if (docsIndex) {\n 
writeToCache(packageName, version, [{ path: 'docs/_INDEX.md', content: docsIndex }])\n }\n }\n }\n else {\n // Detect docs type from cache\n onProgress('Loading cached docs')\n const detected = detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl)\n docsType = detected.docsType\n if (detected.docSource)\n docSource = detected.docSource\n\n // Load cached docs for indexing if db doesn't exist yet\n const dbPath = getPackageDbPath(packageName, version)\n if (!existsSync(dbPath)) {\n onProgress('Reading cached docs for indexing')\n const cached = readCachedDocs(packageName, version)\n for (const doc of cached) {\n docsToIndex.push({\n id: doc.path,\n content: doc.content,\n metadata: { package: packageName, source: doc.path, ...classifyCachedDoc(doc.path) },\n })\n }\n }\n\n // Backfill docs index for caches created before this feature\n if (docsType !== 'readme' && !existsSync(join(getCacheDir(packageName, version), 'docs', '_INDEX.md'))) {\n onProgress('Generating docs index')\n const cached = readCachedDocs(packageName, version)\n const docFiles = cached.filter(d => d.path.startsWith('docs/') && d.path.endsWith('.md'))\n if (docFiles.length > 1) {\n const docsIndex = generateDocsIndex(cached)\n if (docsIndex) {\n writeToCache(packageName, version, [{ path: 'docs/_INDEX.md', content: docsIndex }])\n }\n }\n }\n }\n\n // Parse repo info once for repo-level caching\n const gh = resolved.repoUrl ? parseGitHubUrl(resolved.repoUrl) : null\n const repoInfo = gh ? { owner: gh.owner, repo: gh.repo } : undefined\n\n // Determine where repo-level data lives (repo cache if available, else package cache)\n const repoCacheDir = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : null\n const cacheDir = getCacheDir(packageName, version)\n const issuesDir = repoCacheDir ? join(repoCacheDir, 'issues') : join(cacheDir, 'issues')\n const discussionsDir = repoCacheDir ? 
join(repoCacheDir, 'discussions') : join(cacheDir, 'discussions')\n const releasesPath = repoCacheDir ? join(repoCacheDir, 'releases') : join(cacheDir, 'releases')\n\n // Issues (independent of useCache — has its own existsSync guard)\n if (features.issues && gh && isGhAvailable() && !existsSync(issuesDir)) {\n onProgress('Fetching issues via GitHub API')\n const issues = await fetchGitHubIssues(gh.owner, gh.repo, 30, resolved.releasedAt, opts.from).catch(() => [])\n if (issues.length > 0) {\n onProgress(`Caching ${issues.length} issues`)\n const issueDocs = [\n ...issues.map(issue => ({\n path: `issues/issue-${issue.number}.md`,\n content: formatIssueAsMarkdown(issue),\n })),\n {\n path: 'issues/_INDEX.md',\n content: generateIssueIndex(issues),\n },\n ]\n if (repoInfo)\n writeToRepoCache(repoInfo.owner, repoInfo.repo, issueDocs)\n else\n writeToCache(packageName, version, issueDocs)\n for (const issue of issues) {\n docsToIndex.push({\n id: `issue-${issue.number}`,\n content: sanitizeMarkdown(`#${issue.number}: ${issue.title}\\n\\n${issue.body || ''}`),\n metadata: { package: packageName, source: `issues/issue-${issue.number}.md`, type: 'issue', number: issue.number },\n })\n }\n }\n }\n\n // Discussions\n if (features.discussions && gh && isGhAvailable() && !existsSync(discussionsDir)) {\n onProgress('Fetching discussions via GitHub API')\n const discussions = await fetchGitHubDiscussions(gh.owner, gh.repo, 20, resolved.releasedAt, opts.from).catch(() => [])\n if (discussions.length > 0) {\n onProgress(`Caching ${discussions.length} discussions`)\n const discussionDocs = [\n ...discussions.map(d => ({\n path: `discussions/discussion-${d.number}.md`,\n content: formatDiscussionAsMarkdown(d),\n })),\n {\n path: 'discussions/_INDEX.md',\n content: generateDiscussionIndex(discussions),\n },\n ]\n if (repoInfo)\n writeToRepoCache(repoInfo.owner, repoInfo.repo, discussionDocs)\n else\n writeToCache(packageName, version, discussionDocs)\n for (const d of discussions) 
{\n docsToIndex.push({\n id: `discussion-${d.number}`,\n content: sanitizeMarkdown(`#${d.number}: ${d.title}\\n\\n${d.body || ''}`),\n metadata: { package: packageName, source: `discussions/discussion-${d.number}.md`, type: 'discussion', number: d.number },\n })\n }\n }\n }\n\n // Releases (GitHub releases + blog releases + CHANGELOG → unified releases/ dir)\n if (features.releases && gh && isGhAvailable() && !existsSync(releasesPath)) {\n onProgress('Fetching releases via GitHub API')\n const changelogRef = isPrerelease(version) ? getPrereleaseChangelogRef(packageName) : undefined\n const releaseDocs = await fetchReleaseNotes(gh.owner, gh.repo, version, resolved.gitRef, packageName, opts.from, changelogRef).catch(() => [])\n\n // Fetch blog releases into same releases/ dir\n let blogDocs: Array<{ path: string, content: string }> = []\n if (getBlogPreset(packageName)) {\n onProgress('Fetching blog release notes')\n blogDocs = await fetchBlogReleases(packageName, version).catch(() => [])\n }\n\n const allDocs = [...releaseDocs, ...blogDocs]\n\n // Parse blog release metadata for index generation\n const blogEntries = blogDocs\n .filter(d => !d.path.endsWith('_INDEX.md'))\n .map((d) => {\n const versionMatch = d.path.match(/blog-(.+)\\.md$/)\n const fm = parseFrontmatter(d.content)\n return {\n version: versionMatch?.[1] ?? '',\n title: fm.title ?? `Release ${versionMatch?.[1]}`,\n date: fm.date ?? '',\n }\n })\n .filter(b => b.version)\n\n // Parse GitHub releases for index (extract from frontmatter)\n const ghReleases = releaseDocs\n .filter(d => d.path.startsWith('releases/') && !d.path.endsWith('CHANGELOG.md'))\n .map((d) => {\n const fm = parseFrontmatter(d.content)\n const tag = fm.tag ?? ''\n const name = fm.name ?? tag\n const published = fm.published ?? 
''\n return { id: 0, tag, name, prerelease: false, createdAt: published, publishedAt: published, markdown: '' }\n })\n .filter(r => r.tag)\n\n const hasChangelog = allDocs.some(d => d.path === 'releases/CHANGELOG.md')\n\n // Generate unified _INDEX.md\n if (ghReleases.length > 0 || blogEntries.length > 0) {\n allDocs.push({\n path: 'releases/_INDEX.md',\n content: generateReleaseIndex({ releases: ghReleases, packageName, blogReleases: blogEntries, hasChangelog }),\n })\n }\n\n if (allDocs.length > 0) {\n onProgress(`Caching ${allDocs.length} releases`)\n if (repoInfo)\n writeToRepoCache(repoInfo.owner, repoInfo.repo, allDocs)\n else\n writeToCache(packageName, version, allDocs)\n for (const doc of allDocs) {\n docsToIndex.push({\n id: doc.path,\n content: doc.content,\n metadata: { package: packageName, source: doc.path, type: 'release' },\n })\n }\n }\n }\n\n return {\n docSource,\n docsType,\n docsToIndex,\n hasIssues: features.issues && existsSync(issuesDir),\n hasDiscussions: features.discussions && existsSync(discussionsDir),\n hasReleases: features.releases && existsSync(releasesPath),\n warnings,\n repoInfo,\n usedCache: useCache,\n }\n}\n\n/**\n * Extract the parent document ID from a chunk ID.\n * Chunk IDs have the form \"docId#chunk-N\"; non-chunk IDs return as-is.\n */\nfunction parentDocId(id: string): string {\n const idx = id.indexOf('#chunk-')\n return idx === -1 ? id : id.slice(0, idx)\n}\n\n/** Cap and sort docs by type priority, mutates and truncates allDocs in place */\nfunction capDocs(allDocs: IndexDoc[], max: number, onProgress: (msg: string) => void): void {\n if (allDocs.length <= max)\n return\n const TYPE_PRIORITY: Record<string, number> = { doc: 0, issue: 1, discussion: 2, release: 3, source: 4, types: 5 }\n allDocs.sort((a, b) => {\n const ta = TYPE_PRIORITY[a.metadata?.type || 'doc'] ?? 3\n const tb = TYPE_PRIORITY[b.metadata?.type || 'doc'] ?? 
3\n if (ta !== tb)\n return ta - tb\n return a.id.localeCompare(b.id)\n })\n onProgress(`Indexing capped at ${max}/${allDocs.length} docs (prioritized by type)`)\n allDocs.length = max\n}\n\n/** Index all resources into the search database, with incremental support */\nexport async function indexResources(opts: {\n packageName: string\n version: string\n cwd: string\n docsToIndex: IndexDoc[]\n features?: FeaturesConfig\n onProgress: (message: string) => void\n}): Promise<void> {\n const { packageName, version, cwd, onProgress } = opts\n const features = opts.features ?? readConfig().features ?? defaultFeatures\n\n if (!features.search)\n return\n\n const dbPath = getPackageDbPath(packageName, version)\n const dbExists = existsSync(dbPath)\n\n const allDocs = [...opts.docsToIndex]\n\n // Add entry files\n const pkgDir = resolvePkgDir(packageName, cwd, version)\n if (features.search && pkgDir) {\n onProgress('Scanning exports')\n const entryFiles = await resolveEntryFiles(pkgDir)\n for (const e of entryFiles) {\n allDocs.push({\n id: e.path,\n content: e.content,\n metadata: { package: packageName, source: `pkg/${e.path}`, type: e.type },\n })\n }\n }\n\n if (allDocs.length === 0)\n return\n\n capDocs(allDocs, MAX_INDEX_DOCS, onProgress)\n\n // Full build when no existing DB\n if (!dbExists) {\n onProgress(`Building search index (${allDocs.length} docs)`)\n try {\n await createIndex(allDocs, {\n dbPath,\n onProgress: ({ phase, current, total }) => {\n if (phase === 'storing') {\n const d = allDocs[current - 1]\n const type = d?.metadata?.type === 'source' || d?.metadata?.type === 'types' ? 
'code' : (d?.metadata?.type || 'doc')\n onProgress(`Storing ${type} (${current}/${total})`)\n }\n else if (phase === 'embedding') {\n onProgress(`Creating embeddings (${current}/${total})`)\n }\n },\n })\n }\n catch (err) {\n if (err instanceof SearchDepsUnavailableError)\n onProgress('Search indexing skipped (native deps unavailable)')\n else\n throw err\n }\n return\n }\n\n // Incremental update: diff incoming docs against existing index\n let existingIds: string[]\n try {\n existingIds = await listIndexIds({ dbPath })\n }\n catch (err) {\n if (err instanceof SearchDepsUnavailableError) {\n onProgress('Search indexing skipped (native deps unavailable)')\n return\n }\n throw err\n }\n\n // Group existing chunk IDs by parent doc ID\n const existingParentIds = new Set(existingIds.map(parentDocId))\n const incomingIds = new Set(allDocs.map(d => d.id))\n\n // Docs to add: in incoming but not in existing index\n const newDocs = allDocs.filter(d => !existingParentIds.has(d.id))\n\n // Chunk IDs to remove: their parent doc is no longer in incoming set\n const removeIds = existingIds.filter(id => !incomingIds.has(parentDocId(id)))\n\n if (newDocs.length === 0 && removeIds.length === 0) {\n onProgress('Search index up to date')\n return\n }\n\n const parts: string[] = []\n if (newDocs.length > 0)\n parts.push(`+${newDocs.length} new`)\n if (removeIds.length > 0)\n parts.push(`-${removeIds.length} stale`)\n onProgress(`Updating search index (${parts.join(', ')})`)\n\n try {\n await createIndex(newDocs, {\n dbPath,\n removeIds,\n onProgress: ({ phase, current, total }) => {\n if (phase === 'storing') {\n const d = newDocs[current - 1]\n const type = d?.metadata?.type === 'source' || d?.metadata?.type === 'types' ? 
'code' : (d?.metadata?.type || 'doc')\n onProgress(`Storing ${type} (${current}/${total})`)\n }\n else if (phase === 'embedding') {\n onProgress(`Creating embeddings (${current}/${total})`)\n }\n },\n })\n }\n catch (err) {\n if (err instanceof SearchDepsUnavailableError)\n onProgress('Search indexing skipped (native deps unavailable)')\n else\n throw err\n }\n}\n\n/**\n * Eject references: copy cached files as real files into references/ dir.\n * Used for portable skills (git repos, sharing). Replaces symlinks with copies.\n * Does NOT copy pkg files — those reference node_modules directly.\n */\nexport function ejectReferences(skillDir: string, packageName: string, cwd: string, version: string, docsType: string, features?: FeaturesConfig, repoInfo?: { owner: string, repo: string }): void {\n const f = features ?? readConfig().features ?? defaultFeatures\n const cacheDir = getCacheDir(packageName, version)\n const refsDir = join(skillDir, 'references')\n // Repo-level data source (falls back to package cache)\n const repoDir = repoInfo ? 
getRepoCacheDir(repoInfo.owner, repoInfo.repo) : cacheDir\n\n // Copy cached docs (skip pkg — eject is for portable sharing, pkg references node_modules)\n if (!hasShippedDocs(packageName, cwd, version) && docsType !== 'readme')\n copyCachedSubdir(cacheDir, refsDir, 'docs')\n\n if (f.issues)\n copyCachedSubdir(repoDir, refsDir, 'issues')\n if (f.discussions)\n copyCachedSubdir(repoDir, refsDir, 'discussions')\n if (f.releases)\n copyCachedSubdir(repoDir, refsDir, 'releases')\n}\n\n/** Recursively copy a cached subdirectory into the references dir */\nfunction copyCachedSubdir(cacheDir: string, refsDir: string, subdir: string): void {\n const srcDir = join(cacheDir, subdir)\n if (!existsSync(srcDir))\n return\n\n const destDir = join(refsDir, subdir)\n mkdirSync(destDir, { recursive: true })\n\n function walk(dir: string, rel: string) {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n const srcPath = join(dir, entry.name)\n const destPath = join(destDir, rel ? `${rel}/${entry.name}` : entry.name)\n if (entry.isDirectory()) {\n mkdirSync(destPath, { recursive: true })\n walk(srcPath, rel ? `${rel}/${entry.name}` : entry.name)\n }\n else {\n copyFileSync(srcPath, destPath)\n }\n }\n }\n\n walk(srcDir, '')\n}\n\n// ── Shared UI + LLM functions (used by sync.ts, sync-git.ts, sync-parallel.ts, etc.) ──\n\n/**\n * Check if .gitignore has `.skilld` entry.\n * If missing, prompt to add it. 
Skipped for global installs.\n */\nexport async function ensureGitignore(skillsDir: string, cwd: string, isGlobal: boolean): Promise<void> {\n if (isGlobal)\n return\n\n const gitignorePath = join(cwd, '.gitignore')\n const pattern = '.skilld'\n\n // Check if already ignored\n if (existsSync(gitignorePath)) {\n const content = readFileSync(gitignorePath, 'utf-8')\n if (content.split('\\n').some(line => line.trim() === pattern))\n return\n }\n\n // Non-interactive: auto-add (default is true anyway)\n if (!isInteractive()) {\n const entry = `\\n# Skilld references (recreated by \\`skilld install\\`)\\n${pattern}\\n`\n if (existsSync(gitignorePath)) {\n const existing = readFileSync(gitignorePath, 'utf-8')\n const separator = existing.endsWith('\\n') ? '' : '\\n'\n appendFileSync(gitignorePath, `${separator}${entry}`)\n }\n else {\n writeFileSync(gitignorePath, entry)\n }\n return\n }\n\n // Show guidance\n const relSkillsDir = relative(cwd, skillsDir) || '.'\n p.log.info(\n `\\x1B[1mGit guidance:\\x1B[0m\\n`\n + ` \\x1B[32m✓\\x1B[0m Commit: \\x1B[36m${relSkillsDir}/*/SKILL.md\\x1B[0m\\n`\n + ` \\x1B[32m✓\\x1B[0m Commit: \\x1B[36m${relSkillsDir}/skilld-lock.yaml\\x1B[0m\\n`\n + ` \\x1B[31m✗\\x1B[0m Ignore: \\x1B[36m${pattern}\\x1B[0m \\x1B[90m(recreated by \\`skilld install\\`)\\x1B[0m`,\n )\n\n const add = await p.confirm({\n message: `Add \\`${pattern}\\` to .gitignore?`,\n initialValue: true,\n })\n\n if (p.isCancel(add) || !add)\n return\n\n const entry = `\\n# Skilld references (recreated by \\`skilld install\\`)\\n${pattern}\\n`\n if (existsSync(gitignorePath)) {\n const existing = readFileSync(gitignorePath, 'utf-8')\n const separator = existing.endsWith('\\n') ? 
'' : '\\n'\n appendFileSync(gitignorePath, `${separator}${entry}`)\n }\n else {\n writeFileSync(gitignorePath, entry)\n }\n\n p.log.success('Updated .gitignore')\n}\n\nexport const SKILLD_MARKER_START = '<!-- skilld -->'\nexport const SKILLD_MARKER_END = '<!-- /skilld -->'\n\nconst DEFAULT_SKILL_HINT = 'Before modifying code, evaluate each installed skill against the current task.\\nFor each skill, determine YES/NO relevance and invoke all YES skills before proceeding.'\n\nfunction getSkillInstructions(agent: AgentType): string {\n const hint = agents[agent].skillActivationHint || DEFAULT_SKILL_HINT\n return `${SKILLD_MARKER_START}\\n${hint}\\n${SKILLD_MARKER_END}`\n}\n\nfunction getMdcSkillInstructions(agent: AgentType): string {\n const hint = agents[agent].skillActivationHint || DEFAULT_SKILL_HINT\n return `---\\ndescription: \"Activates installed skilld skills before code changes\"\\nalwaysApply: true\\n---\\n\\n${hint}`\n}\n\n/**\n * Check if agent instruction file has skilld skill-activation snippet.\n * If missing, prompt to add it. Skipped for global installs or agents without an instructionFile.\n */\nexport async function ensureAgentInstructions(agent: AgentType, cwd: string, isGlobal: boolean): Promise<void> {\n if (isGlobal)\n return\n\n const agentConfig = agents[agent]\n if (!agentConfig.instructionFile)\n return\n\n const filePath = join(cwd, agentConfig.instructionFile)\n const isMdc = agentConfig.instructionFile.endsWith('.mdc')\n\n // MDC format: dedicated file, no markers needed\n if (isMdc) {\n if (existsSync(filePath))\n return\n\n const content = `${getMdcSkillInstructions(agent)}\\n`\n\n if (!isInteractive()) {\n mkdirSync(join(filePath, '..'), { recursive: true })\n writeFileSync(filePath, content)\n return\n }\n\n p.note(\n `This tells your agent to check installed skills before making\\n`\n + `code changes. 
Without it, skills are available but may not\\n`\n + `activate automatically.\\n`\n + `\\n`\n + `\\x1B[90m${getMdcSkillInstructions(agent)}\\x1B[0m`,\n `Create ${agentConfig.instructionFile}`,\n )\n\n const add = await p.confirm({\n message: `Create ${agentConfig.instructionFile} with skill activation instructions?`,\n initialValue: true,\n })\n\n if (p.isCancel(add) || !add)\n return\n\n mkdirSync(join(filePath, '..'), { recursive: true })\n writeFileSync(filePath, content)\n p.log.success(`Created ${agentConfig.instructionFile}`)\n return\n }\n\n // Check if marker already present\n if (existsSync(filePath)) {\n const content = readFileSync(filePath, 'utf-8')\n if (content.includes(SKILLD_MARKER_START))\n return\n }\n\n // Non-interactive: auto-add\n if (!isInteractive()) {\n if (existsSync(filePath)) {\n const existing = readFileSync(filePath, 'utf-8')\n const separator = existing.endsWith('\\n') ? '' : '\\n'\n appendFileSync(filePath, `${separator}\\n${getSkillInstructions(agent)}\\n`)\n }\n else {\n writeFileSync(filePath, `${getSkillInstructions(agent)}\\n`)\n }\n return\n }\n\n const fileExists = existsSync(filePath)\n const action = fileExists ? 'Append to' : 'Create'\n p.note(\n `This tells your agent to check installed skills before making\\n`\n + `code changes. Without it, skills are available but may not\\n`\n + `activate automatically.\\n`\n + `\\n`\n + `\\x1B[90m${getSkillInstructions(agent).replace(/\\n/g, '\\n')}\\x1B[0m`,\n `${action} ${agentConfig.instructionFile}`,\n )\n\n const add = await p.confirm({\n message: `${action} ${agentConfig.instructionFile} with skill activation instructions?`,\n initialValue: true,\n })\n\n if (p.isCancel(add) || !add)\n return\n\n if (existsSync(filePath)) {\n const existing = readFileSync(filePath, 'utf-8')\n const separator = existing.endsWith('\\n') ? 
'' : '\\n'\n appendFileSync(filePath, `${separator}\\n${getSkillInstructions(agent)}\\n`)\n }\n else {\n writeFileSync(filePath, `${getSkillInstructions(agent)}\\n`)\n }\n\n p.log.success(`Updated ${agentConfig.instructionFile}`)\n}\n\n/** Select LLM model for SKILL.md generation (independent of target agent) */\nexport async function selectModel(skipPrompt: boolean): Promise<OptimizeModel | null> {\n const config = readConfig()\n const available = await getAvailableModels()\n\n if (available.length === 0) {\n p.log.warn(NO_MODELS_MESSAGE)\n return null\n }\n\n // Use config model if set and available (only when not prompting)\n if (skipPrompt) {\n if (config.model && available.some(m => m.id === config.model))\n return config.model\n // Warn if configured model is unavailable (auth revoked, CLI uninstalled, etc.)\n if (config.model)\n p.log.warn(`Configured model \\x1B[36m${config.model}\\x1B[0m is unavailable — using auto-selected fallback`)\n return available.find(m => m.recommended)?.id ?? 
available[0]!.id\n }\n\n // Smart provider → model (skips provider step when only 1 provider)\n const choice = await pickModel(available)\n if (!choice)\n return null\n\n // Remember choice for next time\n updateConfig({ model: choice as OptimizeModel })\n\n return choice as OptimizeModel\n}\n\n/** Default sections when model is pre-set (non-interactive) */\nexport const DEFAULT_SECTIONS: SkillSection[] = ['best-practices', 'api-changes']\n\nexport async function selectSkillSections(message = 'Enhance SKILL.md'): Promise<{ sections: SkillSection[], customPrompt?: CustomPrompt, cancelled: boolean }> {\n p.log.info('Budgets adapt to package release density.')\n const selected = await p.multiselect({\n message,\n options: [\n { label: 'API changes', value: 'api-changes' as SkillSection, hint: 'new/deprecated APIs from version history' },\n { label: 'Best practices', value: 'best-practices' as SkillSection, hint: 'gotchas, pitfalls, patterns' },\n { label: 'Custom section', value: 'custom' as SkillSection, hint: 'add your own section' },\n ],\n initialValues: DEFAULT_SECTIONS,\n required: false,\n })\n\n if (p.isCancel(selected))\n return { sections: [], cancelled: true }\n\n const sections = selected as SkillSection[]\n if (sections.length === 0)\n return { sections: [], cancelled: false }\n\n // Show per-section budget based on selection count\n if (sections.length > 1) {\n const n = sections.length\n const budgetLines: string[] = []\n for (const s of sections) {\n switch (s) {\n case 'api-changes':\n budgetLines.push(` API changes ${maxItems(6, 12, n)}–${maxItems(6, Math.round(12 * 1.6), n)} items (adapts to release churn)`)\n break\n case 'best-practices':\n budgetLines.push(` Best practices ${maxItems(4, 10, n)}–${maxItems(4, Math.round(10 * 1.3), n)} items`)\n break\n case 'custom':\n budgetLines.push(` Custom ≤${maxLines(50, 80, n)} lines`)\n break\n }\n }\n p.log.info(`Budget (${n} sections):\\n${budgetLines.join('\\n')}`)\n }\n\n let customPrompt: CustomPrompt 
| undefined\n if (sections.includes('custom')) {\n const heading = await p.text({\n message: 'Section heading',\n placeholder: 'e.g. \"Migration from v2\" or \"SSR Patterns\"',\n })\n if (p.isCancel(heading))\n return { sections: [], cancelled: true }\n\n const body = await p.text({\n message: 'Instructions for this section',\n placeholder: 'e.g. \"Document breaking changes and migration steps from v2 to v3\"',\n })\n if (p.isCancel(body))\n return { sections: [], cancelled: true }\n\n customPrompt = { heading: heading as string, body: body as string }\n }\n\n return { sections, customPrompt, cancelled: false }\n}\n\nexport interface LlmConfig {\n model: OptimizeModel\n sections: SkillSection[]\n customPrompt?: CustomPrompt\n promptOnly?: boolean\n}\n\n/** Context about the existing skill when running an update (not a fresh add). */\nexport interface UpdateContext {\n oldVersion?: string\n newVersion?: string\n syncedAt?: string\n /** Whether the existing SKILL.md was LLM-enhanced (has generated_by in frontmatter). */\n wasEnhanced: boolean\n /** Pre-computed bump type (used by parallel sync to pass the max across packages). 
*/\n bumpType?: string\n}\n\n/**\n * Resolve sections + model for LLM enhancement.\n * If presetModel is provided, uses DEFAULT_SECTIONS without prompting.\n * Returns null if cancelled or no sections/model selected.\n */\nexport async function selectLlmConfig(presetModel?: OptimizeModel, message?: string, updateCtx?: UpdateContext): Promise<LlmConfig | null> {\n if (presetModel) {\n // Validate preset model is still available (env/OAuth may have changed)\n const available = await getAvailableModels()\n if (available.some(m => m.id === presetModel))\n return { model: presetModel, sections: DEFAULT_SECTIONS }\n // Fall through to interactive selection if preset unavailable\n if (!isInteractive())\n return null\n }\n\n // Non-interactive (CI, agent, no TTY): skip generation unless model explicitly provided\n if (!isInteractive()) {\n return null\n }\n\n // Resolve default model (configured or recommended) without prompting\n const config = readConfig()\n const available = await getAvailableModels()\n\n if (available.length === 0) {\n p.log.warn(NO_MODELS_MESSAGE)\n return null\n }\n\n // Inline the skipPrompt logic from selectModel to avoid a second getAvailableModels() call\n let defaultModel: OptimizeModel\n if (config.model && available.some(m => m.id === config.model)) {\n defaultModel = config.model\n }\n else {\n if (config.model)\n p.log.warn(`Configured model \\x1B[36m${config.model}\\x1B[0m is unavailable — using auto-selected fallback`)\n defaultModel = (available.find(m => m.recommended)?.id ?? available[0]!.id) as OptimizeModel\n }\n\n const defaultModelName = getModelName(defaultModel)\n const defaultModelInfo = available.find(m => m.id === defaultModel)\n const providerHint = defaultModelInfo?.providerName ?? ''\n const sourceHint = config.model === defaultModel ? 'configured' : 'recommended'\n const defaultHint = providerHint ? 
`${providerHint} · ${sourceHint}` : sourceHint\n\n // Build update context hint for the prompt message\n let enhanceMessage = 'Enhance SKILL.md?'\n let defaultToSkip = false\n if (updateCtx) {\n const diff = updateCtx.bumpType\n ?? (updateCtx.oldVersion && updateCtx.newVersion ? semverDiff(updateCtx.oldVersion, updateCtx.newVersion) : null)\n const isSmallBump = diff === 'patch' || diff === 'prerelease' || diff === 'prepatch' || diff === 'preminor' || diff === 'premajor'\n\n const ageParts: string[] = []\n if (diff)\n ageParts.push(diff)\n if (updateCtx.syncedAt) {\n const syncedAtMs = new Date(updateCtx.syncedAt).getTime()\n if (Number.isFinite(syncedAtMs)) {\n const days = Math.floor((Date.now() - syncedAtMs) / 86_400_000)\n ageParts.push(days === 0 ? 'today' : days === 1 ? '1d ago' : `${days}d ago`)\n }\n }\n if (updateCtx.wasEnhanced)\n ageParts.push('LLM-enhanced')\n\n const versionHint = updateCtx.oldVersion && updateCtx.newVersion\n ? `${updateCtx.oldVersion} → ${updateCtx.newVersion}`\n : null\n const hint = [versionHint, ...ageParts].filter(Boolean).join(' · ')\n if (hint)\n enhanceMessage = `Enhance SKILL.md? \\x1B[90m(${hint})\\x1B[0m`\n\n // Default to Skip for patch/prerelease bumps on already-enhanced skills\n if (updateCtx.wasEnhanced && isSmallBump)\n defaultToSkip = true\n }\n\n const choice = await p.select({\n message: enhanceMessage,\n options: [\n { label: defaultModelName, value: 'default' as const, hint: defaultHint },\n { label: 'Different model', value: 'pick' as const, hint: 'choose another enhancement model' },\n { label: 'Prompt only', value: 'prompt' as const, hint: 'write prompts for manual use' },\n { label: 'Skip', value: 'skip' as const, hint: 'base skill with docs, issues, and types' },\n ],\n ...(defaultToSkip ? 
{ initialValue: 'skip' as const } : {}),\n })\n\n if (p.isCancel(choice))\n return null\n\n if (choice === 'skip')\n return null\n\n if (choice === 'prompt') {\n const { sections, customPrompt, cancelled } = await selectSkillSections(\n message ? `${message} (prompt only)` : 'Select sections for prompt generation',\n )\n if (cancelled || sections.length === 0)\n return null\n // model is unused for prompt-only but required by type — use defaultModel as placeholder\n return { model: defaultModel, sections, customPrompt, promptOnly: true }\n }\n\n let model: OptimizeModel\n if (choice === 'pick') {\n const picked = await pickModel(available)\n if (!picked)\n return null\n updateConfig({ model: picked as OptimizeModel })\n model = picked as OptimizeModel\n }\n else {\n model = defaultModel\n }\n if (!model)\n return null\n\n const modelName = getModelName(model)\n const { sections, customPrompt, cancelled } = await selectSkillSections(\n message ? `${message} (${modelName})` : `Enhance SKILL.md with ${modelName}`,\n )\n\n if (cancelled || sections.length === 0)\n return null\n\n return { model, sections, customPrompt }\n}\n\nexport interface EnhanceOptions {\n packageName: string\n version: string\n skillDir: string\n dirName?: string\n model: OptimizeModel\n resolved: { repoUrl?: string, llmsUrl?: string, releasedAt?: string, docsUrl?: string, gitRef?: string, dependencies?: Record<string, string>, distTags?: Record<string, { version: string, releasedAt?: string }> }\n relatedSkills: string[]\n hasIssues: boolean\n hasDiscussions: boolean\n hasReleases: boolean\n hasChangelog: string | false\n docsType: 'llms.txt' | 'readme' | 'docs'\n hasShippedDocs: boolean\n pkgFiles: string[]\n force?: boolean\n debug?: boolean\n sections?: SkillSection[]\n customPrompt?: CustomPrompt\n packages?: Array<{ name: string }>\n features?: FeaturesConfig\n eject?: boolean\n overheadLines?: number\n}\n\nexport async function enhanceSkillWithLLM(opts: EnhanceOptions): Promise<void> {\n 
const { packageName, version, skillDir, dirName, model, resolved, relatedSkills, hasIssues, hasDiscussions, hasReleases, hasChangelog, docsType, hasShippedDocs: shippedDocs, pkgFiles, force, debug, sections, customPrompt, packages, features, eject, overheadLines } = opts\n\n const effectiveFeatures = features\n\n const llmLog = p.taskLog({ title: `Agent exploring ${packageName}` })\n const docFiles = listReferenceFiles(skillDir)\n const hasGithub = hasIssues || hasDiscussions\n const { optimized, wasOptimized, usage, cost, warnings, error, debugLogsDir } = await optimizeDocs({\n packageName,\n skillDir,\n model,\n version,\n hasGithub,\n hasReleases,\n hasChangelog,\n docFiles,\n docsType,\n hasShippedDocs: shippedDocs,\n noCache: force,\n debug,\n sections,\n customPrompt,\n features: effectiveFeatures,\n pkgFiles,\n overheadLines,\n onProgress: createToolProgress(llmLog),\n })\n\n if (wasOptimized) {\n const costParts: string[] = []\n if (usage) {\n const totalK = Math.round(usage.totalTokens / 1000)\n costParts.push(`${totalK}k tokens`)\n }\n if (cost)\n costParts.push(`$${cost.toFixed(2)}`)\n const costSuffix = costParts.length > 0 ? 
` (${costParts.join(', ')})` : ''\n llmLog.success(`Generated best practices${costSuffix}`)\n if (debugLogsDir)\n p.log.info(`Debug logs: ${relative(process.cwd(), debugLogsDir)}`)\n if (error)\n p.log.warn(`\\x1B[33mPartial failure: ${error}\\x1B[0m`)\n if (warnings?.length) {\n for (const w of warnings)\n p.log.warn(`\\x1B[33m${w}\\x1B[0m`)\n }\n const skillMd = generateSkillMd({\n name: packageName,\n version,\n releasedAt: resolved.releasedAt,\n\n distTags: resolved.distTags,\n body: optimized,\n relatedSkills,\n hasIssues,\n hasDiscussions,\n hasReleases,\n hasChangelog,\n docsType,\n hasShippedDocs: shippedDocs,\n pkgFiles,\n generatedBy: getModelLabel(model),\n dirName,\n packages,\n repoUrl: resolved.repoUrl,\n features,\n eject,\n })\n writeFileSync(join(skillDir, 'SKILL.md'), skillMd)\n }\n else {\n llmLog.error(`Enhancement failed${error ? `: ${error}` : ''}`)\n }\n}\n\nexport interface WritePromptFilesOptions {\n packageName: string\n skillDir: string\n version: string\n hasIssues: boolean\n hasDiscussions: boolean\n hasReleases: boolean\n hasChangelog: string | false\n docsType: 'llms.txt' | 'readme' | 'docs'\n hasShippedDocs: boolean\n pkgFiles: string[]\n sections: SkillSection[]\n customPrompt?: CustomPrompt\n features?: FeaturesConfig\n overheadLines?: number\n}\n\n/**\n * Build and write PROMPT_*.md files for manual LLM use.\n * Returns the list of sections that had prompts written.\n */\nexport function writePromptFiles(opts: WritePromptFilesOptions): SkillSection[] {\n const { skillDir, sections, customPrompt, features } = opts\n const docFiles = listReferenceFiles(skillDir)\n const prompts = buildAllSectionPrompts({\n packageName: opts.packageName,\n skillDir,\n version: opts.version,\n hasIssues: opts.hasIssues,\n hasDiscussions: opts.hasDiscussions,\n hasReleases: opts.hasReleases,\n hasChangelog: opts.hasChangelog,\n docFiles,\n docsType: opts.docsType,\n hasShippedDocs: opts.hasShippedDocs,\n pkgFiles: opts.pkgFiles,\n customPrompt,\n 
features,\n overheadLines: opts.overheadLines,\n sections,\n })\n\n const skilldDir = join(skillDir, '.skilld')\n mkdirSync(skilldDir, { recursive: true })\n\n for (const [section, prompt] of prompts)\n writeFileSync(join(skilldDir, `PROMPT_${section}.md`), prompt)\n\n const written = [...prompts.keys()]\n if (written.length > 0) {\n const relDir = relative(process.cwd(), skillDir)\n const promptFiles = written.map(s => `PROMPT_${s}.md`).join(', ')\n const outputFileList = written.map(s => SECTION_OUTPUT_FILES[s]).join(', ')\n p.log.info(`Prompt files written to ${relDir}/.skilld/\\n\\x1B[2m\\x1B[3m Read each prompt file (${promptFiles}) in ${relDir}/.skilld/, read the\\n referenced files, then write your output to the matching file (${outputFileList}).\\n When done, run: skilld assemble\\x1B[0m`)\n }\n\n return written\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA4EA,MAAM,iBAAiB;AAEvB,MAAa,sBAAmD;CAC9D,OAAO;CACP,eAAe;CACf,eAAe;CACf,iBAAiB;CACjB,UAAU;CACV,YAAY;CACZ,SAAS;CACT,SAAS;CACV;;AAGD,SAAgB,kBAAkB,MAAiD;CACjF,MAAM,aAAa,KAAK,MAAM,4BAA4B;AAC1D,KAAI,WACF,QAAO;EAAE,MAAM;EAAS,QAAQ,OAAO,WAAW,GAAA;EAAK;CACzD,MAAM,kBAAkB,KAAK,MAAM,sCAAsC;AACzE,KAAI,gBACF,QAAO;EAAE,MAAM;EAAc,QAAQ,OAAO,gBAAgB,GAAA;EAAK;AACnE,KAAI,KAAK,WAAW,YAAY,CAC9B,QAAO,EAAE,MAAM,WAAW;AAC5B,QAAO,EAAE,MAAM,OAAO;;AAGxB,eAAsB,kBAAkB,aAAqB,WAAsC;CACjG,MAAM,UAAoB,EAAE;CAE5B,MAAM,UAAU,MAAM,gBAAgB,YAAY;AAClD,KAAI,CAAC,SAAS,aACZ,QAAO;CAET,MAAM,OAAO,IAAI,IAAI,OAAO,KAAK,QAAQ,aAAa,CAAC;AAEvD,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;CAGT,MAAM,OAAO,SAAS,UAAU;CAChC,MAAM,+BAAe,IAAI,KAAqB;AAC9C,KAAI,KACF,MAAK,MAAM,CAAC,SAAS,SAAS,OAAO,QAAQ,KAAK,OAAO,EAAE;AACzD,MAAI,KAAK,YACP,cAAa,IAAI,KAAK,aAAa,QAAQ;AAC7C,OAAK,MAAM,OAAO,cAAc,KAAK,SAAS,CAC5C,cAAa,IAAI,IAAI,MAAM,QAAQ;;CAIzC,MAAM,kBAAkB,YAAY,UAAU;CAC9C,MAAM,eAAe,IAAI,IAAI,gBAAgB;AAE7C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,aAAa,IAAI,IAAI;AACrC,MAAI,WAAW,aAAa,IAAI,QAAQ,CACtC,SAAQ,KAAK,QAAQ;;AAGzB,QAAO,QAAQ,MAAM,GAAG,EAAE;;;AAI5B,SAAgB,gBAAgB,aAAqB,SAAiB,UAAkD;AACtH,YAAW,aAAa,QAAQ;CAChC,MAA
M,eAAe,iBAAiB,aAAa,QAAQ;AAC3D,KAAI,WAAW,aAAa,CAC1B,QAAO,cAAc;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAExD,KAAI,UAAU;EACZ,MAAM,UAAU,gBAAgB,SAAS,OAAO,SAAS,KAAK;AAC9D,MAAI,WAAW,QAAQ,CACrB,QAAO,SAAS;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;;;AAKvD,SAAgB,kBAAkB,UAAkB,aAAqB,KAAa,SAAiB,UAAkB,eAA2D,UAA2B,UAAkD;CAC/P,MAAM,IAAI,YAAY,YAAY,CAAC,YAAY;AAC/C,KAAI;AACF,UAAQ,UAAU,aAAa,KAAK,QAAQ;AAC5C,eAAa,UAAU,aAAa,KAAK,QAAQ;AACjD,MAAI,CAAC,eAAe,aAAa,KAAK,QAAQ,IAAI,aAAa,SAC7D,eAAc,UAAU,aAAa,SAAS,OAAO;AAGvD,MAAI,EAAE,OACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,SAAS;MAEpE,eAAc,UAAU,aAAa,SAAS,SAAS;AAE3D,MAAI,EAAE,YACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,cAAc;MAEzE,eAAc,UAAU,aAAa,SAAS,cAAc;AAEhE,MAAI,EAAE,SACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,WAAW;MAEtE,eAAc,UAAU,aAAa,SAAS,WAAW;AAE7D,gBAAc,UAAU,aAAa,SAAS,WAAW;AAEzD,MAAI;QACG,MAAM,OAAO,cAChB,KAAI,IAAI,SAAS,YACf,cAAa,UAAU,IAAI,MAAM,KAAK,IAAI,QAAQ;;SAIpD;;;AAMR,SAAgB,eAAe,aAAqB,SAAiB,SAAkB,SAAoF;CACzK,MAAM,WAAW,YAAY,aAAa,QAAQ;AAClD,KAAI,WAAW,KAAK,UAAU,QAAQ,WAAW,CAAC,IAAI,WAAW,KAAK,UAAU,QAAQ,QAAQ,CAAC,CAC/F,QAAO;EACL,UAAU;EACV,WAAW,UAAU,GAAG,QAAQ,SAAS,QAAQ,SAAS;EAC3D;AAEH,KAAI,WAAW,KAAK,UAAU,WAAW,CAAC,CACxC,QAAO;EACL,UAAU;EACV,WAAW,WAAW;EACvB;AAEH,KAAI,WAAW,KAAK,UAAU,QAAQ,YAAY,CAAC,CACjD,QAAO,EAAE,UAAU,UAAU;AAE/B,QAAO,EAAE,UAAU,UAAU;;;AAS/B,SAAgB,oBACd,aACA,SACA,KACA,OACA,QAC4B;CAC5B,MAAM,gBAAgB,iBAAiB,aAAa,KAAK,QAAQ;AACjE,KAAI,cAAc,WAAW,EAC3B,QAAO;CAET,MAAM,UAAU,eAAe,KAAK,OAAO,OAAO;AAClD,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,MAAK,MAAM,WAAW,eAAe;AACnC,mBAAiB,SAAS,QAAQ,WAAW,QAAQ,SAAS;AAC9D,YAAU,SAAS,QAAQ,WAAW;GACpC;GACA;GACA,QAAQ;GACR,2BAAU,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC;GAC9C,WAAW;GACZ,CAAC;;AAGJ,KAAI,CAAC,OACH,iBAAgB,IAAI;AAEtB,QAAO;EAAE,SAAS;EAAe;EAAS;;;AAI5C,SAAgB,eAAe,KAAa,OAAkB,QAAyB;AACrF,KAAI,OAEF,QADoBA,QAAO,OACR;CAErB,MAAM,SAAS,mBAAmB,IAAI;AACtC,KAAI,OACF,QAAO;CACT,MAAM,cAAcA,QAAO;AAC3B,QAAO,KAAK,KAAK,YAAY,UAAU;;;AAIzC,eAAsB,gBAAgB,aAAqB,KAA8C;CACvG,MAAM,SAAS,oBAAoB,KAAK,KAAK,eAAe,CAAC;AAC7D,KAAI,CAAC,OACH,QAAO;CAET,MAAM,MAAM
,OAAO;CAEnB,MAAM,aADO;EAAE,GAAG,IAAI;EAAwC,GAAG,IAAI;EAA2C,CACxF;AAExB,KAAI,CAAC,YAAY,WAAW,QAAQ,CAClC,QAAO;AAGT,QAAO,wBADW,QAAQ,KAAK,WAAW,MAAM,EAAE,CAAC,CACV;;;AAI3C,SAAgB,gBAAgB,QAAuB,UAAmC;AACxF,KAAI,QAAQ;EACV,MAAM,QAAQ,CAAC,gBAAgB,eAAe,CAAC,MAAK,MAAK,WAAW,KAAK,QAAQ,EAAE,CAAC,CAAC;AACrF,MAAI,MACF,QAAO,OAAO;;AAGlB,KAAI,YAAY,WAAW,KAAK,UAAU,YAAY,eAAe,CAAC,CACpE,QAAO;AACT,QAAO;;;AA0BT,eAAsB,uBAAuB,MASpB;CACvB,MAAM,EAAE,aAAa,UAAU,SAAS,eAAe;CACvD,MAAM,WAAW,KAAK,YAAY,YAAY,CAAC,YAAY;CAG3D,MAAM,mBAAmB,KAAK,YACzB,SAAS,YACT,eAAe,aAAa,SAAS,SAAS,SAAS,SAAS,QAAQ,CAAC,aAAa;CAC3F,MAAM,WAAW,KAAK,YAAY,CAAC;CACnC,IAAI,YAAoB,SAAS,aAAa;CAC9C,IAAI,WAA2C;CAC/C,MAAM,cAA0B,EAAE;CAClC,MAAM,WAAqB,EAAE;AAC7B,KAAI,iBACF,UAAS,KAAK,sBAAsB,SAAS,SAAS,wCAAwC;AAEhG,KAAI,CAAC,UAAU;EACb,MAAM,aAAuD,EAAE;EAC/D,MAAM,kBAAkB,SAAiB,oBAAoB,CAAC,KAAK,EAAE,YAAY,CAAC,SAAS;AAG3F,MAAI,SAAS,cAAc,SAAS,SAAS;GAC3C,MAAM,KAAK,eAAe,SAAS,QAAQ;AAC3C,OAAI,IAAI;AACN,eAAW,oBAAoB;IAC/B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,SAAS,YAAY;AAC3E,QAAI,SAAS,SACX,UAAS,KAAK,qBAAqB,QAAQ,IAAI,6BAA6B,QAAQ,GAAG;AAEzF,QAAI,WAAW,QAAQ,MAAM,SAAS,GAAG;KACvC,MAAM,aAAa;KACnB,MAAM,UAA2D,EAAE;AAEnE,UAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,MAAM,QAAQ,KAAK,YAAY;MACzD,MAAM,QAAQ,QAAQ,MAAM,MAAM,GAAG,IAAI,WAAW;AACpD,iBAAW,oBAAoB,KAAK,IAAI,IAAI,YAAY,QAAQ,MAAM,OAAO,CAAC,GAAG,QAAQ,MAAM,OAAO,QAAQ,QAAQ,MAAM;MAC5H,MAAM,eAAe,MAAM,QAAQ,IACjC,MAAM,IAAI,OAAO,SAAS;OAExB,MAAM,UAAU,MAAM,eADV,GAAG,QAAQ,QAAQ,GAAG,OACO;AACzC,WAAI,CAAC,QACH,QAAO;AACT,cAAO;QAAE;QAAM;QAAS;QACxB,CACH;AACD,cAAQ,KAAK,GAAG,aAAa;;AAG/B,UAAK,MAAM,KAAK,QACd,KAAI,GAAG;MACL,MAAM,WAAW,QAAQ,aAAa,EAAE,KAAK,QAAQ,QAAQ,YAAY,GAAG,GAAG,EAAE;MACjF,MAAM,YAAY,SAAS,WAAW,QAAQ,GAAG,WAAW,QAAQ;AACpE,iBAAW,KAAK;OAAE,MAAM;OAAW,SAAS,EAAE;OAAS,CAAC;AACxD,kBAAY,KAAK;OACf,IAAI;OACJ,SAAS,EAAE;OACX,UAAU;QAAE,SAAS;QAAa,QAAQ;QAAW,MAAM;;OAC5D,CAAC;;KAIN,MAAM,aAAa,QAAQ,OAAO,QAAQ,CAAC;AAC3C,SAAI,aAAa,EAEf,KAAI,iBAAiB,WAAW,IAAI,SAAS,SAAS;AACpD,iBAAW,qBAAqB,WAAW,0BAA0B;AACrE,iBAAW,SAAS;AACpB,kBAAY,SAAS;YAElB;AACH,kBAAY,GAAG,SAAS,QAAQ,QAAQ,
QAAQ,IAAI;AACpD,iBAAW;AACX,mBAAa,aAAa,SAAS,WAAW;AAG9C,UAAI,SAAS,SAAS;AACpB,kBAAW,iCAAiC;OAC5C,MAAM,cAAc,MAAM,aAAa,SAAS,QAAQ;AACxD,WAAI,aAAa;QACf,MAAM,UAAU,SAAS,WAAW,IAAI,IAAI,SAAS,QAAQ,CAAC;QAC9D,MAAM,gBAA0D,CAC9D;SAAE,MAAM;SAAY,SAAS,mBAAmB,YAAY,KAAK,QAAA;SAAU,CAC5E;AACD,YAAI,YAAY,MAAM,SAAS,GAAG;AAChC,oBAAW,eAAe,YAAY,MAAM,OAAO,qBAAqB;SACxE,MAAM,OAAO,MAAM,iBAAiB,aAAa,UAAU,KAAK,MAAM,UAAU;AAC9E,qBAAW,iCAAiC,OAAO,EAAE,GAAG,QAAQ;WAChE;AACF,cAAK,MAAM,OAAO,MAAM;AACtB,cAAI,CAAC,eAAe,IAAI,IAAI,CAC1B;UACF,MAAM,YAAY,IAAI,IAAI,WAAW,IAAI,GAAG,IAAI,IAAI,MAAM,EAAE,GAAG,IAAI;AACnE,wBAAc,KAAK;WAAE,MAAM,KAAK,aAAa,GAAG,UAAU,MAAM,IAAI,CAAC;WAAE,SAAS,IAAI;WAAS,CAAC;;;AAGlG,qBAAa,aAAa,SAAS,cAAc;;;;;;;AAU/D,MAAI,SAAS,YAAY,WAAW,WAAW,GAAG;AAChD,cAAW,mBAAmB;GAC9B,MAAM,cAAc,MAAM,iBAAiB,SAAS,UAAU,WAAW,CAAC,OAAO,QAAQ;AACvF,aAAS,KAAK,oBAAoB,SAAS,SAAS,IAAI,KAAK,WAAW,MAAM;AAC9E,WAAO,EAAE;KACT;AACF,OAAI,YAAY,WAAW,KAAK,SAAS,SACvC,UAAS,KAAK,8BAA8B,SAAS,WAAW;AAElE,OAAI,YAAY,SAAS,GAAG;AAC1B,SAAK,MAAM,OAAO,aAAa;AAC7B,SAAI,CAAC,eAAe,IAAI,KAAK,CAC3B;AACF,gBAAW,KAAK,IAAI;AACpB,iBAAY,KAAK;MACf,IAAI,IAAI;MACR,SAAS,IAAI;MACb,UAAU;OAAE,SAAS;OAAa,QAAQ,IAAI;OAAM,MAAM;;MAC3D,CAAC;;AAEJ,gBAAY,SAAS;AACrB,eAAW;AACX,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,SAAS,WAAW,WAAW,WAAW,GAAG;AAC/C,cAAW,oBAAoB;GAC/B,MAAM,cAAc,MAAM,aAAa,SAAS,QAAQ;AACxD,OAAI,aAAa;AACf,gBAAY,SAAS;AACrB,eAAW;IACX,MAAM,UAAU,SAAS,WAAW,IAAI,IAAI,SAAS,QAAQ,CAAC;AAC9D,eAAW,KAAK;KAAE,MAAM;KAAY,SAAS,mBAAmB,YAAY,KAAK,QAAA;KAAU,CAAC;AAE5F,QAAI,YAAY,MAAM,SAAS,GAAG;AAChC,gBAAW,eAAe,YAAY,MAAM,OAAO,cAAc;KACjE,MAAM,OAAO,MAAM,iBAAiB,aAAa,UAAU,KAAK,MAAM,UAAU;AAC9E,iBAAW,0BAA0B,OAAO,EAAE,GAAG,QAAQ;OACzD;AAEF,UAAK,MAAM,OAAO,MAAM;AACtB,UAAI,CAAC,eAAe,IAAI,IAAI,CAC1B;MAEF,MAAM,YAAY,KAAK,QAAQ,IADb,IAAI,IAAI,WAAW,IAAI,GAAG,IAAI,IAAI,MAAM,EAAE,GAAG,IAAI,KACvB,MAAM,IAAI,CAAC;AACvD,iBAAW,KAAK;OAAE,MAAM;OAAW,SAAS,IAAI;OAAS,CAAC;AAC1D,kBAAY,KAAK;OACf,IAAI,IAAI;OACR,SAAS,IAAI;OACb,UAAU;QAAE,SAAS;QAAa,QAAQ;QAAW,MAAM;;OAC5D,CAAC;;AAEJ,SAAI,KAAK,SAAS,EAChB,YAAW;;AAGf,iBAAa,aAAa,SAAS,WAA
W;;;AAKlD,MAAI,SAAS,WAAW,CAAC,WAAW,MAAK,MAAK,EAAE,KAAK,WAAW,QAAQ,CAAC,EAAE;GACzE,MAAM,eAAe,SAAS,YAAY,eAAe,SAAS,QAAQ;AAC1E,cAAW,qBAAqB;GAEhC,MAAM,cAAc,MAAM,iBAAiB,cAAc,YADnC,SAAS,WAAW,MAAM,IACmC,CAAC,OAAO,QAAQ;AACjG,aAAS,KAAK,oBAAoB,aAAa,IAAI,KAAK,WAAW,MAAM;AACzE,WAAO,EAAE;KACT;AACF,OAAI,YAAY,SAAS,GAAG;AAC1B,SAAK,MAAM,OAAO,aAAa;AAC7B,SAAI,CAAC,eAAe,IAAI,KAAK,CAC3B;AACF,gBAAW,KAAK,IAAI;AACpB,iBAAY,KAAK;MACf,IAAI,IAAI;MACR,SAAS,IAAI;MACb,UAAU;OAAE,SAAS;OAAa,QAAQ,IAAI;OAAM,MAAM;;MAC3D,CAAC;;AAEJ,gBAAY;AACZ,eAAW;AACX,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,SAAS,aAAa,WAAW,WAAW,GAAG;AACjD,cAAW,kBAAkB;GAC7B,MAAM,UAAU,MAAM,mBAAmB,SAAS,UAAU;AAC5D,OAAI,SAAS;AACX,eAAW,KAAK;KAAE,MAAM;KAAkB;KAAS,CAAC;AACpD,gBAAY,KAAK;KACf,IAAI;KACJ;KACA,UAAU;MAAE,SAAS;MAAa,QAAQ;MAAkB,MAAM;;KACnE,CAAC;AACF,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,aAAa,YAAY,WAAW,QAAO,MAAK,EAAE,KAAK,WAAW,QAAQ,IAAI,EAAE,KAAK,SAAS,MAAM,CAAC,CAAC,SAAS,GAAG;GACpH,MAAM,YAAY,kBAAkB,WAAW;AAC/C,OAAI,UACF,cAAa,aAAa,SAAS,CAAC;IAAE,MAAM;IAAkB,SAAS;IAAW,CAAC,CAAC;;QAIrF;AAEH,aAAW,sBAAsB;EACjC,MAAM,WAAW,eAAe,aAAa,SAAS,SAAS,SAAS,SAAS,QAAQ;AACzF,aAAW,SAAS;AACpB,MAAI,SAAS,UACX,aAAY,SAAS;AAIvB,MAAI,CAAC,WADU,iBAAiB,aAAa,QAAQ,CAC9B,EAAE;AACvB,cAAW,mCAAmC;GAC9C,MAAM,SAAS,eAAe,aAAa,QAAQ;AACnD,QAAK,MAAM,OAAO,OAChB,aAAY,KAAK;IACf,IAAI,IAAI;IACR,SAAS,IAAI;IACb,UAAU;KAAE,SAAS;KAAa,QAAQ,IAAI;KAAM,GAAG,kBAAkB,IAAI,KAAA;;IAC9E,CAAC;;AAKN,MAAI,aAAa,YAAY,CAAC,WAAW,KAAK,YAAY,aAAa,QAAQ,EAAE,QAAQ,YAAY,CAAC,EAAE;AACtG,cAAW,wBAAwB;GACnC,MAAM,SAAS,eAAe,aAAa,QAAQ;AAEnD,OADiB,OAAO,QAAO,MAAK,EAAE,KAAK,WAAW,QAAQ,IAAI,EAAE,KAAK,SAAS,MAAM,CAAC,CAC5E,SAAS,GAAG;IACvB,MAAM,YAAY,kBAAkB,OAAO;AAC3C,QAAI,UACF,cAAa,aAAa,SAAS,CAAC;KAAE,MAAM;KAAkB,SAAS;KAAW,CAAC,CAAC;;;;CAO5F,MAAM,KAAK,SAAS,UAAU,eAAe,SAAS,QAAQ,GAAG;CACjE,MAAM,WAAW,KAAK;EAAE,OAAO,GAAG;EAAO,MAAM,GAAG;EAAM,GAAG,KAAA;CAG3D,MAAM,eAAe,WAAW,gBAAgB,SAAS,OAAO,SAAS,KAAK,GAAG;CACjF,MAAM,WAAW,YAAY,aAAa,QAAQ;CAClD,MAAM,YAAY,eAAe,KAAK,cAAc,SAAS,GAAG,KAAK,UAAU,SAAS;CACxF,MAAM,iBAAiB,eAAe,KAAK,cAAc,cAAc,GAAG,KAAK,UAAU,cAAc;CACv
G,MAAM,eAAe,eAAe,KAAK,cAAc,WAAW,GAAG,KAAK,UAAU,WAAW;AAG/F,KAAI,SAAS,UAAU,MAAM,eAAe,IAAI,CAAC,WAAW,UAAU,EAAE;AACtE,aAAW,iCAAiC;EAC5C,MAAM,SAAS,MAAM,kBAAkB,GAAG,OAAO,GAAG,MAAM,IAAI,SAAS,YAAY,KAAK,KAAK,CAAC,YAAY,EAAE,CAAC;AAC7G,MAAI,OAAO,SAAS,GAAG;AACrB,cAAW,WAAW,OAAO,OAAO,SAAS;GAC7C,MAAM,YAAY,CAChB,GAAG,OAAO,KAAI,WAAU;IACtB,MAAM,gBAAgB,MAAM,OAAO;IACnC,SAAS,sBAAsB,MAAA;IAChC,EAAE,EACH;IACE,MAAM;IACN,SAAS,mBAAmB,OAAA;IAC7B,CACF;AACD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,UAAU;OAE1D,cAAa,aAAa,SAAS,UAAU;AAC/C,QAAK,MAAM,SAAS,OAClB,aAAY,KAAK;IACf,IAAI,SAAS,MAAM;IACnB,SAAS,iBAAiB,IAAI,MAAM,OAAO,IAAI,MAAM,MAAM,MAAM,MAAM,QAAQ,KAAK;IACpF,UAAU;KAAE,SAAS;KAAa,QAAQ,gBAAgB,MAAM,OAAO;KAAM,MAAM;KAAS,QAAQ,MAAM;;IAC3G,CAAC;;;AAMR,KAAI,SAAS,eAAe,MAAM,eAAe,IAAI,CAAC,WAAW,eAAe,EAAE;AAChF,aAAW,sCAAsC;EACjD,MAAM,cAAc,MAAM,uBAAuB,GAAG,OAAO,GAAG,MAAM,IAAI,SAAS,YAAY,KAAK,KAAK,CAAC,YAAY,EAAE,CAAC;AACvH,MAAI,YAAY,SAAS,GAAG;AAC1B,cAAW,WAAW,YAAY,OAAO,cAAc;GACvD,MAAM,iBAAiB,CACrB,GAAG,YAAY,KAAI,OAAM;IACvB,MAAM,0BAA0B,EAAE,OAAO;IACzC,SAAS,2BAA2B,EAAA;IACrC,EAAE,EACH;IACE,MAAM;IACN,SAAS,wBAAwB,YAAA;IAClC,CACF;AACD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,eAAe;OAE/D,cAAa,aAAa,SAAS,eAAe;AACpD,QAAK,MAAM,KAAK,YACd,aAAY,KAAK;IACf,IAAI,cAAc,EAAE;IACpB,SAAS,iBAAiB,IAAI,EAAE,OAAO,IAAI,EAAE,MAAM,MAAM,EAAE,QAAQ,KAAK;IACxE,UAAU;KAAE,SAAS;KAAa,QAAQ,0BAA0B,EAAE,OAAO;KAAM,MAAM;KAAc,QAAQ,EAAE;;IAClH,CAAC;;;AAMR,KAAI,SAAS,YAAY,MAAM,eAAe,IAAI,CAAC,WAAW,aAAa,EAAE;AAC3E,aAAW,mCAAmC;EAC9C,MAAM,eAAe,aAAa,QAAQ,GAAG,0BAA0B,YAAY,GAAG,KAAA;EACtF,MAAM,cAAc,MAAM,kBAAkB,GAAG,OAAO,GAAG,MAAM,SAAS,SAAS,QAAQ,aAAa,KAAK,MAAM,aAAa,CAAC,YAAY,EAAE,CAAC;EAG9I,IAAI,WAAqD,EAAE;AAC3D,MAAI,cAAc,YAAY,EAAE;AAC9B,cAAW,8BAA8B;AACzC,cAAW,MAAM,kBAAkB,aAAa,QAAQ,CAAC,YAAY,EAAE,CAAC;;EAG1E,MAAM,UAAU,CAAC,GAAG,aAAa,GAAG,SAAS;EAG7C,MAAM,cAAc,SACjB,QAAO,MAAK,CAAC,EAAE,KAAK,SAAS,YAAY,CAAC,CAC1C,KAAK,MAAM;GACV,MAAM,eAAe,EAAE,KAAK,MAAM,iBAAiB;GACnD,MAAM,KAAK,iBAAiB,EAAE,QAAQ;AACtC,UAAO;IACL,SAAS,eAAe,MAAM;IAC9B,OAAO,GAAG,SAAS,WAAW,eAAe;IAC7C,MAAM,GAAG,QAAQ;IAClB
;IACD,CACD,QAAO,MAAK,EAAE,QAAQ;EAGzB,MAAM,aAAa,YAChB,QAAO,MAAK,EAAE,KAAK,WAAW,YAAY,IAAI,CAAC,EAAE,KAAK,SAAS,eAAe,CAAC,CAC/E,KAAK,MAAM;GACV,MAAM,KAAK,iBAAiB,EAAE,QAAQ;GACtC,MAAM,MAAM,GAAG,OAAO;GACtB,MAAM,OAAO,GAAG,QAAQ;GACxB,MAAM,YAAY,GAAG,aAAa;AAClC,UAAO;IAAE,IAAI;IAAG;IAAK;IAAM,YAAY;IAAO,WAAW;IAAW,aAAa;IAAW,UAAU;IAAI;IAC1G,CACD,QAAO,MAAK,EAAE,IAAI;EAErB,MAAM,eAAe,QAAQ,MAAK,MAAK,EAAE,SAAS,wBAAwB;AAG1E,MAAI,WAAW,SAAS,KAAK,YAAY,SAAS,EAChD,SAAQ,KAAK;GACX,MAAM;GACN,SAAS,qBAAqB;IAAE,UAAU;IAAY;IAAa,cAAc;IAAa;IAAc,CAAA;GAC7G,CAAC;AAGJ,MAAI,QAAQ,SAAS,GAAG;AACtB,cAAW,WAAW,QAAQ,OAAO,WAAW;AAChD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,QAAQ;OAExD,cAAa,aAAa,SAAS,QAAQ;AAC7C,QAAK,MAAM,OAAO,QAChB,aAAY,KAAK;IACf,IAAI,IAAI;IACR,SAAS,IAAI;IACb,UAAU;KAAE,SAAS;KAAa,QAAQ,IAAI;KAAM,MAAM;;IAC3D,CAAC;;;AAKR,QAAO;EACL;EACA;EACA;EACA,WAAW,SAAS,UAAU,WAAW,UAAU;EACnD,gBAAgB,SAAS,eAAe,WAAW,eAAe;EAClE,aAAa,SAAS,YAAY,WAAW,aAAa;EAC1D;EACA;EACA,WAAW;EACZ;;;;;;AAOH,SAAS,YAAY,IAAoB;CACvC,MAAM,MAAM,GAAG,QAAQ,UAAU;AACjC,QAAO,QAAQ,KAAK,KAAK,GAAG,MAAM,GAAG,IAAI;;;AAI3C,SAAS,QAAQ,SAAqB,KAAa,YAAyC;AAC1F,KAAI,QAAQ,UAAU,IACpB;CACF,MAAM,gBAAwC;EAAE,KAAK;EAAG,OAAO;EAAG,YAAY;EAAG,SAAS;EAAG,QAAQ;EAAG,OAAO;EAAG;AAClH,SAAQ,MAAM,GAAG,MAAM;EACrB,MAAM,KAAK,cAAc,EAAE,UAAU,QAAQ,UAAU;EACvD,MAAM,KAAK,cAAc,EAAE,UAAU,QAAQ,UAAU;AACvD,MAAI,OAAO,GACT,QAAO,KAAK;AACd,SAAO,EAAE,GAAG,cAAc,EAAE,GAAG;GAC/B;AACF,YAAW,sBAAsB,IAAI,GAAG,QAAQ,OAAO,6BAA6B;AACpF,SAAQ,SAAS;;;AAInB,eAAsB,eAAe,MAOnB;CAChB,MAAM,EAAE,aAAa,SAAS,KAAK,eAAe;CAClD,MAAM,WAAW,KAAK,YAAY,YAAY,CAAC,YAAY;AAE3D,KAAI,CAAC,SAAS,OACZ;CAEF,MAAM,SAAS,iBAAiB,aAAa,QAAQ;CACrD,MAAM,WAAW,WAAW,OAAO;CAEnC,MAAM,UAAU,CAAC,GAAG,KAAK,YAAY;CAGrC,MAAM,SAAS,cAAc,aAAa,KAAK,QAAQ;AACvD,KAAI,SAAS,UAAU,QAAQ;AAC7B,aAAW,mBAAmB;EAC9B,MAAM,aAAa,MAAM,kBAAkB,OAAO;AAClD,OAAK,MAAM,KAAK,WACd,SAAQ,KAAK;GACX,IAAI,EAAE;GACN,SAAS,EAAE;GACX,UAAU;IAAE,SAAS;IAAa,QAAQ,OAAO,EAAE;IAAQ,MAAM,EAAE;;GACpE,CAAC;;AAIN,KAAI,QAAQ,WAAW,EACrB;AAEF,SAAQ,SAAS,gBAAgB,WAAW;AAG5C,KAAI,CAAC,UAAU;AACb,aAAW,0BAA0B,QAAQ,OAAO,QAAQ;AAC5D,MA
AI;AACF,SAAM,YAAY,SAAS;IACzB;IACA,aAAa,EAAE,OAAO,SAAS,YAAY;AACzC,SAAI,UAAU,WAAW;MACvB,MAAM,IAAI,QAAQ,UAAU;AAE5B,iBAAW,WADE,GAAG,UAAU,SAAS,YAAY,GAAG,UAAU,SAAS,UAAU,SAAU,GAAG,UAAU,QAAQ,MACnF,IAAI,QAAQ,GAAG,MAAM,GAAG;gBAE5C,UAAU,YACjB,YAAW,wBAAwB,QAAQ,GAAG,MAAM,GAAG;;IAG5D,CAAC;WAEG,KAAK;AACV,OAAI,eAAe,2BACjB,YAAW,oDAAoD;OAE/D,OAAM;;AAEV;;CAIF,IAAI;AACJ,KAAI;AACF,gBAAc,MAAM,aAAa,EAAE,QAAQ,CAAC;UAEvC,KAAK;AACV,MAAI,eAAe,4BAA4B;AAC7C,cAAW,oDAAoD;AAC/D;;AAEF,QAAM;;CAIR,MAAM,oBAAoB,IAAI,IAAI,YAAY,IAAI,YAAY,CAAC;CAC/D,MAAM,cAAc,IAAI,IAAI,QAAQ,KAAI,MAAK,EAAE,GAAG,CAAC;CAGnD,MAAM,UAAU,QAAQ,QAAO,MAAK,CAAC,kBAAkB,IAAI,EAAE,GAAG,CAAC;CAGjE,MAAM,YAAY,YAAY,QAAO,OAAM,CAAC,YAAY,IAAI,YAAY,GAAG,CAAC,CAAC;AAE7E,KAAI,QAAQ,WAAW,KAAK,UAAU,WAAW,GAAG;AAClD,aAAW,0BAA0B;AACrC;;CAGF,MAAM,QAAkB,EAAE;AAC1B,KAAI,QAAQ,SAAS,EACnB,OAAM,KAAK,IAAI,QAAQ,OAAO,MAAM;AACtC,KAAI,UAAU,SAAS,EACrB,OAAM,KAAK,IAAI,UAAU,OAAO,QAAQ;AAC1C,YAAW,0BAA0B,MAAM,KAAK,KAAK,CAAC,GAAG;AAEzD,KAAI;AACF,QAAM,YAAY,SAAS;GACzB;GACA;GACA,aAAa,EAAE,OAAO,SAAS,YAAY;AACzC,QAAI,UAAU,WAAW;KACvB,MAAM,IAAI,QAAQ,UAAU;AAE5B,gBAAW,WADE,GAAG,UAAU,SAAS,YAAY,GAAG,UAAU,SAAS,UAAU,SAAU,GAAG,UAAU,QAAQ,MACnF,IAAI,QAAQ,GAAG,MAAM,GAAG;eAE5C,UAAU,YACjB,YAAW,wBAAwB,QAAQ,GAAG,MAAM,GAAG;;GAG5D,CAAC;UAEG,KAAK;AACV,MAAI,eAAe,2BACjB,YAAW,oDAAoD;MAE/D,OAAM;;;;;;;;AASZ,SAAgB,gBAAgB,UAAkB,aAAqB,KAAa,SAAiB,UAAkB,UAA2B,UAAkD;CAClM,MAAM,IAAI,YAAY,YAAY,CAAC,YAAY;CAC/C,MAAM,WAAW,YAAY,aAAa,QAAQ;CAClD,MAAM,UAAU,KAAK,UAAU,aAAa;CAE5C,MAAM,UAAU,WAAW,gBAAgB,SAAS,OAAO,SAAS,KAAK,GAAG;AAG5E,KAAI,CAAC,eAAe,aAAa,KAAK,QAAQ,IAAI,aAAa,SAC7D,kBAAiB,UAAU,SAAS,OAAO;AAE7C,KAAI,EAAE,OACJ,kBAAiB,SAAS,SAAS,SAAS;AAC9C,KAAI,EAAE,YACJ,kBAAiB,SAAS,SAAS,cAAc;AACnD,KAAI,EAAE,SACJ,kBAAiB,SAAS,SAAS,WAAW;;;AAIlD,SAAS,iBAAiB,UAAkB,SAAiB,QAAsB;CACjF,MAAM,SAAS,KAAK,UAAU,OAAO;AACrC,KAAI,CAAC,WAAW,OAAO,CACrB;CAEF,MAAM,UAAU,KAAK,SAAS,OAAO;AACrC,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;CAEvC,SAAS,KAAK,KAAa,KAAa;AACtC,OAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;GAC7D,MAAM,UAAU,KAAK,KAAK,MAAM,KAAK;GACrC,MAAM,WA
AW,KAAK,SAAS,MAAM,GAAG,IAAI,GAAG,MAAM,SAAS,MAAM,KAAK;AACzE,OAAI,MAAM,aAAa,EAAE;AACvB,cAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,SAAK,SAAS,MAAM,GAAG,IAAI,GAAG,MAAM,SAAS,MAAM,KAAK;SAGxD,cAAa,SAAS,SAAS;;;AAKrC,MAAK,QAAQ,GAAG;;;;;;AASlB,eAAsB,gBAAgB,WAAmB,KAAa,UAAkC;AACtG,KAAI,SACF;CAEF,MAAM,gBAAgB,KAAK,KAAK,aAAa;CAC7C,MAAM,UAAU;AAGhB,KAAI,WAAW,cAAc;MACX,aAAa,eAAe,QAAQ,CACxC,MAAM,KAAK,CAAC,MAAK,SAAQ,KAAK,MAAM,KAAK,QAAQ,CAC3D;;AAIJ,KAAI,CAAC,eAAe,EAAE;EACpB,MAAM,QAAQ,4DAA4D,QAAQ;AAClF,MAAI,WAAW,cAAc,CAG3B,gBAAe,eAAe,GAFb,aAAa,eAAe,QAAQ,CAC1B,SAAS,KAAK,GAAG,KAAK,OACJ,QAAQ;MAGrD,eAAc,eAAe,MAAM;AAErC;;CAIF,MAAM,eAAe,SAAS,KAAK,UAAU,IAAI;AACjD,GAAE,IAAI,KACJ,mEACwC,aAAa,yDACb,aAAa,+DACb,QAAQ,0DACjD;CAED,MAAM,MAAM,MAAM,EAAE,QAAQ;EAC1B,SAAS,SAAS,QAAQ;EAC1B,cAAc;EACf,CAAC;AAEF,KAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;CAEF,MAAM,QAAQ,4DAA4D,QAAQ;AAClF,KAAI,WAAW,cAAc,CAG3B,gBAAe,eAAe,GAFb,aAAa,eAAe,QAAQ,CAC1B,SAAS,KAAK,GAAG,KAAK,OACJ,QAAQ;KAGrD,eAAc,eAAe,MAAM;AAGrC,GAAE,IAAI,QAAQ,qBAAqB;;AAGrC,MAAa,sBAAsB;AACnC,MAAa,oBAAoB;AAEjC,MAAM,qBAAqB;AAE3B,SAAS,qBAAqB,OAA0B;AAEtD,QAAO,GAAG,oBAAoB,IADjBA,QAAO,OAAO,uBAAuB,mBACX,IAAI;;AAG7C,SAAS,wBAAwB,OAA0B;AAEzD,QAAO,wGADMA,QAAO,OAAO,uBAAuB;;;;;;AAQpD,eAAsB,wBAAwB,OAAkB,KAAa,UAAkC;AAC7G,KAAI,SACF;CAEF,MAAM,cAAcA,QAAO;AAC3B,KAAI,CAAC,YAAY,gBACf;CAEF,MAAM,WAAW,KAAK,KAAK,YAAY,gBAAgB;AAIvD,KAHc,YAAY,gBAAgB,SAAS,OAAO,EAG/C;AACT,MAAI,WAAW,SAAS,CACtB;EAEF,MAAM,UAAU,GAAG,wBAAwB,MAAM,CAAC;AAElD,MAAI,CAAC,eAAe,EAAE;AACpB,aAAU,KAAK,UAAU,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACpD,iBAAc,UAAU,QAAQ;AAChC;;AAGF,IAAE,KACA;;;;UAIa,wBAAwB,MAAM,CAAC,UAC5C,UAAU,YAAY,kBACvB;EAED,MAAM,MAAM,MAAM,EAAE,QAAQ;GAC1B,SAAS,UAAU,YAAY,gBAAgB;GAC/C,cAAc;GACf,CAAC;AAEF,MAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;AAEF,YAAU,KAAK,UAAU,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACpD,gBAAc,UAAU,QAAQ;AAChC,IAAE,IAAI,QAAQ,WAAW,YAAY,kBAAkB;AACvD;;AAIF,KAAI,WAAW,SAAS;MACN,aAAa,UAAU,QAAQ,CACnC,SAAA,kBAA6B,CACvC;;AAIJ,KAAI,CAAC,eAAe,EAAE;AACpB,MAAI,WAAW,SAAS,CAGtB,gBAAe,UAAU,GAFR,aAAa,UAAU,QAAQ,CACrB,SAAS,KAAK,GAAG,KAAK,KACX,
IAAI,qBAAqB,MAAM,CAAC,IAAI;MAG1E,eAAc,UAAU,GAAG,qBAAqB,MAAM,CAAC,IAAI;AAE7D;;CAIF,MAAM,SADa,WAAW,SAAS,GACX,cAAc;AAC1C,GAAE,KACA;;;;UAIa,qBAAqB,MAAM,CAAC,QAAQ,OAAO,KAAK,CAAC,UAC9D,GAAG,OAAO,GAAG,YAAY,kBAC1B;CAED,MAAM,MAAM,MAAM,EAAE,QAAQ;EAC1B,SAAS,GAAG,OAAO,GAAG,YAAY,gBAAgB;EAClD,cAAc;EACf,CAAC;AAEF,KAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;AAEF,KAAI,WAAW,SAAS,CAGtB,gBAAe,UAAU,GAFR,aAAa,UAAU,QAAQ,CACrB,SAAS,KAAK,GAAG,KAAK,KACX,IAAI,qBAAqB,MAAM,CAAC,IAAI;KAG1E,eAAc,UAAU,GAAG,qBAAqB,MAAM,CAAC,IAAI;AAG7D,GAAE,IAAI,QAAQ,WAAW,YAAY,kBAAkB;;;AAmCzD,MAAa,mBAAmC,CAAC,kBAAkB,cAAc;AAEjF,eAAsB,oBAAoB,UAAU,oBAA4G;AAC9J,GAAE,IAAI,KAAK,4CAA4C;CACvD,MAAM,WAAW,MAAM,EAAE,YAAY;EACnC;EACA,SAAS;GACP;IAAE,OAAO;IAAe,OAAO;IAA+B,MAAM;IAA4C;GAChH;IAAE,OAAO;IAAkB,OAAO;IAAkC,MAAM;IAA+B;GACzG;IAAE,OAAO;IAAkB,OAAO;IAA0B,MAAM;;GACnE;EACD,eAAe;EACf,UAAU;EACX,CAAC;AAEF,KAAI,EAAE,SAAS,SAAS,CACtB,QAAO;EAAE,UAAU,EAAE;EAAE,WAAW;EAAM;CAE1C,MAAM,WAAW;AACjB,KAAI,SAAS,WAAW,EACtB,QAAO;EAAE,UAAU,EAAE;EAAE,WAAW;EAAO;AAG3C,KAAI,SAAS,SAAS,GAAG;EACvB,MAAM,IAAI,SAAS;EACnB,MAAM,cAAwB,EAAE;AAChC,OAAK,MAAM,KAAK,SACd,SAAQ,GAAR;GACE,KAAK;AACH,gBAAY,KAAK,qBAAqB,SAAS,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,GAAG,KAAK,MAAM,KAAK,IAAI,EAAE,EAAE,CAAC,kCAAkC;AACnI;GACF,KAAK;AACH,gBAAY,KAAK,qBAAqB,SAAS,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,GAAG,KAAK,MAAM,KAAK,IAAI,EAAE,EAAE,CAAC,QAAQ;AACzG;GACF,KAAK;AACH,gBAAY,KAAK,sBAAsB,SAAS,IAAI,IAAI,EAAE,CAAC,QAAQ;AACnE;;AAGN,IAAE,IAAI,KAAK,WAAW,EAAE,eAAe,YAAY,KAAK,KAAK,GAAG;;CAGlE,IAAI;AACJ,KAAI,SAAS,SAAS,SAAS,EAAE;EAC/B,MAAM,UAAU,MAAM,EAAE,KAAK;GAC3B,SAAS;GACT,aAAa;GACd,CAAC;AACF,MAAI,EAAE,SAAS,QAAQ,CACrB,QAAO;GAAE,UAAU,EAAE;GAAE,WAAW;GAAM;EAE1C,MAAM,OAAO,MAAM,EAAE,KAAK;GACxB,SAAS;GACT,aAAa;GACd,CAAC;AACF,MAAI,EAAE,SAAS,KAAK,CAClB,QAAO;GAAE,UAAU,EAAE;GAAE,WAAW;GAAM;AAE1C,iBAAe;GAAW;GAAyB;GAAgB;;AAGrE,QAAO;EAAE;EAAU;EAAc,WAAW;EAAO;;;;;;;AA0BrD,eAAsB,gBAAgB,aAA6B,SAAkB,WAAsD;AACzI,KAAI,aAAa;AAGf,OADkB,MAAM,oBAAoB,EAC9B,MAAK,MAAK,EAAE,OAAO,YAAY,CAC3C,QAAO;GAAE,OAAO;GAAa,UAAU;GAAkB;AAE3D,MAAI,CAAC,eAAe,CAClB,QAAO;;A
AIX,KAAI,CAAC,eAAe,CAClB,QAAO;CAIT,MAAM,SAAS,YAAY;CAC3B,MAAM,YAAY,MAAM,oBAAoB;AAE5C,KAAI,UAAU,WAAW,GAAG;AAC1B,IAAE,IAAI,KAAK,kBAAkB;AAC7B,SAAO;;CAIT,IAAI;AACJ,KAAI,OAAO,SAAS,UAAU,MAAK,MAAK,EAAE,OAAO,OAAO,MAAM,CAC5D,gBAAe,OAAO;MAEnB;AACH,MAAI,OAAO,MACT,GAAE,IAAI,KAAK,4BAA4B,OAAO,MAAM,uDAAuD;AAC7G,iBAAgB,UAAU,MAAK,MAAK,EAAE,YAAY,EAAE,MAAM,UAAU,GAAI;;CAG1E,MAAM,mBAAmB,aAAa,aAAa;CAEnD,MAAM,eADmB,UAAU,MAAK,MAAK,EAAE,OAAO,aAAa,EAC5B,gBAAgB;CACvD,MAAM,aAAa,OAAO,UAAU,eAAe,eAAe;CAClE,MAAM,cAAc,eAAe,GAAG,aAAa,KAAK,eAAe;CAGvE,IAAI,iBAAiB;CACrB,IAAI,gBAAgB;AACpB,KAAI,WAAW;EACb,MAAM,OAAO,UAAU,aACjB,UAAU,cAAc,UAAU,aAAa,WAAW,UAAU,YAAY,UAAU,WAAW,GAAG;EAC9G,MAAM,cAAc,SAAS,WAAW,SAAS,gBAAgB,SAAS,cAAc,SAAS,cAAc,SAAS;EAExH,MAAM,WAAqB,EAAE;AAC7B,MAAI,KACF,UAAS,KAAK,KAAK;AACrB,MAAI,UAAU,UAAU;GACtB,MAAM,aAAa,IAAI,KAAK,UAAU,SAAS,CAAC,SAAS;AACzD,OAAI,OAAO,SAAS,WAAW,EAAE;IAC/B,MAAM,OAAO,KAAK,OAAO,KAAK,KAAK,GAAG,cAAc,MAAW;AAC/D,aAAS,KAAK,SAAS,IAAI,UAAU,SAAS,IAAI,WAAW,GAAG,KAAK,OAAO;;;AAGhF,MAAI,UAAU,YACZ,UAAS,KAAK,eAAe;EAK/B,MAAM,OAAO,CAHO,UAAU,cAAc,UAAU,aAClD,GAAG,UAAU,WAAW,KAAK,UAAU,eACvC,MACuB,GAAG,SAAS,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM;AACnE,MAAI,KACF,kBAAiB,8BAA8B,KAAK;AAGtD,MAAI,UAAU,eAAe,YAC3B,iBAAgB;;CAGpB,MAAM,SAAS,MAAM,EAAE,OAAO;EAC5B,SAAS;EACT,SAAS;GACP;IAAE,OAAO;IAAkB,OAAO;IAAoB,MAAM;IAAa;GACzE;IAAE,OAAO;IAAmB,OAAO;IAAiB,MAAM;IAAoC;GAC9F;IAAE,OAAO;IAAe,OAAO;IAAmB,MAAM;IAAgC;GACxF;IAAE,OAAO;IAAQ,OAAO;IAAiB,MAAM;;GAChD;EACD,GAAI,gBAAgB,EAAE,cAAc,QAAiB,GAAG,EAAA;EACzD,CAAC;AAEF,KAAI,EAAE,SAAS,OAAO,CACpB,QAAO;AAET,KAAI,WAAW,OACb,QAAO;AAET,KAAI,WAAW,UAAU;EACvB,MAAM,EAAE,UAAU,cAAc,cAAc,MAAM,oBAClD,UAAU,GAAG,QAAQ,kBAAkB,wCACxC;AACD,MAAI,aAAa,SAAS,WAAW,EACnC,QAAO;AAET,SAAO;GAAE,OAAO;GAAc;GAAU;GAAc,YAAY;GAAM;;CAG1E,IAAI;AACJ,KAAI,WAAW,QAAQ;EACrB,MAAM,SAAS,MAAM,UAAU,UAAU;AACzC,MAAI,CAAC,OACH,QAAO;AACT,eAAa,EAAE,OAAO,QAAyB,CAAC;AAChD,UAAQ;OAGR,SAAQ;AAEV,KAAI,CAAC,MACH,QAAO;CAET,MAAM,YAAY,aAAa,MAAM;CACrC,MAAM,EAAE,UAAU,cAAc,cAAc,MAAM,oBAClD,UAAU,GAAG,QAAQ,IAAI,UAAU,KAAK,yBAAyB,YAClE;AAED,KAAI,aAAa
,SAAS,WAAW,EACnC,QAAO;AAET,QAAO;EAAE;EAAO;EAAU;EAAc;;AA4B1C,eAAsB,oBAAoB,MAAqC;CAC7E,MAAM,EAAE,aAAa,SAAS,UAAU,SAAS,OAAO,UAAU,eAAe,WAAW,gBAAgB,aAAa,cAAc,UAAU,gBAAgB,aAAa,UAAU,OAAO,OAAO,UAAU,cAAc,UAAU,UAAU,OAAO,kBAAkB;CAE3Q,MAAM,oBAAoB;CAE1B,MAAM,SAAS,EAAE,QAAQ,EAAE,OAAO,mBAAmB,eAAe,CAAC;CACrE,MAAM,WAAW,mBAAmB,SAAS;CAE7C,MAAM,EAAE,WAAW,cAAc,OAAO,MAAM,UAAU,OAAO,iBAAiB,MAAM,aAAa;EACjG;EACA;EACA;EACA;EACA,WANgB,aAAa;EAO7B;EACA;EACA;EACA;EACA,gBAAgB;EAChB,SAAS;EACT;EACA;EACA;EACA,UAAU;EACV;EACA;EACA,YAAY,mBAAmB,OAAA;EAChC,CAAC;AAEF,KAAI,cAAc;EAChB,MAAM,YAAsB,EAAE;AAC9B,MAAI,OAAO;GACT,MAAM,SAAS,KAAK,MAAM,MAAM,cAAc,IAAK;AACnD,aAAU,KAAK,GAAG,OAAO,UAAU;;AAErC,MAAI,KACF,WAAU,KAAK,IAAI,KAAK,QAAQ,EAAE,GAAG;EACvC,MAAM,aAAa,UAAU,SAAS,IAAI,KAAK,UAAU,KAAK,KAAK,CAAC,KAAK;AACzE,SAAO,QAAQ,2BAA2B,aAAa;AACvD,MAAI,aACF,GAAE,IAAI,KAAK,eAAe,SAAS,QAAQ,KAAK,EAAE,aAAa,GAAG;AACpE,MAAI,MACF,GAAE,IAAI,KAAK,4BAA4B,MAAM,SAAS;AACxD,MAAI,UAAU,OACZ,MAAK,MAAM,KAAK,SACd,GAAE,IAAI,KAAK,WAAW,EAAE,SAAS;EAErC,MAAM,UAAU,gBAAgB;GAC9B,MAAM;GACN;GACA,YAAY,SAAS;GAErB,UAAU,SAAS;GACnB,MAAM;GACN;GACA;GACA;GACA;GACA;GACA;GACA,gBAAgB;GAChB;GACA,aAAa,cAAc,MAAM;GACjC;GACA;GACA,SAAS,SAAS;GAClB;GACA;GACD,CAAC;AACF,gBAAc,KAAK,UAAU,WAAW,EAAE,QAAQ;OAGlD,QAAO,MAAM,qBAAqB,QAAQ,KAAK,UAAU,KAAK;;;;;;AAyBlE,SAAgB,iBAAiB,MAA+C;CAC9E,MAAM,EAAE,UAAU,UAAU,cAAc,aAAa;CACvD,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,UAAU,uBAAuB;EACrC,aAAa,KAAK;EAClB;EACA,SAAS,KAAK;EACd,WAAW,KAAK;EAChB,gBAAgB,KAAK;EACrB,aAAa,KAAK;EAClB,cAAc,KAAK;EACnB;EACA,UAAU,KAAK;EACf,gBAAgB,KAAK;EACrB,UAAU,KAAK;EACf;EACA;EACA,eAAe,KAAK;EACpB;EACD,CAAC;CAEF,MAAM,YAAY,KAAK,UAAU,UAAU;AAC3C,WAAU,WAAW,EAAE,WAAW,MAAM,CAAC;AAEzC,MAAK,MAAM,CAAC,SAAS,WAAW,QAC9B,eAAc,KAAK,WAAW,UAAU,QAAQ,KAAK,EAAE,OAAO;CAEhE,MAAM,UAAU,CAAC,GAAG,QAAQ,MAAM,CAAC;AACnC,KAAI,QAAQ,SAAS,GAAG;EACtB,MAAM,SAAS,SAAS,QAAQ,KAAK,EAAE,SAAS;EAChD,MAAM,cAAc,QAAQ,KAAI,MAAK,UAAU,EAAE,KAAK,CAAC,KAAK,KAAK;EACjE,MAAM,iBAAiB,QAAQ,KAAI,MAAK,qBAAqB,GAAG,CAAC,KAAK,KAAK;AAC3E,IAAE,IAAI,KAAK,2BAA2B,OAAO,oDAAoD,YAAY,OAAO,OAAO,wFAAwF
,eAAe,8CAA8C;;AAGlR,QAAO"}
|
|
1
|
+
{"version":3,"file":"sync-shared2.mjs","names":["agents"],"sources":["../../src/commands/sync-shared.ts"],"sourcesContent":["import type { AgentType, CustomPrompt, OptimizeModel, SkillSection } from '../agent/index.ts'\nimport type { FeaturesConfig } from '../core/config.ts'\nimport type { ResolvedPackage, ResolveStep } from '../sources/index.ts'\nimport { appendFileSync, copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs'\nimport * as p from '@clack/prompts'\nimport { join, relative, resolve } from 'pathe'\nimport {\n agents,\n buildAllSectionPrompts,\n createToolProgress,\n generateSkillMd,\n getAvailableModels,\n getModelLabel,\n getModelName,\n optimizeDocs,\n SECTION_OUTPUT_FILES,\n} from '../agent/index.ts'\nimport { maxItems, maxLines } from '../agent/prompts/optional/budget.ts'\nimport {\n clearCache,\n getCacheDir,\n getPackageDbPath,\n getRepoCacheDir,\n getShippedSkills,\n hasShippedDocs,\n linkCachedDir,\n linkPkg,\n linkPkgNamed,\n linkRepoCachedDir,\n linkShippedSkill,\n listReferenceFiles,\n readCachedDocs,\n resolvePkgDir,\n writeToCache,\n writeToRepoCache,\n} from '../cache/index.ts'\nimport { isInteractive, NO_MODELS_MESSAGE, pickModel } from '../cli-helpers.ts'\nimport { defaultFeatures, readConfig, registerProject, updateConfig } from '../core/config.ts'\nimport { parsePackages, readLock, writeLock } from '../core/lockfile.ts'\nimport { parseFrontmatter } from '../core/markdown.ts'\nimport { readPackageJsonSafe } from '../core/package-json.ts'\nimport { sanitizeMarkdown } from '../core/sanitize.ts'\nimport { getSharedSkillsDir, semverDiff } from '../core/shared.ts'\nimport { createIndex, listIndexIds, SearchDepsUnavailableError } from '../retriv/index.ts'\nimport {\n downloadLlmsDocs,\n fetchBlogReleases,\n fetchCrawledDocs,\n fetchGitDocs,\n fetchGitHubDiscussions,\n fetchGitHubIssues,\n fetchGitHubRaw,\n fetchLlmsTxt,\n fetchNpmPackage,\n fetchReadmeContent,\n fetchReleaseNotes,\n 
filterFrameworkDocs,\n formatDiscussionAsMarkdown,\n formatIssueAsMarkdown,\n generateDiscussionIndex,\n generateDocsIndex,\n generateIssueIndex,\n generateReleaseIndex,\n getBlogPreset,\n getPrereleaseChangelogRef,\n isGhAvailable,\n isPrerelease,\n isShallowGitDocs,\n normalizeLlmsLinks,\n parseGitHubUrl,\n resolveEntryFiles,\n resolveLocalPackageDocs,\n toCrawlPattern,\n} from '../sources/index.ts'\n\n/** Max docs sent to the embedding pipeline to prevent oversized indexes */\nconst MAX_INDEX_DOCS = 250\n\nexport const RESOLVE_STEP_LABELS: Record<ResolveStep, string> = {\n 'npm': 'npm registry',\n 'github-docs': 'GitHub docs',\n 'github-meta': 'GitHub meta',\n 'github-search': 'GitHub search',\n 'readme': 'README',\n 'llms.txt': 'llms.txt',\n 'crawl': 'website crawl',\n 'local': 'node_modules',\n}\n\n/** Classify a cached doc path into the right metadata type */\nexport function classifyCachedDoc(path: string): { type: string, number?: number } {\n const issueMatch = path.match(/^issues\\/issue-(\\d+)\\.md$/)\n if (issueMatch)\n return { type: 'issue', number: Number(issueMatch[1]) }\n const discussionMatch = path.match(/^discussions\\/discussion-(\\d+)\\.md$/)\n if (discussionMatch)\n return { type: 'discussion', number: Number(discussionMatch[1]) }\n if (path.startsWith('releases/'))\n return { type: 'release' }\n return { type: 'doc' }\n}\n\nexport async function findRelatedSkills(packageName: string, skillsDir: string): Promise<string[]> {\n const related: string[] = []\n\n const npmInfo = await fetchNpmPackage(packageName)\n if (!npmInfo?.dependencies)\n return related\n\n const deps = new Set(Object.keys(npmInfo.dependencies))\n\n if (!existsSync(skillsDir))\n return related\n\n // Build packageName → dirName map from lockfile for accurate matching\n const lock = readLock(skillsDir)\n const pkgToDirName = new Map<string, string>()\n if (lock) {\n for (const [dirName, info] of Object.entries(lock.skills)) {\n if (info.packageName)\n 
pkgToDirName.set(info.packageName, dirName)\n for (const pkg of parsePackages(info.packages))\n pkgToDirName.set(pkg.name, dirName)\n }\n }\n\n const installedSkills = readdirSync(skillsDir)\n const installedSet = new Set(installedSkills)\n\n for (const dep of deps) {\n const dirName = pkgToDirName.get(dep)\n if (dirName && installedSet.has(dirName))\n related.push(dirName)\n }\n\n return related.slice(0, 5)\n}\n\n/** Clear cache + db for --force flag */\nexport function forceClearCache(packageName: string, version: string, repoInfo?: { owner: string, repo: string }): void {\n clearCache(packageName, version)\n const forcedDbPath = getPackageDbPath(packageName, version)\n if (existsSync(forcedDbPath))\n rmSync(forcedDbPath, { recursive: true, force: true })\n // Also clear repo-level cache when force is used\n if (repoInfo) {\n const repoDir = getRepoCacheDir(repoInfo.owner, repoInfo.repo)\n if (existsSync(repoDir))\n rmSync(repoDir, { recursive: true, force: true })\n }\n}\n\n/** Link all reference symlinks (pkg, docs, issues, discussions, releases) */\nexport function linkAllReferences(skillDir: string, packageName: string, cwd: string, version: string, docsType: string, extraPackages?: Array<{ name: string, version?: string }>, features?: FeaturesConfig, repoInfo?: { owner: string, repo: string }): void {\n const f = features ?? readConfig().features ?? 
defaultFeatures
  try {
    linkPkg(skillDir, packageName, cwd, version)
    linkPkgNamed(skillDir, packageName, cwd, version)
    if (!hasShippedDocs(packageName, cwd, version) && docsType !== 'readme') {
      linkCachedDir(skillDir, packageName, version, 'docs')
    }
    // Issues/discussions/releases: use repo cache when available, else package cache
    if (f.issues) {
      if (repoInfo)
        linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'issues')
      else
        linkCachedDir(skillDir, packageName, version, 'issues')
    }
    if (f.discussions) {
      if (repoInfo)
        linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'discussions')
      else
        linkCachedDir(skillDir, packageName, version, 'discussions')
    }
    if (f.releases) {
      if (repoInfo)
        linkRepoCachedDir(skillDir, repoInfo.owner, repoInfo.repo, 'releases')
      else
        linkCachedDir(skillDir, packageName, version, 'releases')
    }
    linkCachedDir(skillDir, packageName, version, 'sections')
    // Create named symlinks for additional packages in multi-package skills
    if (extraPackages) {
      for (const pkg of extraPackages) {
        if (pkg.name !== packageName)
          linkPkgNamed(skillDir, pkg.name, cwd, pkg.version)
      }
    }
  }
  catch {
    // Symlink may fail on some systems
  }
}

/** Detect docs type from cached directory contents */
export function detectDocsType(packageName: string, version: string, repoUrl?: string, llmsUrl?: string): { docsType: 'docs' | 'llms.txt' | 'readme', docSource?: string } {
  const cacheDir = getCacheDir(packageName, version)
  // Versioned git docs: index.md or a guide/ dir under docs/ signals a full docs tree
  if (existsSync(join(cacheDir, 'docs', 'index.md')) || existsSync(join(cacheDir, 'docs', 'guide'))) {
    return {
      docsType: 'docs',
      docSource: repoUrl ? `${repoUrl}/tree/v${version}/docs` : 'git',
    }
  }
  if (existsSync(join(cacheDir, 'llms.txt'))) {
    return {
      docsType: 'llms.txt',
      docSource: llmsUrl || 'llms.txt',
    }
  }
  // Both tails resolve to 'readme'; the explicit README check is kept for clarity
  if (existsSync(join(cacheDir, 'docs', 'README.md'))) {
    return { docsType: 'readme' }
  }
  return { docsType: 'readme' }
}

export interface HandleShippedResult {
  shipped: Array<{ skillName: string, skillDir: string }>
  baseDir: string
}

/** Link shipped skills, write lock entries, register project. Returns result or null if no shipped skills. */
export function handleShippedSkills(
  packageName: string,
  version: string,
  cwd: string,
  agent: AgentType,
  global: boolean,
): HandleShippedResult | null {
  const shippedSkills = getShippedSkills(packageName, cwd, version)
  if (shippedSkills.length === 0)
    return null

  const baseDir = resolveBaseDir(cwd, agent, global)
  mkdirSync(baseDir, { recursive: true })

  for (const shipped of shippedSkills) {
    linkShippedSkill(baseDir, shipped.skillName, shipped.skillDir)
    writeLock(baseDir, shipped.skillName, {
      packageName,
      version,
      source: 'shipped',
      // Date-only portion of the ISO timestamp (YYYY-MM-DD)
      syncedAt: new Date().toISOString().split('T')[0],
      generator: 'skilld',
    })
  }

  if (!global)
    registerProject(cwd)

  return { shipped: shippedSkills, baseDir }
}

/** Resolve the base skills directory for an agent */
export function resolveBaseDir(cwd: string, agent: AgentType, global: boolean): string {
  if (global) {
    const agentConfig = agents[agent]
    return agentConfig.globalSkillsDir
  }
  // Shared (multi-agent) skills dir wins over the agent-specific one when present
  const shared = getSharedSkillsDir(cwd)
  if (shared)
    return shared
  const agentConfig = agents[agent]
  return join(cwd, agentConfig.skillsDir)
}

/** Try resolving a `link:` dependency to local package docs. Returns null if not a link dep or resolution fails. */
export async function resolveLocalDep(packageName: string, cwd: string): Promise<ResolvedPackage | null> {
  const result = readPackageJsonSafe(join(cwd, 'package.json'))
  if (!result)
    return null

  const pkg = result.parsed
  const deps = { ...pkg.dependencies as Record<string, string>, ...pkg.devDependencies as Record<string, string> }
  const depVersion = deps[packageName]

  if (!depVersion?.startsWith('link:'))
    return null

  // slice(5) strips the 'link:' protocol prefix to get the relative path
  const localPath = resolve(cwd, depVersion.slice(5))
  return resolveLocalPackageDocs(localPath)
}

/** Detect CHANGELOG.md in a package directory or cached releases */
export function detectChangelog(pkgDir: string | null, cacheDir?: string): string | false {
  if (pkgDir) {
    const found = ['CHANGELOG.md', 'changelog.md'].find(f => existsSync(join(pkgDir, f)))
    if (found)
      return `pkg/${found}`
  }
  // Also check cached releases/CHANGELOG.md (fetched from GitHub)
  if (cacheDir && existsSync(join(cacheDir, 'releases', 'CHANGELOG.md')))
    return 'releases/CHANGELOG.md'
  return false
}

// ── Shared pipeline functions ──

/** A single document handed to the search-index pipeline */
export interface IndexDoc {
  id: string
  content: string
  metadata: Record<string, any>
}

export interface FetchResult {
  docSource: string
  docsType: 'llms.txt' | 'readme' | 'docs'
  docsToIndex: IndexDoc[]
  hasIssues: boolean
  hasDiscussions: boolean
  hasReleases: boolean
  warnings: string[]
  /** Parsed GitHub owner/repo for repo-level cache */
  repoInfo?: { owner: string, repo: string }
  /** Whether this result was served from cache (no fresh fetches) */
  usedCache: boolean
}

/** Fetch and cache all resources for a package (docs cascade + issues + discussions + releases) */
export async function fetchAndCacheResources(opts: {
  packageName: string
  resolved: ResolvedPackage
  version: string
  useCache: boolean
  features?: FeaturesConfig
  /** Lower-bound date for release/issue/discussion collection (ISO date) */
  from?: string
  onProgress: (message: string) =>
void
}): Promise<FetchResult> {
  const { packageName, resolved, version, onProgress } = opts
  const features = opts.features ?? readConfig().features ?? defaultFeatures

  // Retry fetch if cache is README-only but richer sources exist (likely transient failure)
  const cacheInvalidated = opts.useCache
    && resolved.crawlUrl
    && detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl).docsType === 'readme'
  const useCache = opts.useCache && !cacheInvalidated
  let docSource: string = resolved.readmeUrl || 'readme'
  let docsType: 'llms.txt' | 'readme' | 'docs' = 'readme'
  const docsToIndex: IndexDoc[] = []
  const warnings: string[] = []
  if (cacheInvalidated)
    warnings.push(`Retrying crawl for ${resolved.crawlUrl} (previous attempt only cached README)`)

  // Docs cascade: git docs → website crawl → llms.txt → docsUrl crawl → README.
  // Each later source only runs when earlier ones produced no cached docs.
  if (!useCache) {
    const cachedDocs: Array<{ path: string, content: string }> = []
    const isFrameworkDoc = (path: string) => filterFrameworkDocs([path], packageName).length > 0

    // Try versioned git docs first
    if (resolved.gitDocsUrl && resolved.repoUrl) {
      const gh = parseGitHubUrl(resolved.repoUrl)
      if (gh) {
        onProgress('Fetching git docs')
        const gitDocs = await fetchGitDocs(gh.owner, gh.repo, version, packageName)
        if (gitDocs?.fallback) {
          warnings.push(`Docs fetched from ${gitDocs.ref} branch (no tag found for v${version})`)
        }
        if (gitDocs && gitDocs.files.length > 0) {
          // Download in fixed-size batches to bound concurrent requests
          const BATCH_SIZE = 20
          const results: Array<{ file: string, content: string } | null> = []

          for (let i = 0; i < gitDocs.files.length; i += BATCH_SIZE) {
            const batch = gitDocs.files.slice(i, i + BATCH_SIZE)
            onProgress(`Downloading docs ${Math.min(i + BATCH_SIZE, gitDocs.files.length)}/${gitDocs.files.length} from ${gitDocs.ref}`)
            const batchResults = await Promise.all(
              batch.map(async (file) => {
                const url = `${gitDocs.baseUrl}/${file}`
                const content = await fetchGitHubRaw(url)
                if (!content)
                  return null
                return { file, content }
              }),
            )
            results.push(...batchResults)
          }

          for (const r of results) {
            if (r) {
              // Normalize every file path to live under docs/ in the cache
              const stripped = gitDocs.docsPrefix ? r.file.replace(gitDocs.docsPrefix, '') : r.file
              const cachePath = stripped.startsWith('docs/') ? stripped : `docs/${stripped}`
              cachedDocs.push({ path: cachePath, content: r.content })
              docsToIndex.push({
                id: cachePath,
                content: r.content,
                metadata: { package: packageName, source: cachePath, type: 'doc' },
              })
            }
          }

          const downloaded = results.filter(Boolean).length
          if (downloaded > 0) {
            // Shallow git-docs: if < threshold and llms.txt exists, discard and fall through
            if (isShallowGitDocs(downloaded) && resolved.llmsUrl) {
              onProgress(`Shallow git-docs (${downloaded} files), trying llms.txt`)
              cachedDocs.length = 0
              docsToIndex.length = 0
            }
            else {
              docSource = `${resolved.repoUrl}/tree/${gitDocs.ref}/docs`
              docsType = 'docs'
              writeToCache(packageName, version, cachedDocs)

              // Always cache llms.txt alongside good git-docs as supplementary reference
              if (resolved.llmsUrl) {
                onProgress('Caching supplementary llms.txt')
                const llmsContent = await fetchLlmsTxt(resolved.llmsUrl)
                if (llmsContent) {
                  const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin
                  const supplementary: Array<{ path: string, content: string }> = [
                    { path: 'llms.txt', content: normalizeLlmsLinks(llmsContent.raw, baseUrl) },
                  ]
                  if (llmsContent.links.length > 0) {
                    onProgress(`Downloading ${llmsContent.links.length} supplementary docs`)
                    const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
                      onProgress(`Downloading supplementary doc ${done + 1}/${total}`)
                    })
                    for (const doc of docs) {
                      if (!isFrameworkDoc(doc.url))
                        continue
                      const localPath = doc.url.startsWith('/') ? doc.url.slice(1) : doc.url
                      supplementary.push({ path: join('llms-docs', ...localPath.split('/')), content: doc.content })
                    }
                  }
                  writeToCache(packageName, version, supplementary)
                }
              }
            }
          }
        }
      }
    }

    // Try website crawl
    if (resolved.crawlUrl && cachedDocs.length === 0) {
      onProgress('Crawling website')
      const crawledDocs = await fetchCrawledDocs(resolved.crawlUrl, onProgress).catch((err) => {
        warnings.push(`Crawl failed for ${resolved.crawlUrl}: ${err?.message || err}`)
        return []
      })
      if (crawledDocs.length === 0 && resolved.crawlUrl) {
        warnings.push(`Crawl returned 0 docs from ${resolved.crawlUrl}`)
      }
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path))
            continue
          cachedDocs.push(doc)
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: { package: packageName, source: doc.path, type: 'doc' },
          })
        }
        docSource = resolved.crawlUrl
        docsType = 'docs'
        writeToCache(packageName, version, cachedDocs)
      }
    }

    // Try llms.txt
    if (resolved.llmsUrl && cachedDocs.length === 0) {
      onProgress('Fetching llms.txt')
      const llmsContent = await fetchLlmsTxt(resolved.llmsUrl)
      if (llmsContent) {
        docSource = resolved.llmsUrl!
        docsType = 'llms.txt'
        const baseUrl = resolved.docsUrl || new URL(resolved.llmsUrl).origin
        cachedDocs.push({ path: 'llms.txt', content: normalizeLlmsLinks(llmsContent.raw, baseUrl) })

        if (llmsContent.links.length > 0) {
          onProgress(`Downloading ${llmsContent.links.length} linked docs`)
          const docs = await downloadLlmsDocs(llmsContent, baseUrl, (url, done, total) => {
            onProgress(`Downloading linked doc ${done + 1}/${total}`)
          })

          for (const doc of docs) {
            if (!isFrameworkDoc(doc.url))
              continue
            const localPath = doc.url.startsWith('/') ? doc.url.slice(1) : doc.url
            const cachePath = join('docs', ...localPath.split('/'))
            cachedDocs.push({ path: cachePath, content: doc.content })
            docsToIndex.push({
              id: doc.url,
              content: doc.content,
              metadata: { package: packageName, source: cachePath, type: 'doc' },
            })
          }
          // Upgrade to full 'docs' once linked pages were actually downloaded
          if (docs.length > 0)
            docsType = 'docs'
        }

        writeToCache(packageName, version, cachedDocs)
      }
    }

    // Try crawling docsUrl as fallback (when no actual doc files from git/crawl/llms.txt)
    if (resolved.docsUrl && !cachedDocs.some(d => d.path.startsWith('docs/'))) {
      const crawlPattern = resolved.crawlUrl || toCrawlPattern(resolved.docsUrl)
      onProgress('Crawling docs site')
      const crawlMaxPages = resolved.crawlUrl ? 200 : 400
      const crawledDocs = await fetchCrawledDocs(crawlPattern, onProgress, crawlMaxPages).catch((err) => {
        warnings.push(`Crawl failed for ${crawlPattern}: ${err?.message || err}`)
        return []
      })
      if (crawledDocs.length > 0) {
        for (const doc of crawledDocs) {
          if (!isFrameworkDoc(doc.path))
            continue
          cachedDocs.push(doc)
          docsToIndex.push({
            id: doc.path,
            content: doc.content,
            metadata: { package: packageName, source: doc.path, type: 'doc' },
          })
        }
        docSource = crawlPattern
        docsType = 'docs'
        writeToCache(packageName, version, cachedDocs)
      }
    }

    // Fallback to README
    if (resolved.readmeUrl && cachedDocs.length === 0) {
      onProgress('Fetching README')
      const content = await fetchReadmeContent(resolved.readmeUrl)
      if (content) {
        cachedDocs.push({ path: 'docs/README.md', content })
        docsToIndex.push({
          id: 'README.md',
          content,
          metadata: { package: packageName, source: 'docs/README.md', type: 'doc' },
        })
        writeToCache(packageName, version, cachedDocs)
      }
    }

    // Generate docs index if we have multiple doc files
    if (docsType !== 'readme' && cachedDocs.filter(d => d.path.startsWith('docs/') && d.path.endsWith('.md')).length > 1) {
      const docsIndex = generateDocsIndex(cachedDocs)
      if (docsIndex) {
writeToCache(packageName, version, [{ path: 'docs/_INDEX.md', content: docsIndex }])
      }
    }
  }
  else {
    // Detect docs type from cache
    onProgress('Loading cached docs')
    const detected = detectDocsType(packageName, version, resolved.repoUrl, resolved.llmsUrl)
    docsType = detected.docsType
    if (detected.docSource)
      docSource = detected.docSource

    // Load cached docs for indexing if db doesn't exist yet
    const dbPath = getPackageDbPath(packageName, version)
    if (!existsSync(dbPath)) {
      onProgress('Reading cached docs for indexing')
      const cached = readCachedDocs(packageName, version)
      for (const doc of cached) {
        docsToIndex.push({
          id: doc.path,
          content: doc.content,
          // classifyCachedDoc supplies type (+ issue/discussion number when present)
          metadata: { package: packageName, source: doc.path, ...classifyCachedDoc(doc.path) },
        })
      }
    }

    // Backfill docs index for caches created before this feature
    if (docsType !== 'readme' && !existsSync(join(getCacheDir(packageName, version), 'docs', '_INDEX.md'))) {
      onProgress('Generating docs index')
      const cached = readCachedDocs(packageName, version)
      const docFiles = cached.filter(d => d.path.startsWith('docs/') && d.path.endsWith('.md'))
      if (docFiles.length > 1) {
        const docsIndex = generateDocsIndex(cached)
        if (docsIndex) {
          writeToCache(packageName, version, [{ path: 'docs/_INDEX.md', content: docsIndex }])
        }
      }
    }
  }

  // Parse repo info once for repo-level caching
  const gh = resolved.repoUrl ? parseGitHubUrl(resolved.repoUrl) : null
  const repoInfo = gh ? { owner: gh.owner, repo: gh.repo } : undefined

  // Determine where repo-level data lives (repo cache if available, else package cache)
  const repoCacheDir = repoInfo ? getRepoCacheDir(repoInfo.owner, repoInfo.repo) : null
  const cacheDir = getCacheDir(packageName, version)
  const issuesDir = repoCacheDir ? join(repoCacheDir, 'issues') : join(cacheDir, 'issues')
  const discussionsDir = repoCacheDir ? join(repoCacheDir, 'discussions') : join(cacheDir, 'discussions')
  const releasesPath = repoCacheDir ? join(repoCacheDir, 'releases') : join(cacheDir, 'releases')

  // Issues (independent of useCache — has its own existsSync guard)
  if (features.issues && gh && isGhAvailable() && !existsSync(issuesDir)) {
    onProgress('Fetching issues via GitHub API')
    const issues = await fetchGitHubIssues(gh.owner, gh.repo, 30, resolved.releasedAt, opts.from).catch(() => [])
    if (issues.length > 0) {
      onProgress(`Caching ${issues.length} issues`)
      const issueDocs = [
        ...issues.map(issue => ({
          path: `issues/issue-${issue.number}.md`,
          content: formatIssueAsMarkdown(issue),
        })),
        {
          path: 'issues/_INDEX.md',
          content: generateIssueIndex(issues),
        },
      ]
      if (repoInfo)
        writeToRepoCache(repoInfo.owner, repoInfo.repo, issueDocs)
      else
        writeToCache(packageName, version, issueDocs)
      for (const issue of issues) {
        docsToIndex.push({
          id: `issue-${issue.number}`,
          content: sanitizeMarkdown(`#${issue.number}: ${issue.title}\n\n${issue.body || ''}`),
          metadata: { package: packageName, source: `issues/issue-${issue.number}.md`, type: 'issue', number: issue.number },
        })
      }
    }
  }

  // Discussions
  if (features.discussions && gh && isGhAvailable() && !existsSync(discussionsDir)) {
    onProgress('Fetching discussions via GitHub API')
    const discussions = await fetchGitHubDiscussions(gh.owner, gh.repo, 20, resolved.releasedAt, opts.from).catch(() => [])
    if (discussions.length > 0) {
      onProgress(`Caching ${discussions.length} discussions`)
      const discussionDocs = [
        ...discussions.map(d => ({
          path: `discussions/discussion-${d.number}.md`,
          content: formatDiscussionAsMarkdown(d),
        })),
        {
          path: 'discussions/_INDEX.md',
          content: generateDiscussionIndex(discussions),
        },
      ]
      if (repoInfo)
        writeToRepoCache(repoInfo.owner, repoInfo.repo, discussionDocs)
      else
        writeToCache(packageName, version, discussionDocs)
      for (const d of discussions) {
        docsToIndex.push({
          id: `discussion-${d.number}`,
          content: sanitizeMarkdown(`#${d.number}: ${d.title}\n\n${d.body || ''}`),
          metadata: { package: packageName, source: `discussions/discussion-${d.number}.md`, type: 'discussion', number: d.number },
        })
      }
    }
  }

  // Releases (GitHub releases + blog releases + CHANGELOG → unified releases/ dir)
  if (features.releases && gh && isGhAvailable() && !existsSync(releasesPath)) {
    onProgress('Fetching releases via GitHub API')
    const changelogRef = isPrerelease(version) ? getPrereleaseChangelogRef(packageName) : undefined
    const releaseDocs = await fetchReleaseNotes(gh.owner, gh.repo, version, resolved.gitRef, packageName, opts.from, changelogRef).catch(() => [])

    // Fetch blog releases into same releases/ dir
    let blogDocs: Array<{ path: string, content: string }> = []
    if (getBlogPreset(packageName)) {
      onProgress('Fetching blog release notes')
      blogDocs = await fetchBlogReleases(packageName, version).catch(() => [])
    }

    const allDocs = [...releaseDocs, ...blogDocs]

    // Parse blog release metadata for index generation
    const blogEntries = blogDocs
      .filter(d => !d.path.endsWith('_INDEX.md'))
      .map((d) => {
        const versionMatch = d.path.match(/blog-(.+)\.md$/)
        const fm = parseFrontmatter(d.content)
        return {
          version: versionMatch?.[1] ?? '',
          title: fm.title ?? `Release ${versionMatch?.[1]}`,
          date: fm.date ?? '',
        }
      })
      .filter(b => b.version)

    // Parse GitHub releases for index (extract from frontmatter)
    const ghReleases = releaseDocs
      .filter(d => d.path.startsWith('releases/') && !d.path.endsWith('CHANGELOG.md'))
      .map((d) => {
        const fm = parseFrontmatter(d.content)
        const tag = fm.tag ?? ''
        const name = fm.name ?? tag
        const published = fm.published ?? ''
        return { id: 0, tag, name, prerelease: false, createdAt: published, publishedAt: published, markdown: '' }
      })
      .filter(r => r.tag)

    const hasChangelog = allDocs.some(d => d.path === 'releases/CHANGELOG.md')

    // Generate unified _INDEX.md
    if (ghReleases.length > 0 || blogEntries.length > 0) {
      allDocs.push({
        path: 'releases/_INDEX.md',
        content: generateReleaseIndex({ releases: ghReleases, packageName, blogReleases: blogEntries, hasChangelog }),
      })
    }

    if (allDocs.length > 0) {
      onProgress(`Caching ${allDocs.length} releases`)
      if (repoInfo)
        writeToRepoCache(repoInfo.owner, repoInfo.repo, allDocs)
      else
        writeToCache(packageName, version, allDocs)
      for (const doc of allDocs) {
        docsToIndex.push({
          id: doc.path,
          content: doc.content,
          metadata: { package: packageName, source: doc.path, type: 'release' },
        })
      }
    }
  }

  return {
    docSource,
    docsType,
    docsToIndex,
    // has* reflect on-disk state (covers both fresh fetches and pre-existing cache)
    hasIssues: features.issues && existsSync(issuesDir),
    hasDiscussions: features.discussions && existsSync(discussionsDir),
    hasReleases: features.releases && existsSync(releasesPath),
    warnings,
    repoInfo,
    usedCache: useCache,
  }
}

/**
 * Extract the parent document ID from a chunk ID.
 * Chunk IDs have the form "docId#chunk-N"; non-chunk IDs return as-is.
 */
function parentDocId(id: string): string {
  const idx = id.indexOf('#chunk-')
  return idx === -1 ? id : id.slice(0, idx)
}

/** Cap and sort docs by type priority, mutates and truncates allDocs in place */
function capDocs(allDocs: IndexDoc[], max: number, onProgress: (msg: string) => void): void {
  if (allDocs.length <= max)
    return
  const TYPE_PRIORITY: Record<string, number> = { doc: 0, issue: 1, discussion: 2, release: 3, source: 4, types: 5 }
  allDocs.sort((a, b) => {
    // Unknown types fall back to priority 3 (same rank as 'release')
    const ta = TYPE_PRIORITY[a.metadata?.type || 'doc'] ?? 3
    const tb = TYPE_PRIORITY[b.metadata?.type || 'doc'] ??
3
    if (ta !== tb)
      return ta - tb
    // Stable tie-break by id for deterministic truncation
    return a.id.localeCompare(b.id)
  })
  onProgress(`Indexing capped at ${max}/${allDocs.length} docs (prioritized by type)`)
  allDocs.length = max
}

/** Index all resources into the search database, with incremental support */
export async function indexResources(opts: {
  packageName: string
  version: string
  cwd: string
  docsToIndex: IndexDoc[]
  features?: FeaturesConfig
  onProgress: (message: string) => void
}): Promise<void> {
  const { packageName, version, cwd, onProgress } = opts
  const features = opts.features ?? readConfig().features ?? defaultFeatures

  if (!features.search)
    return

  const dbPath = getPackageDbPath(packageName, version)
  const dbExists = existsSync(dbPath)

  const allDocs = [...opts.docsToIndex]

  // Add entry files
  const pkgDir = resolvePkgDir(packageName, cwd, version)
  if (features.search && pkgDir) {
    onProgress('Scanning exports')
    const entryFiles = await resolveEntryFiles(pkgDir)
    for (const e of entryFiles) {
      allDocs.push({
        id: e.path,
        content: e.content,
        metadata: { package: packageName, source: `pkg/${e.path}`, type: e.type },
      })
    }
  }

  if (allDocs.length === 0)
    return

  capDocs(allDocs, MAX_INDEX_DOCS, onProgress)

  // Full build when no existing DB
  if (!dbExists) {
    onProgress(`Building search index (${allDocs.length} docs)`)
    try {
      await createIndex(allDocs, {
        dbPath,
        onProgress: ({ phase, current, total }) => {
          if (phase === 'storing') {
            const d = allDocs[current - 1]
            // Display 'code' for source/types entries, otherwise the raw type
            const type = d?.metadata?.type === 'source' || d?.metadata?.type === 'types' ? 'code' : (d?.metadata?.type || 'doc')
            onProgress(`Storing ${type} (${current}/${total})`)
          }
          else if (phase === 'embedding') {
            onProgress(`Creating embeddings (${current}/${total})`)
          }
        },
      })
    }
    catch (err) {
      if (err instanceof SearchDepsUnavailableError)
        onProgress('Search indexing skipped (native deps unavailable)')
      else
        throw err
    }
    return
  }

  // Incremental update: diff incoming docs against existing index
  let existingIds: string[]
  try {
    existingIds = await listIndexIds({ dbPath })
  }
  catch (err) {
    if (err instanceof SearchDepsUnavailableError) {
      onProgress('Search indexing skipped (native deps unavailable)')
      return
    }
    throw err
  }

  // Group existing chunk IDs by parent doc ID
  const existingParentIds = new Set(existingIds.map(parentDocId))
  const incomingIds = new Set(allDocs.map(d => d.id))

  // Docs to add: in incoming but not in existing index
  const newDocs = allDocs.filter(d => !existingParentIds.has(d.id))

  // Chunk IDs to remove: their parent doc is no longer in incoming set
  const removeIds = existingIds.filter(id => !incomingIds.has(parentDocId(id)))

  if (newDocs.length === 0 && removeIds.length === 0) {
    onProgress('Search index up to date')
    return
  }

  const parts: string[] = []
  if (newDocs.length > 0)
    parts.push(`+${newDocs.length} new`)
  if (removeIds.length > 0)
    parts.push(`-${removeIds.length} stale`)
  onProgress(`Updating search index (${parts.join(', ')})`)

  try {
    await createIndex(newDocs, {
      dbPath,
      removeIds,
      onProgress: ({ phase, current, total }) => {
        if (phase === 'storing') {
          const d = newDocs[current - 1]
          const type = d?.metadata?.type === 'source' || d?.metadata?.type === 'types' ? 'code' : (d?.metadata?.type || 'doc')
          onProgress(`Storing ${type} (${current}/${total})`)
        }
        else if (phase === 'embedding') {
          onProgress(`Creating embeddings (${current}/${total})`)
        }
      },
    })
  }
  catch (err) {
    if (err instanceof SearchDepsUnavailableError)
      onProgress('Search indexing skipped (native deps unavailable)')
    else
      throw err
  }
}

/**
 * Eject references: copy cached files as real files into references/ dir.
 * Used for portable skills (git repos, sharing). Replaces symlinks with copies.
 * Does NOT copy pkg files — those reference node_modules directly.
 */
export function ejectReferences(skillDir: string, packageName: string, cwd: string, version: string, docsType: string, features?: FeaturesConfig, repoInfo?: { owner: string, repo: string }): void {
  const f = features ?? readConfig().features ?? defaultFeatures
  const cacheDir = getCacheDir(packageName, version)
  const refsDir = join(skillDir, 'references')
  // Repo-level data source (falls back to package cache)
  const repoDir = repoInfo ?
getRepoCacheDir(repoInfo.owner, repoInfo.repo) : cacheDir

  // Copy cached docs (skip pkg — eject is for portable sharing, pkg references node_modules)
  if (!hasShippedDocs(packageName, cwd, version) && docsType !== 'readme')
    copyCachedSubdir(cacheDir, refsDir, 'docs')

  if (f.issues)
    copyCachedSubdir(repoDir, refsDir, 'issues')
  if (f.discussions)
    copyCachedSubdir(repoDir, refsDir, 'discussions')
  if (f.releases)
    copyCachedSubdir(repoDir, refsDir, 'releases')
}

/** Recursively copy a cached subdirectory into the references dir */
function copyCachedSubdir(cacheDir: string, refsDir: string, subdir: string): void {
  const srcDir = join(cacheDir, subdir)
  if (!existsSync(srcDir))
    return

  const destDir = join(refsDir, subdir)
  mkdirSync(destDir, { recursive: true })

  // Depth-first walk; `rel` tracks the path relative to srcDir so the tree
  // structure is mirrored under destDir
  function walk(dir: string, rel: string) {
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
      const srcPath = join(dir, entry.name)
      const destPath = join(destDir, rel ? `${rel}/${entry.name}` : entry.name)
      if (entry.isDirectory()) {
        mkdirSync(destPath, { recursive: true })
        walk(srcPath, rel ? `${rel}/${entry.name}` : entry.name)
      }
      else {
        copyFileSync(srcPath, destPath)
      }
    }
  }

  walk(srcDir, '')
}

// ── Shared UI + LLM functions (used by sync.ts, sync-git.ts, sync-parallel.ts, etc.) ──

/**
 * Check if .gitignore has `.skilld` entry.
 * If missing, prompt to add it. Skipped for global installs.
 */
export async function ensureGitignore(skillsDir: string, cwd: string, isGlobal: boolean): Promise<void> {
  if (isGlobal)
    return

  const gitignorePath = join(cwd, '.gitignore')
  const pattern = '.skilld'

  // Check if already ignored
  if (existsSync(gitignorePath)) {
    const content = readFileSync(gitignorePath, 'utf-8')
    if (content.split('\n').some(line => line.trim() === pattern))
      return
  }

  // Non-interactive: auto-add (default is true anyway)
  if (!isInteractive()) {
    const entry = `\n# Skilld references (recreated by \`skilld install\`)\n${pattern}\n`
    if (existsSync(gitignorePath)) {
      const existing = readFileSync(gitignorePath, 'utf-8')
      // Only insert a separator newline when the file doesn't already end with one
      const separator = existing.endsWith('\n') ? '' : '\n'
      appendFileSync(gitignorePath, `${separator}${entry}`)
    }
    else {
      writeFileSync(gitignorePath, entry)
    }
    return
  }

  // Show guidance
  const relSkillsDir = relative(cwd, skillsDir) || '.'
  p.log.info(
    `\x1B[1mGit guidance:\x1B[0m\n`
    + ` \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/*/SKILL.md\x1B[0m\n`
    + ` \x1B[32m✓\x1B[0m Commit: \x1B[36m${relSkillsDir}/skilld-lock.yaml\x1B[0m\n`
    + ` \x1B[31m✗\x1B[0m Ignore: \x1B[36m${pattern}\x1B[0m \x1B[90m(recreated by \`skilld install\`)\x1B[0m`,
  )

  const add = await p.confirm({
    message: `Add \`${pattern}\` to .gitignore?`,
    initialValue: true,
  })

  if (p.isCancel(add) || !add)
    return

  const entry = `\n# Skilld references (recreated by \`skilld install\`)\n${pattern}\n`
  if (existsSync(gitignorePath)) {
    const existing = readFileSync(gitignorePath, 'utf-8')
    const separator = existing.endsWith('\n') ?
'' : '\n'
    appendFileSync(gitignorePath, `${separator}${entry}`)
  }
  else {
    writeFileSync(gitignorePath, entry)
  }

  p.log.success('Updated .gitignore')
}

/** Markers delimiting the skilld-managed snippet inside agent instruction files */
export const SKILLD_MARKER_START = '<!-- skilld -->'
export const SKILLD_MARKER_END = '<!-- /skilld -->'

const DEFAULT_SKILL_HINT = 'Before modifying code, evaluate each installed skill against the current task.\nFor each skill, determine YES/NO relevance and invoke all YES skills before proceeding.'

/** Activation hint wrapped in skilld markers for plain-markdown instruction files */
function getSkillInstructions(agent: AgentType): string {
  const hint = agents[agent].skillActivationHint || DEFAULT_SKILL_HINT
  return `${SKILLD_MARKER_START}\n${hint}\n${SKILLD_MARKER_END}`
}

/** Activation hint wrapped in frontmatter for .mdc instruction files */
function getMdcSkillInstructions(agent: AgentType): string {
  const hint = agents[agent].skillActivationHint || DEFAULT_SKILL_HINT
  return `---\ndescription: "Activates installed skilld skills before code changes"\nalwaysApply: true\n---\n\n${hint}`
}

/**
 * Check if agent instruction file has skilld skill-activation snippet.
 * If missing, prompt to add it. Skipped for global installs or agents without an instructionFile.
 */
export async function ensureAgentInstructions(agent: AgentType, cwd: string, isGlobal: boolean): Promise<void> {
  if (isGlobal)
    return

  const agentConfig = agents[agent]
  if (!agentConfig.instructionFile)
    return

  const filePath = join(cwd, agentConfig.instructionFile)
  const isMdc = agentConfig.instructionFile.endsWith('.mdc')

  // MDC format: dedicated file, no markers needed
  if (isMdc) {
    if (existsSync(filePath))
      return

    const content = `${getMdcSkillInstructions(agent)}\n`

    if (!isInteractive()) {
      mkdirSync(join(filePath, '..'), { recursive: true })
      writeFileSync(filePath, content)
      return
    }

    p.note(
      `This tells your agent to check installed skills before making\n`
      + `code changes. Without it, skills are available but may not\n`
      + `activate automatically.\n`
      + `\n`
      + `\x1B[90m${getMdcSkillInstructions(agent)}\x1B[0m`,
      `Create ${agentConfig.instructionFile}`,
    )

    const add = await p.confirm({
      message: `Create ${agentConfig.instructionFile} with skill activation instructions?`,
      initialValue: true,
    })

    if (p.isCancel(add) || !add)
      return

    mkdirSync(join(filePath, '..'), { recursive: true })
    writeFileSync(filePath, content)
    p.log.success(`Created ${agentConfig.instructionFile}`)
    return
  }

  // Check if marker already present
  if (existsSync(filePath)) {
    const content = readFileSync(filePath, 'utf-8')
    if (content.includes(SKILLD_MARKER_START))
      return
  }

  // Non-interactive: auto-add
  if (!isInteractive()) {
    if (existsSync(filePath)) {
      const existing = readFileSync(filePath, 'utf-8')
      const separator = existing.endsWith('\n') ? '' : '\n'
      appendFileSync(filePath, `${separator}\n${getSkillInstructions(agent)}\n`)
    }
    else {
      writeFileSync(filePath, `${getSkillInstructions(agent)}\n`)
    }
    return
  }

  const fileExists = existsSync(filePath)
  const action = fileExists ? 'Append to' : 'Create'
  p.note(
    `This tells your agent to check installed skills before making\n`
    + `code changes. Without it, skills are available but may not\n`
    + `activate automatically.\n`
    + `\n`
    // NOTE(review): replace(/\n/g, '\n') is a no-op as written — possibly meant
    // to indent continuation lines; confirm intent
    + `\x1B[90m${getSkillInstructions(agent).replace(/\n/g, '\n')}\x1B[0m`,
    `${action} ${agentConfig.instructionFile}`,
  )

  const add = await p.confirm({
    message: `${action} ${agentConfig.instructionFile} with skill activation instructions?`,
    initialValue: true,
  })

  if (p.isCancel(add) || !add)
    return

  if (existsSync(filePath)) {
    const existing = readFileSync(filePath, 'utf-8')
    const separator = existing.endsWith('\n') ? '' : '\n'
    appendFileSync(filePath, `${separator}\n${getSkillInstructions(agent)}\n`)
  }
  else {
    writeFileSync(filePath, `${getSkillInstructions(agent)}\n`)
  }

  p.log.success(`Updated ${agentConfig.instructionFile}`)
}

/** Select LLM model for SKILL.md generation (independent of target agent) */
export async function selectModel(skipPrompt: boolean): Promise<OptimizeModel | null> {
  const config = readConfig()
  const available = await getAvailableModels()

  if (available.length === 0) {
    p.log.warn(NO_MODELS_MESSAGE)
    return null
  }

  // Use config model if set and available (only when not prompting)
  if (skipPrompt) {
    if (config.model && available.some(m => m.id === config.model))
      return config.model
    // Warn if configured model is unavailable (auth revoked, CLI uninstalled, etc.)
    if (config.model)
      p.log.warn(`Configured model \x1B[36m${config.model}\x1B[0m is unavailable — using auto-selected fallback`)
    // Prefer the recommended model, else the first available one
    return available.find(m => m.recommended)?.id ??
available[0]!.id\n }\n\n // Smart provider → model (skips provider step when only 1 provider)\n const choice = await pickModel(available)\n if (!choice)\n return null\n\n // Remember choice for next time\n updateConfig({ model: choice as OptimizeModel })\n\n return choice as OptimizeModel\n}\n\n/** Default sections when model is pre-set (non-interactive) */\nexport const DEFAULT_SECTIONS: SkillSection[] = ['best-practices', 'api-changes']\n\nexport async function selectSkillSections(message = 'Enhance SKILL.md'): Promise<{ sections: SkillSection[], customPrompt?: CustomPrompt, cancelled: boolean }> {\n p.log.info('Budgets adapt to package release density.')\n const selected = await p.multiselect({\n message,\n options: [\n { label: 'API changes', value: 'api-changes' as SkillSection, hint: 'new/deprecated APIs from version history' },\n { label: 'Best practices', value: 'best-practices' as SkillSection, hint: 'gotchas, pitfalls, patterns' },\n { label: 'Custom section', value: 'custom' as SkillSection, hint: 'add your own section' },\n ],\n initialValues: DEFAULT_SECTIONS,\n required: false,\n })\n\n if (p.isCancel(selected))\n return { sections: [], cancelled: true }\n\n const sections = selected as SkillSection[]\n if (sections.length === 0)\n return { sections: [], cancelled: false }\n\n // Show per-section budget based on selection count\n if (sections.length > 1) {\n const n = sections.length\n const budgetLines: string[] = []\n for (const s of sections) {\n switch (s) {\n case 'api-changes':\n budgetLines.push(` API changes ${maxItems(6, 12, n)}–${maxItems(6, Math.round(12 * 1.6), n)} items (adapts to release churn)`)\n break\n case 'best-practices':\n budgetLines.push(` Best practices ${maxItems(4, 10, n)}–${maxItems(4, Math.round(10 * 1.3), n)} items`)\n break\n case 'custom':\n budgetLines.push(` Custom ≤${maxLines(50, 80, n)} lines`)\n break\n }\n }\n p.log.info(`Budget (${n} sections):\\n${budgetLines.join('\\n')}`)\n }\n\n let customPrompt: CustomPrompt 
| undefined\n if (sections.includes('custom')) {\n const heading = await p.text({\n message: 'Section heading',\n placeholder: 'e.g. \"Migration from v2\" or \"SSR Patterns\"',\n })\n if (p.isCancel(heading))\n return { sections: [], cancelled: true }\n\n const body = await p.text({\n message: 'Instructions for this section',\n placeholder: 'e.g. \"Document breaking changes and migration steps from v2 to v3\"',\n })\n if (p.isCancel(body))\n return { sections: [], cancelled: true }\n\n customPrompt = { heading: heading as string, body: body as string }\n }\n\n return { sections, customPrompt, cancelled: false }\n}\n\nexport interface LlmConfig {\n model: OptimizeModel\n sections: SkillSection[]\n customPrompt?: CustomPrompt\n promptOnly?: boolean\n}\n\n/** Context about the existing skill when running an update (not a fresh add). */\nexport interface UpdateContext {\n oldVersion?: string\n newVersion?: string\n syncedAt?: string\n /** Whether the existing SKILL.md was LLM-enhanced (has generated_by in frontmatter). */\n wasEnhanced: boolean\n /** Pre-computed bump type (used by parallel sync to pass the max across packages). 
*/\n bumpType?: string\n}\n\n/**\n * Resolve sections + model for LLM enhancement.\n * If presetModel is provided, uses DEFAULT_SECTIONS without prompting.\n * Returns null if cancelled or no sections/model selected.\n */\nexport async function selectLlmConfig(presetModel?: OptimizeModel, message?: string, updateCtx?: UpdateContext): Promise<LlmConfig | null> {\n if (presetModel) {\n // Validate preset model is still available (env/OAuth may have changed)\n const available = await getAvailableModels()\n if (available.some(m => m.id === presetModel))\n return { model: presetModel, sections: DEFAULT_SECTIONS }\n // Fall through to interactive selection if preset unavailable\n if (!isInteractive())\n return null\n }\n\n // Non-interactive (CI, agent, no TTY): skip generation unless model explicitly provided\n if (!isInteractive()) {\n return null\n }\n\n // Resolve default model (configured or recommended) without prompting\n const config = readConfig()\n const available = await getAvailableModels()\n\n if (available.length === 0) {\n p.log.warn(NO_MODELS_MESSAGE)\n return null\n }\n\n // Inline the skipPrompt logic from selectModel to avoid a second getAvailableModels() call\n let defaultModel: OptimizeModel\n if (config.model && available.some(m => m.id === config.model)) {\n defaultModel = config.model\n }\n else {\n if (config.model)\n p.log.warn(`Configured model \\x1B[36m${config.model}\\x1B[0m is unavailable — using auto-selected fallback`)\n defaultModel = (available.find(m => m.recommended)?.id ?? available[0]!.id) as OptimizeModel\n }\n\n const defaultModelName = getModelName(defaultModel)\n const defaultModelInfo = available.find(m => m.id === defaultModel)\n const providerHint = defaultModelInfo?.providerName ?? ''\n const sourceHint = config.model === defaultModel ? 'configured' : 'recommended'\n const defaultHint = providerHint ? 
`${providerHint} · ${sourceHint}` : sourceHint\n\n // Build update context hint for the prompt message\n let enhanceMessage = message ? `${message}?` : 'Enhance SKILL.md?'\n let defaultToSkip = false\n if (updateCtx) {\n const diff = updateCtx.bumpType\n ?? (updateCtx.oldVersion && updateCtx.newVersion ? semverDiff(updateCtx.oldVersion, updateCtx.newVersion) : null)\n const isSmallBump = diff === 'patch' || diff === 'prerelease' || diff === 'prepatch' || diff === 'preminor' || diff === 'premajor'\n\n const ageParts: string[] = []\n if (diff)\n ageParts.push(diff)\n if (updateCtx.syncedAt) {\n const syncedAtMs = new Date(updateCtx.syncedAt).getTime()\n if (Number.isFinite(syncedAtMs)) {\n const days = Math.floor((Date.now() - syncedAtMs) / 86_400_000)\n ageParts.push(days === 0 ? 'today' : days === 1 ? '1d ago' : `${days}d ago`)\n }\n }\n if (updateCtx.wasEnhanced)\n ageParts.push('LLM-enhanced')\n\n const versionHint = updateCtx.oldVersion && updateCtx.newVersion\n ? `${updateCtx.oldVersion} → ${updateCtx.newVersion}`\n : null\n const hint = [versionHint, ...ageParts].filter(Boolean).join(' · ')\n if (hint)\n enhanceMessage = `Enhance SKILL.md? \\x1B[90m(${hint})\\x1B[0m`\n\n // Default to Skip for patch/prerelease bumps on already-enhanced skills\n if (updateCtx.wasEnhanced && isSmallBump)\n defaultToSkip = true\n }\n\n const choice = await p.select({\n message: enhanceMessage,\n options: [\n { label: defaultModelName, value: 'default' as const, hint: defaultHint },\n { label: 'Different model', value: 'pick' as const, hint: 'choose another enhancement model' },\n { label: 'Prompt only', value: 'prompt' as const, hint: 'write prompts for manual use' },\n { label: 'Skip', value: 'skip' as const, hint: 'base skill with docs, issues, and types' },\n ],\n ...(defaultToSkip ? 
{ initialValue: 'skip' as const } : {}),\n })\n\n if (p.isCancel(choice))\n return null\n\n if (choice === 'skip')\n return null\n\n if (choice === 'prompt') {\n const { sections, customPrompt, cancelled } = await selectSkillSections(\n message ? `${message} (prompt only)` : 'Select sections for prompt generation',\n )\n if (cancelled || sections.length === 0)\n return null\n // model is unused for prompt-only but required by type — use defaultModel as placeholder\n return { model: defaultModel, sections, customPrompt, promptOnly: true }\n }\n\n let model: OptimizeModel\n if (choice === 'pick') {\n const picked = await pickModel(available)\n if (!picked)\n return null\n updateConfig({ model: picked as OptimizeModel })\n model = picked as OptimizeModel\n }\n else {\n model = defaultModel\n }\n if (!model)\n return null\n\n const modelName = getModelName(model)\n const { sections, customPrompt, cancelled } = await selectSkillSections(\n message ? `${message} (${modelName})` : `Enhance SKILL.md with ${modelName}`,\n )\n\n if (cancelled || sections.length === 0)\n return null\n\n return { model, sections, customPrompt }\n}\n\nexport interface EnhanceOptions {\n packageName: string\n version: string\n skillDir: string\n dirName?: string\n model: OptimizeModel\n resolved: { repoUrl?: string, llmsUrl?: string, releasedAt?: string, docsUrl?: string, gitRef?: string, dependencies?: Record<string, string>, distTags?: Record<string, { version: string, releasedAt?: string }> }\n relatedSkills: string[]\n hasIssues: boolean\n hasDiscussions: boolean\n hasReleases: boolean\n hasChangelog: string | false\n docsType: 'llms.txt' | 'readme' | 'docs'\n hasShippedDocs: boolean\n pkgFiles: string[]\n force?: boolean\n debug?: boolean\n sections?: SkillSection[]\n customPrompt?: CustomPrompt\n packages?: Array<{ name: string }>\n features?: FeaturesConfig\n eject?: boolean\n overheadLines?: number\n}\n\nexport async function enhanceSkillWithLLM(opts: EnhanceOptions): Promise<void> {\n 
const { packageName, version, skillDir, dirName, model, resolved, relatedSkills, hasIssues, hasDiscussions, hasReleases, hasChangelog, docsType, hasShippedDocs: shippedDocs, pkgFiles, force, debug, sections, customPrompt, packages, features, eject, overheadLines } = opts\n\n const effectiveFeatures = features\n\n const llmLog = p.taskLog({ title: `Agent exploring ${packageName}`, limit: 3 })\n const docFiles = listReferenceFiles(skillDir)\n const hasGithub = hasIssues || hasDiscussions\n const { optimized, wasOptimized, usage, cost, warnings, error, debugLogsDir } = await optimizeDocs({\n packageName,\n skillDir,\n model,\n version,\n hasGithub,\n hasReleases,\n hasChangelog,\n docFiles,\n docsType,\n hasShippedDocs: shippedDocs,\n noCache: force,\n debug,\n sections,\n customPrompt,\n features: effectiveFeatures,\n pkgFiles,\n overheadLines,\n onProgress: createToolProgress(llmLog),\n })\n\n if (wasOptimized) {\n const costParts: string[] = []\n if (usage) {\n const totalK = Math.round(usage.totalTokens / 1000)\n costParts.push(`${totalK}k tokens`)\n }\n if (cost)\n costParts.push(`$${cost.toFixed(2)}`)\n const costSuffix = costParts.length > 0 ? 
` (${costParts.join(', ')})` : ''\n llmLog.success(`Generated best practices${costSuffix}`)\n if (debugLogsDir)\n p.log.info(`Debug logs: ${relative(process.cwd(), debugLogsDir)}`)\n if (error)\n p.log.warn(`\\x1B[33mPartial failure: ${error}\\x1B[0m`)\n if (warnings?.length) {\n for (const w of warnings)\n p.log.warn(`\\x1B[33m${w}\\x1B[0m`)\n }\n const skillMd = generateSkillMd({\n name: packageName,\n version,\n releasedAt: resolved.releasedAt,\n\n distTags: resolved.distTags,\n body: optimized,\n relatedSkills,\n hasIssues,\n hasDiscussions,\n hasReleases,\n hasChangelog,\n docsType,\n hasShippedDocs: shippedDocs,\n pkgFiles,\n generatedBy: getModelLabel(model),\n dirName,\n packages,\n repoUrl: resolved.repoUrl,\n features,\n eject,\n })\n writeFileSync(join(skillDir, 'SKILL.md'), skillMd)\n }\n else {\n if (error && /\\b429\\b|rate.?limit|exhausted.*capacity|quota.*reset/i.test(error))\n llmLog.error(`Rate limited by LLM provider. Try again shortly or use a different model via \\`skilld config\\``)\n else\n llmLog.error(`Enhancement failed${error ? 
`: ${error}` : ''}`)\n }\n}\n\nexport interface WritePromptFilesOptions {\n packageName: string\n skillDir: string\n version: string\n hasIssues: boolean\n hasDiscussions: boolean\n hasReleases: boolean\n hasChangelog: string | false\n docsType: 'llms.txt' | 'readme' | 'docs'\n hasShippedDocs: boolean\n pkgFiles: string[]\n sections: SkillSection[]\n customPrompt?: CustomPrompt\n features?: FeaturesConfig\n overheadLines?: number\n}\n\n/**\n * Build and write PROMPT_*.md files for manual LLM use.\n * Returns the list of sections that had prompts written.\n */\nexport function writePromptFiles(opts: WritePromptFilesOptions): SkillSection[] {\n const { skillDir, sections, customPrompt, features } = opts\n const docFiles = listReferenceFiles(skillDir)\n const prompts = buildAllSectionPrompts({\n packageName: opts.packageName,\n skillDir,\n version: opts.version,\n hasIssues: opts.hasIssues,\n hasDiscussions: opts.hasDiscussions,\n hasReleases: opts.hasReleases,\n hasChangelog: opts.hasChangelog,\n docFiles,\n docsType: opts.docsType,\n hasShippedDocs: opts.hasShippedDocs,\n pkgFiles: opts.pkgFiles,\n customPrompt,\n features,\n overheadLines: opts.overheadLines,\n sections,\n })\n\n const skilldDir = join(skillDir, '.skilld')\n mkdirSync(skilldDir, { recursive: true })\n\n for (const [section, prompt] of prompts)\n writeFileSync(join(skilldDir, `PROMPT_${section}.md`), prompt)\n\n const written = [...prompts.keys()]\n if (written.length > 0) {\n const relDir = relative(process.cwd(), skillDir)\n const promptFiles = written.map(s => `PROMPT_${s}.md`).join(', ')\n const outputFileList = written.map(s => SECTION_OUTPUT_FILES[s]).join(', ')\n p.log.info(`Prompt files written to ${relDir}/.skilld/\\n\\x1B[2m\\x1B[3m Read each prompt file (${promptFiles}) in ${relDir}/.skilld/, read the\\n referenced files, then write your output to the matching file (${outputFileList}).\\n When done, run: skilld assemble\\x1B[0m`)\n }\n\n return 
written\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA4EA,MAAM,iBAAiB;AAEvB,MAAa,sBAAmD;CAC9D,OAAO;CACP,eAAe;CACf,eAAe;CACf,iBAAiB;CACjB,UAAU;CACV,YAAY;CACZ,SAAS;CACT,SAAS;CACV;;AAGD,SAAgB,kBAAkB,MAAiD;CACjF,MAAM,aAAa,KAAK,MAAM,4BAA4B;AAC1D,KAAI,WACF,QAAO;EAAE,MAAM;EAAS,QAAQ,OAAO,WAAW,GAAA;EAAK;CACzD,MAAM,kBAAkB,KAAK,MAAM,sCAAsC;AACzE,KAAI,gBACF,QAAO;EAAE,MAAM;EAAc,QAAQ,OAAO,gBAAgB,GAAA;EAAK;AACnE,KAAI,KAAK,WAAW,YAAY,CAC9B,QAAO,EAAE,MAAM,WAAW;AAC5B,QAAO,EAAE,MAAM,OAAO;;AAGxB,eAAsB,kBAAkB,aAAqB,WAAsC;CACjG,MAAM,UAAoB,EAAE;CAE5B,MAAM,UAAU,MAAM,gBAAgB,YAAY;AAClD,KAAI,CAAC,SAAS,aACZ,QAAO;CAET,MAAM,OAAO,IAAI,IAAI,OAAO,KAAK,QAAQ,aAAa,CAAC;AAEvD,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;CAGT,MAAM,OAAO,SAAS,UAAU;CAChC,MAAM,+BAAe,IAAI,KAAqB;AAC9C,KAAI,KACF,MAAK,MAAM,CAAC,SAAS,SAAS,OAAO,QAAQ,KAAK,OAAO,EAAE;AACzD,MAAI,KAAK,YACP,cAAa,IAAI,KAAK,aAAa,QAAQ;AAC7C,OAAK,MAAM,OAAO,cAAc,KAAK,SAAS,CAC5C,cAAa,IAAI,IAAI,MAAM,QAAQ;;CAIzC,MAAM,kBAAkB,YAAY,UAAU;CAC9C,MAAM,eAAe,IAAI,IAAI,gBAAgB;AAE7C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,aAAa,IAAI,IAAI;AACrC,MAAI,WAAW,aAAa,IAAI,QAAQ,CACtC,SAAQ,KAAK,QAAQ;;AAGzB,QAAO,QAAQ,MAAM,GAAG,EAAE;;;AAI5B,SAAgB,gBAAgB,aAAqB,SAAiB,UAAkD;AACtH,YAAW,aAAa,QAAQ;CAChC,MAAM,eAAe,iBAAiB,aAAa,QAAQ;AAC3D,KAAI,WAAW,aAAa,CAC1B,QAAO,cAAc;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAExD,KAAI,UAAU;EACZ,MAAM,UAAU,gBAAgB,SAAS,OAAO,SAAS,KAAK;AAC9D,MAAI,WAAW,QAAQ,CACrB,QAAO,SAAS;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;;;AAKvD,SAAgB,kBAAkB,UAAkB,aAAqB,KAAa,SAAiB,UAAkB,eAA2D,UAA2B,UAAkD;CAC/P,MAAM,IAAI,YAAY,YAAY,CAAC,YAAY;AAC/C,KAAI;AACF,UAAQ,UAAU,aAAa,KAAK,QAAQ;AAC5C,eAAa,UAAU,aAAa,KAAK,QAAQ;AACjD,MAAI,CAAC,eAAe,aAAa,KAAK,QAAQ,IAAI,aAAa,SAC7D,eAAc,UAAU,aAAa,SAAS,OAAO;AAGvD,MAAI,EAAE,OACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,SAAS;MAEpE,eAAc,UAAU,aAAa,SAAS,SAAS;AAE3D,MAAI,EAAE,YACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,cAAc;MAEzE,eAAc,UAAU,aAAa,SAAS,cAAc;AAEhE,MAAI,EAAE,SACJ,KAAI,SACF,mBAAkB,UAAU,SAAS,OAAO,SAAS,MAAM,WAAW;MAEtE,eAAc,UAAU,aAAa,SAAS,WAAW;AAE7D,gBAAc,UAAU,aAAa,SAAS,WAAW;AAEzD,MAAI;QACG,MAAM,OAAO
,cAChB,KAAI,IAAI,SAAS,YACf,cAAa,UAAU,IAAI,MAAM,KAAK,IAAI,QAAQ;;SAIpD;;;AAMR,SAAgB,eAAe,aAAqB,SAAiB,SAAkB,SAAoF;CACzK,MAAM,WAAW,YAAY,aAAa,QAAQ;AAClD,KAAI,WAAW,KAAK,UAAU,QAAQ,WAAW,CAAC,IAAI,WAAW,KAAK,UAAU,QAAQ,QAAQ,CAAC,CAC/F,QAAO;EACL,UAAU;EACV,WAAW,UAAU,GAAG,QAAQ,SAAS,QAAQ,SAAS;EAC3D;AAEH,KAAI,WAAW,KAAK,UAAU,WAAW,CAAC,CACxC,QAAO;EACL,UAAU;EACV,WAAW,WAAW;EACvB;AAEH,KAAI,WAAW,KAAK,UAAU,QAAQ,YAAY,CAAC,CACjD,QAAO,EAAE,UAAU,UAAU;AAE/B,QAAO,EAAE,UAAU,UAAU;;;AAS/B,SAAgB,oBACd,aACA,SACA,KACA,OACA,QAC4B;CAC5B,MAAM,gBAAgB,iBAAiB,aAAa,KAAK,QAAQ;AACjE,KAAI,cAAc,WAAW,EAC3B,QAAO;CAET,MAAM,UAAU,eAAe,KAAK,OAAO,OAAO;AAClD,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,MAAK,MAAM,WAAW,eAAe;AACnC,mBAAiB,SAAS,QAAQ,WAAW,QAAQ,SAAS;AAC9D,YAAU,SAAS,QAAQ,WAAW;GACpC;GACA;GACA,QAAQ;GACR,2BAAU,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC;GAC9C,WAAW;GACZ,CAAC;;AAGJ,KAAI,CAAC,OACH,iBAAgB,IAAI;AAEtB,QAAO;EAAE,SAAS;EAAe;EAAS;;;AAI5C,SAAgB,eAAe,KAAa,OAAkB,QAAyB;AACrF,KAAI,OAEF,QADoBA,QAAO,OACR;CAErB,MAAM,SAAS,mBAAmB,IAAI;AACtC,KAAI,OACF,QAAO;CACT,MAAM,cAAcA,QAAO;AAC3B,QAAO,KAAK,KAAK,YAAY,UAAU;;;AAIzC,eAAsB,gBAAgB,aAAqB,KAA8C;CACvG,MAAM,SAAS,oBAAoB,KAAK,KAAK,eAAe,CAAC;AAC7D,KAAI,CAAC,OACH,QAAO;CAET,MAAM,MAAM,OAAO;CAEnB,MAAM,aADO;EAAE,GAAG,IAAI;EAAwC,GAAG,IAAI;EAA2C,CACxF;AAExB,KAAI,CAAC,YAAY,WAAW,QAAQ,CAClC,QAAO;AAGT,QAAO,wBADW,QAAQ,KAAK,WAAW,MAAM,EAAE,CAAC,CACV;;;AAI3C,SAAgB,gBAAgB,QAAuB,UAAmC;AACxF,KAAI,QAAQ;EACV,MAAM,QAAQ,CAAC,gBAAgB,eAAe,CAAC,MAAK,MAAK,WAAW,KAAK,QAAQ,EAAE,CAAC,CAAC;AACrF,MAAI,MACF,QAAO,OAAO;;AAGlB,KAAI,YAAY,WAAW,KAAK,UAAU,YAAY,eAAe,CAAC,CACpE,QAAO;AACT,QAAO;;;AA0BT,eAAsB,uBAAuB,MASpB;CACvB,MAAM,EAAE,aAAa,UAAU,SAAS,eAAe;CACvD,MAAM,WAAW,KAAK,YAAY,YAAY,CAAC,YAAY;CAG3D,MAAM,mBAAmB,KAAK,YACzB,SAAS,YACT,eAAe,aAAa,SAAS,SAAS,SAAS,SAAS,QAAQ,CAAC,aAAa;CAC3F,MAAM,WAAW,KAAK,YAAY,CAAC;CACnC,IAAI,YAAoB,SAAS,aAAa;CAC9C,IAAI,WAA2C;CAC/C,MAAM,cAA0B,EAAE;CAClC,MAAM,WAAqB,EAAE;AAC7B,KAAI,iBACF,UAAS,KAAK,sBAAsB,SAAS,SAAS,wCAAwC;AAEhG,KAAI,CAAC,UAAU;EACb,MAAM,aAAuD,EAAE;EAC/D,MAAM,kBAAkB,SAAiB,oBAAoB,CAAC,KAAK,EAAE,
YAAY,CAAC,SAAS;AAG3F,MAAI,SAAS,cAAc,SAAS,SAAS;GAC3C,MAAM,KAAK,eAAe,SAAS,QAAQ;AAC3C,OAAI,IAAI;AACN,eAAW,oBAAoB;IAC/B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,SAAS,YAAY;AAC3E,QAAI,SAAS,SACX,UAAS,KAAK,qBAAqB,QAAQ,IAAI,6BAA6B,QAAQ,GAAG;AAEzF,QAAI,WAAW,QAAQ,MAAM,SAAS,GAAG;KACvC,MAAM,aAAa;KACnB,MAAM,UAA2D,EAAE;AAEnE,UAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,MAAM,QAAQ,KAAK,YAAY;MACzD,MAAM,QAAQ,QAAQ,MAAM,MAAM,GAAG,IAAI,WAAW;AACpD,iBAAW,oBAAoB,KAAK,IAAI,IAAI,YAAY,QAAQ,MAAM,OAAO,CAAC,GAAG,QAAQ,MAAM,OAAO,QAAQ,QAAQ,MAAM;MAC5H,MAAM,eAAe,MAAM,QAAQ,IACjC,MAAM,IAAI,OAAO,SAAS;OAExB,MAAM,UAAU,MAAM,eADV,GAAG,QAAQ,QAAQ,GAAG,OACO;AACzC,WAAI,CAAC,QACH,QAAO;AACT,cAAO;QAAE;QAAM;QAAS;QACxB,CACH;AACD,cAAQ,KAAK,GAAG,aAAa;;AAG/B,UAAK,MAAM,KAAK,QACd,KAAI,GAAG;MACL,MAAM,WAAW,QAAQ,aAAa,EAAE,KAAK,QAAQ,QAAQ,YAAY,GAAG,GAAG,EAAE;MACjF,MAAM,YAAY,SAAS,WAAW,QAAQ,GAAG,WAAW,QAAQ;AACpE,iBAAW,KAAK;OAAE,MAAM;OAAW,SAAS,EAAE;OAAS,CAAC;AACxD,kBAAY,KAAK;OACf,IAAI;OACJ,SAAS,EAAE;OACX,UAAU;QAAE,SAAS;QAAa,QAAQ;QAAW,MAAM;;OAC5D,CAAC;;KAIN,MAAM,aAAa,QAAQ,OAAO,QAAQ,CAAC;AAC3C,SAAI,aAAa,EAEf,KAAI,iBAAiB,WAAW,IAAI,SAAS,SAAS;AACpD,iBAAW,qBAAqB,WAAW,0BAA0B;AACrE,iBAAW,SAAS;AACpB,kBAAY,SAAS;YAElB;AACH,kBAAY,GAAG,SAAS,QAAQ,QAAQ,QAAQ,IAAI;AACpD,iBAAW;AACX,mBAAa,aAAa,SAAS,WAAW;AAG9C,UAAI,SAAS,SAAS;AACpB,kBAAW,iCAAiC;OAC5C,MAAM,cAAc,MAAM,aAAa,SAAS,QAAQ;AACxD,WAAI,aAAa;QACf,MAAM,UAAU,SAAS,WAAW,IAAI,IAAI,SAAS,QAAQ,CAAC;QAC9D,MAAM,gBAA0D,CAC9D;SAAE,MAAM;SAAY,SAAS,mBAAmB,YAAY,KAAK,QAAA;SAAU,CAC5E;AACD,YAAI,YAAY,MAAM,SAAS,GAAG;AAChC,oBAAW,eAAe,YAAY,MAAM,OAAO,qBAAqB;SACxE,MAAM,OAAO,MAAM,iBAAiB,aAAa,UAAU,KAAK,MAAM,UAAU;AAC9E,qBAAW,iCAAiC,OAAO,EAAE,GAAG,QAAQ;WAChE;AACF,cAAK,MAAM,OAAO,MAAM;AACtB,cAAI,CAAC,eAAe,IAAI,IAAI,CAC1B;UACF,MAAM,YAAY,IAAI,IAAI,WAAW,IAAI,GAAG,IAAI,IAAI,MAAM,EAAE,GAAG,IAAI;AACnE,wBAAc,KAAK;WAAE,MAAM,KAAK,aAAa,GAAG,UAAU,MAAM,IAAI,CAAC;WAAE,SAAS,IAAI;WAAS,CAAC;;;AAGlG,qBAAa,aAAa,SAAS,cAAc;;;;;;;AAU/D,MAAI,SAAS,YAAY,WAAW,WAAW,GAAG;AAChD,cAAW,mBAAmB;GAC9B,MAAM,cAAc,MAAM,iBAAiB,SAAS,UAAU,WAAW,CAAC,OAAO,QAAQ;AACvF,aAAS,KAAK,oBAAoB,S
AAS,SAAS,IAAI,KAAK,WAAW,MAAM;AAC9E,WAAO,EAAE;KACT;AACF,OAAI,YAAY,WAAW,KAAK,SAAS,SACvC,UAAS,KAAK,8BAA8B,SAAS,WAAW;AAElE,OAAI,YAAY,SAAS,GAAG;AAC1B,SAAK,MAAM,OAAO,aAAa;AAC7B,SAAI,CAAC,eAAe,IAAI,KAAK,CAC3B;AACF,gBAAW,KAAK,IAAI;AACpB,iBAAY,KAAK;MACf,IAAI,IAAI;MACR,SAAS,IAAI;MACb,UAAU;OAAE,SAAS;OAAa,QAAQ,IAAI;OAAM,MAAM;;MAC3D,CAAC;;AAEJ,gBAAY,SAAS;AACrB,eAAW;AACX,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,SAAS,WAAW,WAAW,WAAW,GAAG;AAC/C,cAAW,oBAAoB;GAC/B,MAAM,cAAc,MAAM,aAAa,SAAS,QAAQ;AACxD,OAAI,aAAa;AACf,gBAAY,SAAS;AACrB,eAAW;IACX,MAAM,UAAU,SAAS,WAAW,IAAI,IAAI,SAAS,QAAQ,CAAC;AAC9D,eAAW,KAAK;KAAE,MAAM;KAAY,SAAS,mBAAmB,YAAY,KAAK,QAAA;KAAU,CAAC;AAE5F,QAAI,YAAY,MAAM,SAAS,GAAG;AAChC,gBAAW,eAAe,YAAY,MAAM,OAAO,cAAc;KACjE,MAAM,OAAO,MAAM,iBAAiB,aAAa,UAAU,KAAK,MAAM,UAAU;AAC9E,iBAAW,0BAA0B,OAAO,EAAE,GAAG,QAAQ;OACzD;AAEF,UAAK,MAAM,OAAO,MAAM;AACtB,UAAI,CAAC,eAAe,IAAI,IAAI,CAC1B;MAEF,MAAM,YAAY,KAAK,QAAQ,IADb,IAAI,IAAI,WAAW,IAAI,GAAG,IAAI,IAAI,MAAM,EAAE,GAAG,IAAI,KACvB,MAAM,IAAI,CAAC;AACvD,iBAAW,KAAK;OAAE,MAAM;OAAW,SAAS,IAAI;OAAS,CAAC;AAC1D,kBAAY,KAAK;OACf,IAAI,IAAI;OACR,SAAS,IAAI;OACb,UAAU;QAAE,SAAS;QAAa,QAAQ;QAAW,MAAM;;OAC5D,CAAC;;AAEJ,SAAI,KAAK,SAAS,EAChB,YAAW;;AAGf,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,SAAS,WAAW,CAAC,WAAW,MAAK,MAAK,EAAE,KAAK,WAAW,QAAQ,CAAC,EAAE;GACzE,MAAM,eAAe,SAAS,YAAY,eAAe,SAAS,QAAQ;AAC1E,cAAW,qBAAqB;GAEhC,MAAM,cAAc,MAAM,iBAAiB,cAAc,YADnC,SAAS,WAAW,MAAM,IACmC,CAAC,OAAO,QAAQ;AACjG,aAAS,KAAK,oBAAoB,aAAa,IAAI,KAAK,WAAW,MAAM;AACzE,WAAO,EAAE;KACT;AACF,OAAI,YAAY,SAAS,GAAG;AAC1B,SAAK,MAAM,OAAO,aAAa;AAC7B,SAAI,CAAC,eAAe,IAAI,KAAK,CAC3B;AACF,gBAAW,KAAK,IAAI;AACpB,iBAAY,KAAK;MACf,IAAI,IAAI;MACR,SAAS,IAAI;MACb,UAAU;OAAE,SAAS;OAAa,QAAQ,IAAI;OAAM,MAAM;;MAC3D,CAAC;;AAEJ,gBAAY;AACZ,eAAW;AACX,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,SAAS,aAAa,WAAW,WAAW,GAAG;AACjD,cAAW,kBAAkB;GAC7B,MAAM,UAAU,MAAM,mBAAmB,SAAS,UAAU;AAC5D,OAAI,SAAS;AACX,eAAW,KAAK;KAAE,MAAM;KAAkB;KAAS,CAAC;AACpD,gBAAY,KAAK;KACf,IAAI;KACJ;KACA,UAAU;MAAE,SAAS;MAAa,QAAQ;MAAkB,MAAM;;KACnE,CAAC;AACF,iBAAa,aAAa,SAAS,WAAW;;;AAKlD,MAAI,aAAa,YAAY,
WAAW,QAAO,MAAK,EAAE,KAAK,WAAW,QAAQ,IAAI,EAAE,KAAK,SAAS,MAAM,CAAC,CAAC,SAAS,GAAG;GACpH,MAAM,YAAY,kBAAkB,WAAW;AAC/C,OAAI,UACF,cAAa,aAAa,SAAS,CAAC;IAAE,MAAM;IAAkB,SAAS;IAAW,CAAC,CAAC;;QAIrF;AAEH,aAAW,sBAAsB;EACjC,MAAM,WAAW,eAAe,aAAa,SAAS,SAAS,SAAS,SAAS,QAAQ;AACzF,aAAW,SAAS;AACpB,MAAI,SAAS,UACX,aAAY,SAAS;AAIvB,MAAI,CAAC,WADU,iBAAiB,aAAa,QAAQ,CAC9B,EAAE;AACvB,cAAW,mCAAmC;GAC9C,MAAM,SAAS,eAAe,aAAa,QAAQ;AACnD,QAAK,MAAM,OAAO,OAChB,aAAY,KAAK;IACf,IAAI,IAAI;IACR,SAAS,IAAI;IACb,UAAU;KAAE,SAAS;KAAa,QAAQ,IAAI;KAAM,GAAG,kBAAkB,IAAI,KAAA;;IAC9E,CAAC;;AAKN,MAAI,aAAa,YAAY,CAAC,WAAW,KAAK,YAAY,aAAa,QAAQ,EAAE,QAAQ,YAAY,CAAC,EAAE;AACtG,cAAW,wBAAwB;GACnC,MAAM,SAAS,eAAe,aAAa,QAAQ;AAEnD,OADiB,OAAO,QAAO,MAAK,EAAE,KAAK,WAAW,QAAQ,IAAI,EAAE,KAAK,SAAS,MAAM,CAAC,CAC5E,SAAS,GAAG;IACvB,MAAM,YAAY,kBAAkB,OAAO;AAC3C,QAAI,UACF,cAAa,aAAa,SAAS,CAAC;KAAE,MAAM;KAAkB,SAAS;KAAW,CAAC,CAAC;;;;CAO5F,MAAM,KAAK,SAAS,UAAU,eAAe,SAAS,QAAQ,GAAG;CACjE,MAAM,WAAW,KAAK;EAAE,OAAO,GAAG;EAAO,MAAM,GAAG;EAAM,GAAG,KAAA;CAG3D,MAAM,eAAe,WAAW,gBAAgB,SAAS,OAAO,SAAS,KAAK,GAAG;CACjF,MAAM,WAAW,YAAY,aAAa,QAAQ;CAClD,MAAM,YAAY,eAAe,KAAK,cAAc,SAAS,GAAG,KAAK,UAAU,SAAS;CACxF,MAAM,iBAAiB,eAAe,KAAK,cAAc,cAAc,GAAG,KAAK,UAAU,cAAc;CACvG,MAAM,eAAe,eAAe,KAAK,cAAc,WAAW,GAAG,KAAK,UAAU,WAAW;AAG/F,KAAI,SAAS,UAAU,MAAM,eAAe,IAAI,CAAC,WAAW,UAAU,EAAE;AACtE,aAAW,iCAAiC;EAC5C,MAAM,SAAS,MAAM,kBAAkB,GAAG,OAAO,GAAG,MAAM,IAAI,SAAS,YAAY,KAAK,KAAK,CAAC,YAAY,EAAE,CAAC;AAC7G,MAAI,OAAO,SAAS,GAAG;AACrB,cAAW,WAAW,OAAO,OAAO,SAAS;GAC7C,MAAM,YAAY,CAChB,GAAG,OAAO,KAAI,WAAU;IACtB,MAAM,gBAAgB,MAAM,OAAO;IACnC,SAAS,sBAAsB,MAAA;IAChC,EAAE,EACH;IACE,MAAM;IACN,SAAS,mBAAmB,OAAA;IAC7B,CACF;AACD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,UAAU;OAE1D,cAAa,aAAa,SAAS,UAAU;AAC/C,QAAK,MAAM,SAAS,OAClB,aAAY,KAAK;IACf,IAAI,SAAS,MAAM;IACnB,SAAS,iBAAiB,IAAI,MAAM,OAAO,IAAI,MAAM,MAAM,MAAM,MAAM,QAAQ,KAAK;IACpF,UAAU;KAAE,SAAS;KAAa,QAAQ,gBAAgB,MAAM,OAAO;KAAM,MAAM;KAAS,QAAQ,MAAM;;IAC3G,CAAC;;;AAMR,KAAI,SAAS,eAAe,MAAM,eAAe,IAAI,CAAC,WAAW,eAAe,EAAE;AAChF,aAAW,sCAAsC;EACjD,MAAM,cAAc,MAAM,uBAAuB,GAAG,OAAO,
GAAG,MAAM,IAAI,SAAS,YAAY,KAAK,KAAK,CAAC,YAAY,EAAE,CAAC;AACvH,MAAI,YAAY,SAAS,GAAG;AAC1B,cAAW,WAAW,YAAY,OAAO,cAAc;GACvD,MAAM,iBAAiB,CACrB,GAAG,YAAY,KAAI,OAAM;IACvB,MAAM,0BAA0B,EAAE,OAAO;IACzC,SAAS,2BAA2B,EAAA;IACrC,EAAE,EACH;IACE,MAAM;IACN,SAAS,wBAAwB,YAAA;IAClC,CACF;AACD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,eAAe;OAE/D,cAAa,aAAa,SAAS,eAAe;AACpD,QAAK,MAAM,KAAK,YACd,aAAY,KAAK;IACf,IAAI,cAAc,EAAE;IACpB,SAAS,iBAAiB,IAAI,EAAE,OAAO,IAAI,EAAE,MAAM,MAAM,EAAE,QAAQ,KAAK;IACxE,UAAU;KAAE,SAAS;KAAa,QAAQ,0BAA0B,EAAE,OAAO;KAAM,MAAM;KAAc,QAAQ,EAAE;;IAClH,CAAC;;;AAMR,KAAI,SAAS,YAAY,MAAM,eAAe,IAAI,CAAC,WAAW,aAAa,EAAE;AAC3E,aAAW,mCAAmC;EAC9C,MAAM,eAAe,aAAa,QAAQ,GAAG,0BAA0B,YAAY,GAAG,KAAA;EACtF,MAAM,cAAc,MAAM,kBAAkB,GAAG,OAAO,GAAG,MAAM,SAAS,SAAS,QAAQ,aAAa,KAAK,MAAM,aAAa,CAAC,YAAY,EAAE,CAAC;EAG9I,IAAI,WAAqD,EAAE;AAC3D,MAAI,cAAc,YAAY,EAAE;AAC9B,cAAW,8BAA8B;AACzC,cAAW,MAAM,kBAAkB,aAAa,QAAQ,CAAC,YAAY,EAAE,CAAC;;EAG1E,MAAM,UAAU,CAAC,GAAG,aAAa,GAAG,SAAS;EAG7C,MAAM,cAAc,SACjB,QAAO,MAAK,CAAC,EAAE,KAAK,SAAS,YAAY,CAAC,CAC1C,KAAK,MAAM;GACV,MAAM,eAAe,EAAE,KAAK,MAAM,iBAAiB;GACnD,MAAM,KAAK,iBAAiB,EAAE,QAAQ;AACtC,UAAO;IACL,SAAS,eAAe,MAAM;IAC9B,OAAO,GAAG,SAAS,WAAW,eAAe;IAC7C,MAAM,GAAG,QAAQ;IAClB;IACD,CACD,QAAO,MAAK,EAAE,QAAQ;EAGzB,MAAM,aAAa,YAChB,QAAO,MAAK,EAAE,KAAK,WAAW,YAAY,IAAI,CAAC,EAAE,KAAK,SAAS,eAAe,CAAC,CAC/E,KAAK,MAAM;GACV,MAAM,KAAK,iBAAiB,EAAE,QAAQ;GACtC,MAAM,MAAM,GAAG,OAAO;GACtB,MAAM,OAAO,GAAG,QAAQ;GACxB,MAAM,YAAY,GAAG,aAAa;AAClC,UAAO;IAAE,IAAI;IAAG;IAAK;IAAM,YAAY;IAAO,WAAW;IAAW,aAAa;IAAW,UAAU;IAAI;IAC1G,CACD,QAAO,MAAK,EAAE,IAAI;EAErB,MAAM,eAAe,QAAQ,MAAK,MAAK,EAAE,SAAS,wBAAwB;AAG1E,MAAI,WAAW,SAAS,KAAK,YAAY,SAAS,EAChD,SAAQ,KAAK;GACX,MAAM;GACN,SAAS,qBAAqB;IAAE,UAAU;IAAY;IAAa,cAAc;IAAa;IAAc,CAAA;GAC7G,CAAC;AAGJ,MAAI,QAAQ,SAAS,GAAG;AACtB,cAAW,WAAW,QAAQ,OAAO,WAAW;AAChD,OAAI,SACF,kBAAiB,SAAS,OAAO,SAAS,MAAM,QAAQ;OAExD,cAAa,aAAa,SAAS,QAAQ;AAC7C,QAAK,MAAM,OAAO,QAChB,aAAY,KAAK;IACf,IAAI,IAAI;IACR,SAAS,IAAI;IACb,UAAU;KAAE,SAAS;KAAa,QAAQ,IAAI;KAAM,MAAM;;IAC3D,CAAC;;;AAKR,QAAO;EACL;EACA;EACA;EACA,WAAW,SAAS
,UAAU,WAAW,UAAU;EACnD,gBAAgB,SAAS,eAAe,WAAW,eAAe;EAClE,aAAa,SAAS,YAAY,WAAW,aAAa;EAC1D;EACA;EACA,WAAW;EACZ;;;;;;AAOH,SAAS,YAAY,IAAoB;CACvC,MAAM,MAAM,GAAG,QAAQ,UAAU;AACjC,QAAO,QAAQ,KAAK,KAAK,GAAG,MAAM,GAAG,IAAI;;;AAI3C,SAAS,QAAQ,SAAqB,KAAa,YAAyC;AAC1F,KAAI,QAAQ,UAAU,IACpB;CACF,MAAM,gBAAwC;EAAE,KAAK;EAAG,OAAO;EAAG,YAAY;EAAG,SAAS;EAAG,QAAQ;EAAG,OAAO;EAAG;AAClH,SAAQ,MAAM,GAAG,MAAM;EACrB,MAAM,KAAK,cAAc,EAAE,UAAU,QAAQ,UAAU;EACvD,MAAM,KAAK,cAAc,EAAE,UAAU,QAAQ,UAAU;AACvD,MAAI,OAAO,GACT,QAAO,KAAK;AACd,SAAO,EAAE,GAAG,cAAc,EAAE,GAAG;GAC/B;AACF,YAAW,sBAAsB,IAAI,GAAG,QAAQ,OAAO,6BAA6B;AACpF,SAAQ,SAAS;;;AAInB,eAAsB,eAAe,MAOnB;CAChB,MAAM,EAAE,aAAa,SAAS,KAAK,eAAe;CAClD,MAAM,WAAW,KAAK,YAAY,YAAY,CAAC,YAAY;AAE3D,KAAI,CAAC,SAAS,OACZ;CAEF,MAAM,SAAS,iBAAiB,aAAa,QAAQ;CACrD,MAAM,WAAW,WAAW,OAAO;CAEnC,MAAM,UAAU,CAAC,GAAG,KAAK,YAAY;CAGrC,MAAM,SAAS,cAAc,aAAa,KAAK,QAAQ;AACvD,KAAI,SAAS,UAAU,QAAQ;AAC7B,aAAW,mBAAmB;EAC9B,MAAM,aAAa,MAAM,kBAAkB,OAAO;AAClD,OAAK,MAAM,KAAK,WACd,SAAQ,KAAK;GACX,IAAI,EAAE;GACN,SAAS,EAAE;GACX,UAAU;IAAE,SAAS;IAAa,QAAQ,OAAO,EAAE;IAAQ,MAAM,EAAE;;GACpE,CAAC;;AAIN,KAAI,QAAQ,WAAW,EACrB;AAEF,SAAQ,SAAS,gBAAgB,WAAW;AAG5C,KAAI,CAAC,UAAU;AACb,aAAW,0BAA0B,QAAQ,OAAO,QAAQ;AAC5D,MAAI;AACF,SAAM,YAAY,SAAS;IACzB;IACA,aAAa,EAAE,OAAO,SAAS,YAAY;AACzC,SAAI,UAAU,WAAW;MACvB,MAAM,IAAI,QAAQ,UAAU;AAE5B,iBAAW,WADE,GAAG,UAAU,SAAS,YAAY,GAAG,UAAU,SAAS,UAAU,SAAU,GAAG,UAAU,QAAQ,MACnF,IAAI,QAAQ,GAAG,MAAM,GAAG;gBAE5C,UAAU,YACjB,YAAW,wBAAwB,QAAQ,GAAG,MAAM,GAAG;;IAG5D,CAAC;WAEG,KAAK;AACV,OAAI,eAAe,2BACjB,YAAW,oDAAoD;OAE/D,OAAM;;AAEV;;CAIF,IAAI;AACJ,KAAI;AACF,gBAAc,MAAM,aAAa,EAAE,QAAQ,CAAC;UAEvC,KAAK;AACV,MAAI,eAAe,4BAA4B;AAC7C,cAAW,oDAAoD;AAC/D;;AAEF,QAAM;;CAIR,MAAM,oBAAoB,IAAI,IAAI,YAAY,IAAI,YAAY,CAAC;CAC/D,MAAM,cAAc,IAAI,IAAI,QAAQ,KAAI,MAAK,EAAE,GAAG,CAAC;CAGnD,MAAM,UAAU,QAAQ,QAAO,MAAK,CAAC,kBAAkB,IAAI,EAAE,GAAG,CAAC;CAGjE,MAAM,YAAY,YAAY,QAAO,OAAM,CAAC,YAAY,IAAI,YAAY,GAAG,CAAC,CAAC;AAE7E,KAAI,QAAQ,WAAW,KAAK,UAAU,WAAW,GAAG;AAClD,aAAW,0BAA0B;AACrC;;CAGF,MAAM,QAAkB,EAAE;AAC1B,KAAI,QAAQ,SAAS,EACnB,OAAM,KAAK,IAAI,
QAAQ,OAAO,MAAM;AACtC,KAAI,UAAU,SAAS,EACrB,OAAM,KAAK,IAAI,UAAU,OAAO,QAAQ;AAC1C,YAAW,0BAA0B,MAAM,KAAK,KAAK,CAAC,GAAG;AAEzD,KAAI;AACF,QAAM,YAAY,SAAS;GACzB;GACA;GACA,aAAa,EAAE,OAAO,SAAS,YAAY;AACzC,QAAI,UAAU,WAAW;KACvB,MAAM,IAAI,QAAQ,UAAU;AAE5B,gBAAW,WADE,GAAG,UAAU,SAAS,YAAY,GAAG,UAAU,SAAS,UAAU,SAAU,GAAG,UAAU,QAAQ,MACnF,IAAI,QAAQ,GAAG,MAAM,GAAG;eAE5C,UAAU,YACjB,YAAW,wBAAwB,QAAQ,GAAG,MAAM,GAAG;;GAG5D,CAAC;UAEG,KAAK;AACV,MAAI,eAAe,2BACjB,YAAW,oDAAoD;MAE/D,OAAM;;;;;;;;AASZ,SAAgB,gBAAgB,UAAkB,aAAqB,KAAa,SAAiB,UAAkB,UAA2B,UAAkD;CAClM,MAAM,IAAI,YAAY,YAAY,CAAC,YAAY;CAC/C,MAAM,WAAW,YAAY,aAAa,QAAQ;CAClD,MAAM,UAAU,KAAK,UAAU,aAAa;CAE5C,MAAM,UAAU,WAAW,gBAAgB,SAAS,OAAO,SAAS,KAAK,GAAG;AAG5E,KAAI,CAAC,eAAe,aAAa,KAAK,QAAQ,IAAI,aAAa,SAC7D,kBAAiB,UAAU,SAAS,OAAO;AAE7C,KAAI,EAAE,OACJ,kBAAiB,SAAS,SAAS,SAAS;AAC9C,KAAI,EAAE,YACJ,kBAAiB,SAAS,SAAS,cAAc;AACnD,KAAI,EAAE,SACJ,kBAAiB,SAAS,SAAS,WAAW;;;AAIlD,SAAS,iBAAiB,UAAkB,SAAiB,QAAsB;CACjF,MAAM,SAAS,KAAK,UAAU,OAAO;AACrC,KAAI,CAAC,WAAW,OAAO,CACrB;CAEF,MAAM,UAAU,KAAK,SAAS,OAAO;AACrC,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;CAEvC,SAAS,KAAK,KAAa,KAAa;AACtC,OAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;GAC7D,MAAM,UAAU,KAAK,KAAK,MAAM,KAAK;GACrC,MAAM,WAAW,KAAK,SAAS,MAAM,GAAG,IAAI,GAAG,MAAM,SAAS,MAAM,KAAK;AACzE,OAAI,MAAM,aAAa,EAAE;AACvB,cAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,SAAK,SAAS,MAAM,GAAG,IAAI,GAAG,MAAM,SAAS,MAAM,KAAK;SAGxD,cAAa,SAAS,SAAS;;;AAKrC,MAAK,QAAQ,GAAG;;;;;;AASlB,eAAsB,gBAAgB,WAAmB,KAAa,UAAkC;AACtG,KAAI,SACF;CAEF,MAAM,gBAAgB,KAAK,KAAK,aAAa;CAC7C,MAAM,UAAU;AAGhB,KAAI,WAAW,cAAc;MACX,aAAa,eAAe,QAAQ,CACxC,MAAM,KAAK,CAAC,MAAK,SAAQ,KAAK,MAAM,KAAK,QAAQ,CAC3D;;AAIJ,KAAI,CAAC,eAAe,EAAE;EACpB,MAAM,QAAQ,4DAA4D,QAAQ;AAClF,MAAI,WAAW,cAAc,CAG3B,gBAAe,eAAe,GAFb,aAAa,eAAe,QAAQ,CAC1B,SAAS,KAAK,GAAG,KAAK,OACJ,QAAQ;MAGrD,eAAc,eAAe,MAAM;AAErC;;CAIF,MAAM,eAAe,SAAS,KAAK,UAAU,IAAI;AACjD,GAAE,IAAI,KACJ,mEACwC,aAAa,yDACb,aAAa,+DACb,QAAQ,0DACjD;CAED,MAAM,MAAM,MAAM,EAAE,QAAQ;EAC1B,SAAS,SAAS,QAAQ;EAC1B,cAAc;EACf,CAAC;AAEF,KAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;CAEF,MAAM,QAAQ,4DAA4
D,QAAQ;AAClF,KAAI,WAAW,cAAc,CAG3B,gBAAe,eAAe,GAFb,aAAa,eAAe,QAAQ,CAC1B,SAAS,KAAK,GAAG,KAAK,OACJ,QAAQ;KAGrD,eAAc,eAAe,MAAM;AAGrC,GAAE,IAAI,QAAQ,qBAAqB;;AAGrC,MAAa,sBAAsB;AACnC,MAAa,oBAAoB;AAEjC,MAAM,qBAAqB;AAE3B,SAAS,qBAAqB,OAA0B;AAEtD,QAAO,GAAG,oBAAoB,IADjBA,QAAO,OAAO,uBAAuB,mBACX,IAAI;;AAG7C,SAAS,wBAAwB,OAA0B;AAEzD,QAAO,wGADMA,QAAO,OAAO,uBAAuB;;;;;;AAQpD,eAAsB,wBAAwB,OAAkB,KAAa,UAAkC;AAC7G,KAAI,SACF;CAEF,MAAM,cAAcA,QAAO;AAC3B,KAAI,CAAC,YAAY,gBACf;CAEF,MAAM,WAAW,KAAK,KAAK,YAAY,gBAAgB;AAIvD,KAHc,YAAY,gBAAgB,SAAS,OAAO,EAG/C;AACT,MAAI,WAAW,SAAS,CACtB;EAEF,MAAM,UAAU,GAAG,wBAAwB,MAAM,CAAC;AAElD,MAAI,CAAC,eAAe,EAAE;AACpB,aAAU,KAAK,UAAU,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACpD,iBAAc,UAAU,QAAQ;AAChC;;AAGF,IAAE,KACA;;;;UAIa,wBAAwB,MAAM,CAAC,UAC5C,UAAU,YAAY,kBACvB;EAED,MAAM,MAAM,MAAM,EAAE,QAAQ;GAC1B,SAAS,UAAU,YAAY,gBAAgB;GAC/C,cAAc;GACf,CAAC;AAEF,MAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;AAEF,YAAU,KAAK,UAAU,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACpD,gBAAc,UAAU,QAAQ;AAChC,IAAE,IAAI,QAAQ,WAAW,YAAY,kBAAkB;AACvD;;AAIF,KAAI,WAAW,SAAS;MACN,aAAa,UAAU,QAAQ,CACnC,SAAA,kBAA6B,CACvC;;AAIJ,KAAI,CAAC,eAAe,EAAE;AACpB,MAAI,WAAW,SAAS,CAGtB,gBAAe,UAAU,GAFR,aAAa,UAAU,QAAQ,CACrB,SAAS,KAAK,GAAG,KAAK,KACX,IAAI,qBAAqB,MAAM,CAAC,IAAI;MAG1E,eAAc,UAAU,GAAG,qBAAqB,MAAM,CAAC,IAAI;AAE7D;;CAIF,MAAM,SADa,WAAW,SAAS,GACX,cAAc;AAC1C,GAAE,KACA;;;;UAIa,qBAAqB,MAAM,CAAC,QAAQ,OAAO,KAAK,CAAC,UAC9D,GAAG,OAAO,GAAG,YAAY,kBAC1B;CAED,MAAM,MAAM,MAAM,EAAE,QAAQ;EAC1B,SAAS,GAAG,OAAO,GAAG,YAAY,gBAAgB;EAClD,cAAc;EACf,CAAC;AAEF,KAAI,EAAE,SAAS,IAAI,IAAI,CAAC,IACtB;AAEF,KAAI,WAAW,SAAS,CAGtB,gBAAe,UAAU,GAFR,aAAa,UAAU,QAAQ,CACrB,SAAS,KAAK,GAAG,KAAK,KACX,IAAI,qBAAqB,MAAM,CAAC,IAAI;KAG1E,eAAc,UAAU,GAAG,qBAAqB,MAAM,CAAC,IAAI;AAG7D,GAAE,IAAI,QAAQ,WAAW,YAAY,kBAAkB;;;AAmCzD,MAAa,mBAAmC,CAAC,kBAAkB,cAAc;AAEjF,eAAsB,oBAAoB,UAAU,oBAA4G;AAC9J,GAAE,IAAI,KAAK,4CAA4C;CACvD,MAAM,WAAW,MAAM,EAAE,YAAY;EACnC;EACA,SAAS;GACP;IAAE,OAAO;IAAe,OAAO;IAA+B,MAAM;IAA4C;GAChH;IAAE,OAAO;IAAkB,OAAO;IAAkC,MAAM;IAA+B;GACzG;IAAE,OAAO;IAAkB,OAAO;IAA0B,MAAM;;GACnE;EACD,eAAe;EACf,UA
AU;EACX,CAAC;AAEF,KAAI,EAAE,SAAS,SAAS,CACtB,QAAO;EAAE,UAAU,EAAE;EAAE,WAAW;EAAM;CAE1C,MAAM,WAAW;AACjB,KAAI,SAAS,WAAW,EACtB,QAAO;EAAE,UAAU,EAAE;EAAE,WAAW;EAAO;AAG3C,KAAI,SAAS,SAAS,GAAG;EACvB,MAAM,IAAI,SAAS;EACnB,MAAM,cAAwB,EAAE;AAChC,OAAK,MAAM,KAAK,SACd,SAAQ,GAAR;GACE,KAAK;AACH,gBAAY,KAAK,qBAAqB,SAAS,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,GAAG,KAAK,MAAM,KAAK,IAAI,EAAE,EAAE,CAAC,kCAAkC;AACnI;GACF,KAAK;AACH,gBAAY,KAAK,qBAAqB,SAAS,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,GAAG,KAAK,MAAM,KAAK,IAAI,EAAE,EAAE,CAAC,QAAQ;AACzG;GACF,KAAK;AACH,gBAAY,KAAK,sBAAsB,SAAS,IAAI,IAAI,EAAE,CAAC,QAAQ;AACnE;;AAGN,IAAE,IAAI,KAAK,WAAW,EAAE,eAAe,YAAY,KAAK,KAAK,GAAG;;CAGlE,IAAI;AACJ,KAAI,SAAS,SAAS,SAAS,EAAE;EAC/B,MAAM,UAAU,MAAM,EAAE,KAAK;GAC3B,SAAS;GACT,aAAa;GACd,CAAC;AACF,MAAI,EAAE,SAAS,QAAQ,CACrB,QAAO;GAAE,UAAU,EAAE;GAAE,WAAW;GAAM;EAE1C,MAAM,OAAO,MAAM,EAAE,KAAK;GACxB,SAAS;GACT,aAAa;GACd,CAAC;AACF,MAAI,EAAE,SAAS,KAAK,CAClB,QAAO;GAAE,UAAU,EAAE;GAAE,WAAW;GAAM;AAE1C,iBAAe;GAAW;GAAyB;GAAgB;;AAGrE,QAAO;EAAE;EAAU;EAAc,WAAW;EAAO;;;;;;;AA0BrD,eAAsB,gBAAgB,aAA6B,SAAkB,WAAsD;AACzI,KAAI,aAAa;AAGf,OADkB,MAAM,oBAAoB,EAC9B,MAAK,MAAK,EAAE,OAAO,YAAY,CAC3C,QAAO;GAAE,OAAO;GAAa,UAAU;GAAkB;AAE3D,MAAI,CAAC,eAAe,CAClB,QAAO;;AAIX,KAAI,CAAC,eAAe,CAClB,QAAO;CAIT,MAAM,SAAS,YAAY;CAC3B,MAAM,YAAY,MAAM,oBAAoB;AAE5C,KAAI,UAAU,WAAW,GAAG;AAC1B,IAAE,IAAI,KAAK,kBAAkB;AAC7B,SAAO;;CAIT,IAAI;AACJ,KAAI,OAAO,SAAS,UAAU,MAAK,MAAK,EAAE,OAAO,OAAO,MAAM,CAC5D,gBAAe,OAAO;MAEnB;AACH,MAAI,OAAO,MACT,GAAE,IAAI,KAAK,4BAA4B,OAAO,MAAM,uDAAuD;AAC7G,iBAAgB,UAAU,MAAK,MAAK,EAAE,YAAY,EAAE,MAAM,UAAU,GAAI;;CAG1E,MAAM,mBAAmB,aAAa,aAAa;CAEnD,MAAM,eADmB,UAAU,MAAK,MAAK,EAAE,OAAO,aAAa,EAC5B,gBAAgB;CACvD,MAAM,aAAa,OAAO,UAAU,eAAe,eAAe;CAClE,MAAM,cAAc,eAAe,GAAG,aAAa,KAAK,eAAe;CAGvE,IAAI,iBAAiB,UAAU,GAAG,QAAQ,KAAK;CAC/C,IAAI,gBAAgB;AACpB,KAAI,WAAW;EACb,MAAM,OAAO,UAAU,aACjB,UAAU,cAAc,UAAU,aAAa,WAAW,UAAU,YAAY,UAAU,WAAW,GAAG;EAC9G,MAAM,cAAc,SAAS,WAAW,SAAS,gBAAgB,SAAS,cAAc,SAAS,cAAc,SAAS;EAExH,MAAM,WAAqB,EAAE;AAC7B,MAAI,KACF,UAAS,KAAK,KAAK;AACrB,MAAI,UAAU,UAAU;GACtB,MAAM,aAAa,IAAI,KAA
K,UAAU,SAAS,CAAC,SAAS;AACzD,OAAI,OAAO,SAAS,WAAW,EAAE;IAC/B,MAAM,OAAO,KAAK,OAAO,KAAK,KAAK,GAAG,cAAc,MAAW;AAC/D,aAAS,KAAK,SAAS,IAAI,UAAU,SAAS,IAAI,WAAW,GAAG,KAAK,OAAO;;;AAGhF,MAAI,UAAU,YACZ,UAAS,KAAK,eAAe;EAK/B,MAAM,OAAO,CAHO,UAAU,cAAc,UAAU,aAClD,GAAG,UAAU,WAAW,KAAK,UAAU,eACvC,MACuB,GAAG,SAAS,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM;AACnE,MAAI,KACF,kBAAiB,8BAA8B,KAAK;AAGtD,MAAI,UAAU,eAAe,YAC3B,iBAAgB;;CAGpB,MAAM,SAAS,MAAM,EAAE,OAAO;EAC5B,SAAS;EACT,SAAS;GACP;IAAE,OAAO;IAAkB,OAAO;IAAoB,MAAM;IAAa;GACzE;IAAE,OAAO;IAAmB,OAAO;IAAiB,MAAM;IAAoC;GAC9F;IAAE,OAAO;IAAe,OAAO;IAAmB,MAAM;IAAgC;GACxF;IAAE,OAAO;IAAQ,OAAO;IAAiB,MAAM;;GAChD;EACD,GAAI,gBAAgB,EAAE,cAAc,QAAiB,GAAG,EAAA;EACzD,CAAC;AAEF,KAAI,EAAE,SAAS,OAAO,CACpB,QAAO;AAET,KAAI,WAAW,OACb,QAAO;AAET,KAAI,WAAW,UAAU;EACvB,MAAM,EAAE,UAAU,cAAc,cAAc,MAAM,oBAClD,UAAU,GAAG,QAAQ,kBAAkB,wCACxC;AACD,MAAI,aAAa,SAAS,WAAW,EACnC,QAAO;AAET,SAAO;GAAE,OAAO;GAAc;GAAU;GAAc,YAAY;GAAM;;CAG1E,IAAI;AACJ,KAAI,WAAW,QAAQ;EACrB,MAAM,SAAS,MAAM,UAAU,UAAU;AACzC,MAAI,CAAC,OACH,QAAO;AACT,eAAa,EAAE,OAAO,QAAyB,CAAC;AAChD,UAAQ;OAGR,SAAQ;AAEV,KAAI,CAAC,MACH,QAAO;CAET,MAAM,YAAY,aAAa,MAAM;CACrC,MAAM,EAAE,UAAU,cAAc,cAAc,MAAM,oBAClD,UAAU,GAAG,QAAQ,IAAI,UAAU,KAAK,yBAAyB,YAClE;AAED,KAAI,aAAa,SAAS,WAAW,EACnC,QAAO;AAET,QAAO;EAAE;EAAO;EAAU;EAAc;;AA4B1C,eAAsB,oBAAoB,MAAqC;CAC7E,MAAM,EAAE,aAAa,SAAS,UAAU,SAAS,OAAO,UAAU,eAAe,WAAW,gBAAgB,aAAa,cAAc,UAAU,gBAAgB,aAAa,UAAU,OAAO,OAAO,UAAU,cAAc,UAAU,UAAU,OAAO,kBAAkB;CAE3Q,MAAM,oBAAoB;CAE1B,MAAM,SAAS,EAAE,QAAQ;EAAE,OAAO,mBAAmB;EAAe,OAAO;EAAG,CAAC;CAC/E,MAAM,WAAW,mBAAmB,SAAS;CAE7C,MAAM,EAAE,WAAW,cAAc,OAAO,MAAM,UAAU,OAAO,iBAAiB,MAAM,aAAa;EACjG;EACA;EACA;EACA;EACA,WANgB,aAAa;EAO7B;EACA;EACA;EACA;EACA,gBAAgB;EAChB,SAAS;EACT;EACA;EACA;EACA,UAAU;EACV;EACA;EACA,YAAY,mBAAmB,OAAA;EAChC,CAAC;AAEF,KAAI,cAAc;EAChB,MAAM,YAAsB,EAAE;AAC9B,MAAI,OAAO;GACT,MAAM,SAAS,KAAK,MAAM,MAAM,cAAc,IAAK;AACnD,aAAU,KAAK,GAAG,OAAO,UAAU;;AAErC,MAAI,KACF,WAAU,KAAK,IAAI,KAAK,QAAQ,EAAE,GAAG;EACvC,MAAM,aAAa,UAAU,SAAS,IAAI,KAAK,UAAU,KAAK,KAAK,CAAC,KAAK;AACzE,SAAO,QAAQ,2BAA2B,aAAa;AA
CvD,MAAI,aACF,GAAE,IAAI,KAAK,eAAe,SAAS,QAAQ,KAAK,EAAE,aAAa,GAAG;AACpE,MAAI,MACF,GAAE,IAAI,KAAK,4BAA4B,MAAM,SAAS;AACxD,MAAI,UAAU,OACZ,MAAK,MAAM,KAAK,SACd,GAAE,IAAI,KAAK,WAAW,EAAE,SAAS;EAErC,MAAM,UAAU,gBAAgB;GAC9B,MAAM;GACN;GACA,YAAY,SAAS;GAErB,UAAU,SAAS;GACnB,MAAM;GACN;GACA;GACA;GACA;GACA;GACA;GACA,gBAAgB;GAChB;GACA,aAAa,cAAc,MAAM;GACjC;GACA;GACA,SAAS,SAAS;GAClB;GACA;GACD,CAAC;AACF,gBAAc,KAAK,UAAU,WAAW,EAAE,QAAQ;YAG9C,SAAS,wDAAwD,KAAK,MAAM,CAC9E,QAAO,MAAM,iGAAiG;KAE9G,QAAO,MAAM,qBAAqB,QAAQ,KAAK,UAAU,KAAK;;;;;;AAyBpE,SAAgB,iBAAiB,MAA+C;CAC9E,MAAM,EAAE,UAAU,UAAU,cAAc,aAAa;CACvD,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,UAAU,uBAAuB;EACrC,aAAa,KAAK;EAClB;EACA,SAAS,KAAK;EACd,WAAW,KAAK;EAChB,gBAAgB,KAAK;EACrB,aAAa,KAAK;EAClB,cAAc,KAAK;EACnB;EACA,UAAU,KAAK;EACf,gBAAgB,KAAK;EACrB,UAAU,KAAK;EACf;EACA;EACA,eAAe,KAAK;EACpB;EACD,CAAC;CAEF,MAAM,YAAY,KAAK,UAAU,UAAU;AAC3C,WAAU,WAAW,EAAE,WAAW,MAAM,CAAC;AAEzC,MAAK,MAAM,CAAC,SAAS,WAAW,QAC9B,eAAc,KAAK,WAAW,UAAU,QAAQ,KAAK,EAAE,OAAO;CAEhE,MAAM,UAAU,CAAC,GAAG,QAAQ,MAAM,CAAC;AACnC,KAAI,QAAQ,SAAS,GAAG;EACtB,MAAM,SAAS,SAAS,QAAQ,KAAK,EAAE,SAAS;EAChD,MAAM,cAAc,QAAQ,KAAI,MAAK,UAAU,EAAE,KAAK,CAAC,KAAK,KAAK;EACjE,MAAM,iBAAiB,QAAQ,KAAI,MAAK,qBAAqB,GAAG,CAAC,KAAK,KAAK;AAC3E,IAAE,IAAI,KAAK,2BAA2B,OAAO,oDAAoD,YAAY,OAAO,OAAO,wFAAwF,eAAe,8CAA8C;;AAGlR,QAAO"}
|
package/dist/_chunks/sync.mjs
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { a as getModelLabel, i as getAvailableModels, s as optimizeDocs, t as detectImportedPackages } from "./agent.mjs";
|
|
2
1
|
import { c as getVersionKey, o as getCacheDir, t as CACHE_DIR } from "./config.mjs";
|
|
3
2
|
import { r as resolvePkgDir } from "./prepare.mjs";
|
|
4
3
|
import { n as sanitizeMarkdown } from "./sanitize.mjs";
|
|
@@ -8,6 +7,7 @@ import { a as semverDiff, n as getSharedSkillsDir, t as SHARED_SKILLS_DIR } from
|
|
|
8
7
|
import { A as parseGitSkillInput, f as resolvePackageDocsWithAttempts, i as fetchPkgDist, k as fetchGitSkills, nt as parsePackageSpec, p as searchNpmPackages, q as isPrerelease, s as readLocalDependencies, x as resolveGitHubRepo } from "./sources.mjs";
|
|
9
8
|
import { a as targets } from "./detect.mjs";
|
|
10
9
|
import { a as sanitizeName, c as SECTION_OUTPUT_FILES, i as linkSkillToAgents, l as buildAllSectionPrompts, m as wrapSection, n as computeSkillDirName, p as portabilizePrompt, s as SECTION_MERGE_ORDER, t as generateSkillMd } from "./prompts.mjs";
|
|
10
|
+
import { a as getModelLabel, i as getAvailableModels, s as optimizeDocs, t as detectImportedPackages } from "./agent.mjs";
|
|
11
11
|
import { E as hasCompletedWizard, O as readConfig, S as suggestPrepareHook, _ as promptForAgent, b as resolveAgent, f as introLine, k as registerProject, o as getInstalledGenerators, p as isInteractive, w as defaultFeatures, x as sharedArgs } from "./cli-helpers.mjs";
|
|
12
12
|
import { a as removeLockEntry, i as readLock, n as parsePackages, s as writeLock } from "./lockfile.mjs";
|
|
13
13
|
import { t as getProjectState } from "./skills.mjs";
|
package/dist/_chunks/sync2.mjs
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import "./agent.mjs";
|
|
2
1
|
import "./config.mjs";
|
|
3
2
|
import "./package-json.mjs";
|
|
4
3
|
import "./prepare.mjs";
|
|
@@ -11,6 +10,8 @@ import "./shared.mjs";
|
|
|
11
10
|
import "./sources.mjs";
|
|
12
11
|
import "./detect.mjs";
|
|
13
12
|
import "./prompts.mjs";
|
|
13
|
+
import "./agent.mjs";
|
|
14
|
+
import "./libs/@sinclair/typebox.mjs";
|
|
14
15
|
import "./cli-helpers.mjs";
|
|
15
16
|
import "./lockfile.mjs";
|
|
16
17
|
import "./skills.mjs";
|
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import "./agent.mjs";
|
|
2
1
|
import { t as CACHE_DIR } from "./config.mjs";
|
|
3
2
|
import "./package-json.mjs";
|
|
4
3
|
import "./prepare.mjs";
|
|
@@ -11,6 +10,8 @@ import { r as mapInsert, t as SHARED_SKILLS_DIR } from "./shared.mjs";
|
|
|
11
10
|
import "./sources.mjs";
|
|
12
11
|
import { a as targets } from "./detect.mjs";
|
|
13
12
|
import "./prompts.mjs";
|
|
13
|
+
import "./agent.mjs";
|
|
14
|
+
import "./libs/@sinclair/typebox.mjs";
|
|
14
15
|
import { A as unregisterProject, T as getRegisteredProjects, p as isInteractive, x as sharedArgs } from "./cli-helpers.mjs";
|
|
15
16
|
import { i as readLock } from "./lockfile.mjs";
|
|
16
17
|
import "./skills.mjs";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"uninstall.mjs","names":["agents"],"sources":["../../src/commands/uninstall.ts"],"sourcesContent":["import type { AgentType } from '../agent/index.ts'\nimport { existsSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs'\nimport * as p from '@clack/prompts'\nimport { defineCommand } from 'citty'\nimport { join } from 'pathe'\nimport { agents } from '../agent/index.ts'\nimport { CACHE_DIR } from '../cache/index.ts'\nimport { isInteractive, sharedArgs } from '../cli-helpers.ts'\nimport { getRegisteredProjects, unregisterProject } from '../core/config.ts'\nimport { readLock } from '../core/lockfile.ts'\nimport { mapInsert, SHARED_SKILLS_DIR } from '../core/shared.ts'\nimport { SKILLD_MARKER_END, SKILLD_MARKER_START } from './sync.ts'\n\n/**\n * Remove the skilld marker block from an agent's instruction file.\n * For .mdc files (dedicated skilld files), delete the entire file.\n * Also cleans up legacy .cursorrules markers for backwards compat.\n */\nfunction removeAgentInstructions(agent: AgentType, projectPath: string): boolean {\n const agentConfig = agents[agent]\n if (!agentConfig.instructionFile)\n return false\n\n let removed = false\n\n // Handle current instruction file\n const filePath = join(projectPath, agentConfig.instructionFile)\n if (agentConfig.instructionFile.endsWith('.mdc')) {\n // MDC files are dedicated skilld files - just delete\n if (existsSync(filePath)) {\n rmSync(filePath)\n removed = true\n }\n // Also clean up legacy .cursorrules markers (cursor-specific)\n if (agent === 'cursor')\n removed = removeMarkerBlock(join(projectPath, '.cursorrules')) || removed\n }\n else if (existsSync(filePath)) {\n removed = removeMarkerBlock(filePath)\n }\n\n return removed\n}\n\nfunction removeMarkerBlock(filePath: string): boolean {\n if (!existsSync(filePath))\n return false\n\n const content = readFileSync(filePath, 'utf-8')\n const startIdx = content.indexOf(SKILLD_MARKER_START)\n if (startIdx === -1)\n return 
false\n\n const endIdx = content.indexOf(SKILLD_MARKER_END, startIdx)\n if (endIdx === -1)\n return false\n\n // Remove marker block plus surrounding blank lines\n const before = content.slice(0, startIdx).replace(/\\n+$/, '')\n const after = content.slice(endIdx + SKILLD_MARKER_END.length).replace(/^\\n+/, '')\n const updated = before + (before && after ? '\\n' : '') + after\n\n if (updated.trim() === '') {\n rmSync(filePath)\n }\n else {\n writeFileSync(filePath, updated.endsWith('\\n') ? updated : `${updated}\\n`)\n }\n return true\n}\n\nexport interface UninstallOptions {\n scope?: 'project' | 'all'\n agent?: AgentType\n yes: boolean\n}\n\n/**\n * Uninstall skilld skills by scope:\n * - project: Remove project skills (cwd)\n * - all: All registered projects + global skills + cache\n */\nexport async function uninstallCommand(opts: UninstallOptions): Promise<void> {\n let scope = opts.scope\n const registeredProjects = getRegisteredProjects()\n\n // Prompt for scope if not provided\n if (!scope) {\n if (!isInteractive()) {\n scope = 'project'\n }\n else {\n const allHint = registeredProjects.length > 0\n ? `${registeredProjects.length} projects + global + cache`\n : 'global skills + cache'\n\n const selected = await p.select({\n message: 'What do you want to uninstall?',\n options: [\n { label: 'This project', value: 'project', hint: 'current project only' },\n { label: 'Everything', value: 'all', hint: allHint },\n ],\n })\n\n if (p.isCancel(selected)) {\n p.cancel('Cancelled')\n return\n }\n scope = selected as 'project' | 'all'\n }\n }\n\n interface RemoveItem { label: string, path: string, version?: string }\n const toRemove: RemoveItem[] = []\n const seenPaths = new Set<string>()\n const projectsToUnregister: string[] = []\n const agentFilter = opts.agent ? 
[opts.agent] : undefined\n\n const addToRemove = (label: string, path: string, version?: string) => {\n if (seenPaths.has(path))\n return\n seenPaths.add(path)\n toRemove.push({ label, path, version })\n }\n\n // Helper to add skills from a lockfile\n const addSkillsFromLock = (skillsDir: string, label: string): string[] => {\n const trackedNames: string[] = []\n const lock = readLock(skillsDir)\n\n if (lock?.skills) {\n for (const [skillName, info] of Object.entries(lock.skills)) {\n trackedNames.push(skillName)\n const skillDir = join(skillsDir, skillName)\n if (existsSync(skillDir)) {\n const version = info.version ? `${info.version.split('.').slice(0, 2).join('.')}.x` : undefined\n addToRemove(`${label}: ${skillName}`, skillDir, version)\n }\n }\n\n // Also add the lockfile itself\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n if (existsSync(lockPath)) {\n addToRemove(`${label}: skilld-lock.yaml`, lockPath)\n }\n }\n\n return trackedNames\n }\n\n // Helper to find untracked skills in a directory\n const findUntrackedSkills = (skillsDir: string, trackedNames: string[]): string[] => {\n if (!existsSync(skillsDir))\n return []\n const tracked = new Set(trackedNames)\n return readdirSync(skillsDir)\n .filter(f => !f.startsWith('.') && f !== 'skilld-lock.yaml' && !tracked.has(f))\n }\n\n // Track untracked skills per directory (dedupe by path)\n const untrackedByDir = new Map<string, { label: string, skills: string[] }>()\n const processedDirs = new Set<string>()\n\n // Helper to process a skills directory (with deduping)\n const processSkillsDir = (skillsDir: string, label: string) => {\n if (processedDirs.has(skillsDir))\n return\n processedDirs.add(skillsDir)\n\n const tracked = addSkillsFromLock(skillsDir, label)\n const untracked = findUntrackedSkills(skillsDir, tracked)\n if (untracked.length > 0) {\n untrackedByDir.set(skillsDir, { label, skills: untracked })\n }\n }\n\n // Project skills\n if (scope === 'project') {\n // Shared dir\n const 
sharedDir = join(process.cwd(), SHARED_SKILLS_DIR)\n if (existsSync(sharedDir))\n processSkillsDir(sharedDir, 'project (.skills)')\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n processSkillsDir(join(process.cwd(), agent.skillsDir), 'project')\n }\n projectsToUnregister.push(process.cwd())\n }\n\n // All registered projects + global\n if (scope === 'all') {\n const projectPaths = registeredProjects.length > 0 ? registeredProjects : [process.cwd()]\n\n // Show which projects will be affected\n if (registeredProjects.length > 0) {\n p.log.info('Projects to uninstall from:')\n for (const proj of projectPaths) {\n p.log.message(` ${proj}`)\n }\n }\n\n // Project skills from lockfiles\n for (const projectPath of projectPaths) {\n if (!existsSync(projectPath))\n continue\n\n const shortPath = projectPath.replace(process.env.HOME || '', '~')\n\n // Shared dir\n const sharedDir = join(projectPath, SHARED_SKILLS_DIR)\n if (existsSync(sharedDir))\n processSkillsDir(sharedDir, `${shortPath} (.skills)`)\n\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n processSkillsDir(join(projectPath, agent.skillsDir), shortPath)\n }\n\n projectsToUnregister.push(projectPath)\n }\n\n // Global skills from lockfiles\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n if (!agent.globalSkillsDir)\n continue\n processSkillsDir(agent.globalSkillsDir, 'user')\n }\n\n // Cache directory\n if (existsSync(CACHE_DIR)) {\n addToRemove('~/.skilld cache', CACHE_DIR)\n }\n }\n\n // Warn about untracked skills that will remain (grouped by label, deduped)\n if (untrackedByDir.size > 0) {\n const groupedUntracked = new Map<string, Set<string>>()\n for (const [_dir, { label, skills }] of untrackedByDir) {\n const set = mapInsert(groupedUntracked, label, () => new 
Set())\n for (const s of skills) set.add(s)\n }\n\n const totalUntracked = [...groupedUntracked.values()].reduce((sum, s) => sum + s.size, 0)\n p.log.warn(`${totalUntracked} untracked skill(s) will remain (not managed by skilld):`)\n for (const [label, skills] of groupedUntracked) {\n p.log.message(` ${label}: ${[...skills].join(', ')}`)\n }\n }\n\n if (toRemove.length === 0) {\n p.log.info('Nothing to uninstall')\n return\n }\n\n // Group by prefix for display\n const groups = new Map<string, Array<{ name: string, version?: string }>>()\n for (const item of toRemove) {\n const [prefix, name] = item.label.includes(': ')\n ? item.label.split(': ', 2)\n : ['other', item.label]\n mapInsert(groups, prefix!, () => []).push({ name: name!, version: item.version })\n }\n\n const formatGroup = (items: Array<{ name: string, version?: string }>) =>\n items.map(i => i.version ? `${i.name}@${i.version}` : i.name).join(', ')\n\n p.log.info(`Will remove ${toRemove.length} items:`)\n for (const [prefix, items] of groups) {\n p.log.message(` ${prefix}: ${formatGroup(items)}`)\n }\n\n if (!opts.yes && isInteractive()) {\n const confirmed = await p.confirm({\n message: 'Proceed with uninstall?',\n })\n\n if (p.isCancel(confirmed) || !confirmed) {\n p.cancel('Cancelled')\n return\n }\n }\n\n // Remove all items\n for (const item of toRemove) {\n rmSync(item.path, { recursive: true, force: true })\n }\n\n // Show grouped removal summary\n for (const [prefix, items] of groups) {\n p.log.success(`Removed ${prefix}: ${formatGroup(items)}`)\n }\n\n // Remove skilld instructions from agent instruction files\n const agentTypes = agentFilter || (Object.keys(agents) as AgentType[])\n for (const proj of projectsToUnregister) {\n for (const agent of agentTypes) {\n if (removeAgentInstructions(agent, proj)) {\n const file = agents[agent].instructionFile!\n p.log.success(`Cleaned ${file}`)\n }\n }\n }\n\n // Unregister projects from config (skip if cache dir was removed — config is gone)\n if 
(scope !== 'all') {\n for (const proj of projectsToUnregister) {\n unregisterProject(proj)\n }\n }\n\n p.outro('skilld uninstalled')\n}\n\nexport const uninstallCommandDef = defineCommand({\n meta: { name: 'uninstall', description: 'Remove skilld data' },\n args: {\n ...sharedArgs,\n },\n async run({ args }) {\n p.intro(`\\x1B[1m\\x1B[35mskilld\\x1B[0m uninstall`)\n return uninstallCommand({\n scope: args.global ? 'all' : undefined,\n agent: args.agent as AgentType | undefined,\n yes: args.yes,\n })\n },\n})\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkBA,SAAS,wBAAwB,OAAkB,aAA8B;CAC/E,MAAM,cAAcA,QAAO;AAC3B,KAAI,CAAC,YAAY,gBACf,QAAO;CAET,IAAI,UAAU;CAGd,MAAM,WAAW,KAAK,aAAa,YAAY,gBAAgB;AAC/D,KAAI,YAAY,gBAAgB,SAAS,OAAO,EAAE;AAEhD,MAAI,WAAW,SAAS,EAAE;AACxB,UAAO,SAAS;AAChB,aAAU;;AAGZ,MAAI,UAAU,SACZ,WAAU,kBAAkB,KAAK,aAAa,eAAe,CAAC,IAAI;YAE7D,WAAW,SAAS,CAC3B,WAAU,kBAAkB,SAAS;AAGvC,QAAO;;AAGT,SAAS,kBAAkB,UAA2B;AACpD,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CAET,MAAM,UAAU,aAAa,UAAU,QAAQ;CAC/C,MAAM,WAAW,QAAQ,QAAQ,oBAAoB;AACrD,KAAI,aAAa,GACf,QAAO;CAET,MAAM,SAAS,QAAQ,QAAQ,mBAAmB,SAAS;AAC3D,KAAI,WAAW,GACb,QAAO;CAGT,MAAM,SAAS,QAAQ,MAAM,GAAG,SAAS,CAAC,QAAQ,QAAQ,GAAG;CAC7D,MAAM,QAAQ,QAAQ,MAAM,SAAS,kBAAkB,OAAO,CAAC,QAAQ,QAAQ,GAAG;CAClF,MAAM,UAAU,UAAU,UAAU,QAAQ,OAAO,MAAM;AAEzD,KAAI,QAAQ,MAAM,KAAK,GACrB,QAAO,SAAS;KAGhB,eAAc,UAAU,QAAQ,SAAS,KAAK,GAAG,UAAU,GAAG,QAAQ,IAAI;AAE5E,QAAO;;;;;;;AAcT,eAAsB,iBAAiB,MAAuC;CAC5E,IAAI,QAAQ,KAAK;CACjB,MAAM,qBAAqB,uBAAuB;AAGlD,KAAI,CAAC,MACH,KAAI,CAAC,eAAe,CAClB,SAAQ;MAEL;EACH,MAAM,UAAU,mBAAmB,SAAS,IACxC,GAAG,mBAAmB,OAAO,8BAC7B;EAEJ,MAAM,WAAW,MAAM,EAAE,OAAO;GAC9B,SAAS;GACT,SAAS,CACP;IAAE,OAAO;IAAgB,OAAO;IAAW,MAAM;IAAwB,EACzE;IAAE,OAAO;IAAc,OAAO;IAAO,MAAM;IAAS,CAAA;GAEvD,CAAC;AAEF,MAAI,EAAE,SAAS,SAAS,EAAE;AACxB,KAAE,OAAO,YAAY;AACrB;;AAEF,UAAQ;;CAKZ,MAAM,WAAyB,EAAE;CACjC,MAAM,4BAAY,IAAI,KAAa;CACnC,MAAM,uBAAiC,EAAE;CACzC,MAAM,cAAc,KAAK,QAAQ,CAAC,KAAK,MAAM,GAAG,KAAA;CAEhD,MAAM,eAAe,OAAe,MAAc,YAAqB;AACrE,MAAI,UAAU,IAAI,KAAK,CACrB;AACF,YAAU,IAAI,KAAK;AACnB,WAAS,KA
AK;GAAE;GAAO;GAAM;GAAS,CAAC;;CAIzC,MAAM,qBAAqB,WAAmB,UAA4B;EACxE,MAAM,eAAyB,EAAE;EACjC,MAAM,OAAO,SAAS,UAAU;AAEhC,MAAI,MAAM,QAAQ;AAChB,QAAK,MAAM,CAAC,WAAW,SAAS,OAAO,QAAQ,KAAK,OAAO,EAAE;AAC3D,iBAAa,KAAK,UAAU;IAC5B,MAAM,WAAW,KAAK,WAAW,UAAU;AAC3C,QAAI,WAAW,SAAS,EAAE;KACxB,MAAM,UAAU,KAAK,UAAU,GAAG,KAAK,QAAQ,MAAM,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,KAAA;AACtF,iBAAY,GAAG,MAAM,IAAI,aAAa,UAAU,QAAQ;;;GAK5D,MAAM,WAAW,KAAK,WAAW,mBAAmB;AACpD,OAAI,WAAW,SAAS,CACtB,aAAY,GAAG,MAAM,qBAAqB,SAAS;;AAIvD,SAAO;;CAIT,MAAM,uBAAuB,WAAmB,iBAAqC;AACnF,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO,EAAE;EACX,MAAM,UAAU,IAAI,IAAI,aAAa;AACrC,SAAO,YAAY,UAAU,CAC1B,QAAO,MAAK,CAAC,EAAE,WAAW,IAAI,IAAI,MAAM,sBAAsB,CAAC,QAAQ,IAAI,EAAE,CAAC;;CAInF,MAAM,iCAAiB,IAAI,KAAkD;CAC7E,MAAM,gCAAgB,IAAI,KAAa;CAGvC,MAAM,oBAAoB,WAAmB,UAAkB;AAC7D,MAAI,cAAc,IAAI,UAAU,CAC9B;AACF,gBAAc,IAAI,UAAU;EAG5B,MAAM,YAAY,oBAAoB,WADtB,kBAAkB,WAAW,MAAM,CACM;AACzD,MAAI,UAAU,SAAS,EACrB,gBAAe,IAAI,WAAW;GAAE;GAAO,QAAQ;GAAW,CAAC;;AAK/D,KAAI,UAAU,WAAW;EAEvB,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,kBAAkB;AACxD,MAAI,WAAW,UAAU,CACvB,kBAAiB,WAAW,oBAAoB;AAClD,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,OAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,oBAAiB,KAAK,QAAQ,KAAK,EAAE,MAAM,UAAU,EAAE,UAAU;;AAEnE,uBAAqB,KAAK,QAAQ,KAAK,CAAC;;AAI1C,KAAI,UAAU,OAAO;EACnB,MAAM,eAAe,mBAAmB,SAAS,IAAI,qBAAqB,CAAC,QAAQ,KAAK,CAAC;AAGzF,MAAI,mBAAmB,SAAS,GAAG;AACjC,KAAE,IAAI,KAAK,8BAA8B;AACzC,QAAK,MAAM,QAAQ,aACjB,GAAE,IAAI,QAAQ,KAAK,OAAO;;AAK9B,OAAK,MAAM,eAAe,cAAc;AACtC,OAAI,CAAC,WAAW,YAAY,CAC1B;GAEF,MAAM,YAAY,YAAY,QAAQ,QAAQ,IAAI,QAAQ,IAAI,IAAI;GAGlE,MAAM,YAAY,KAAK,aAAa,kBAAkB;AACtD,OAAI,WAAW,UAAU,CACvB,kBAAiB,WAAW,GAAG,UAAU,YAAY;AAEvD,QAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,QAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,qBAAiB,KAAK,aAAa,MAAM,UAAU,EAAE,UAAU;;AAGjE,wBAAqB,KAAK,YAAY;;AAIxC,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,OAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,OAAI,CAAC,MAAM,gBACT;AACF,oBAAiB,MAAM,iBAAiB,OAAO;;AAIjD,MAAI,WAAW,UAAU,CACvB,aAAY,mBAAmB,U
AAU;;AAK7C,KAAI,eAAe,OAAO,GAAG;EAC3B,MAAM,mCAAmB,IAAI,KAA0B;AACvD,OAAK,MAAM,CAAC,MAAM,EAAE,OAAO,aAAa,gBAAgB;GACtD,MAAM,MAAM,UAAU,kBAAkB,6BAAa,IAAI,KAAK,CAAC;AAC/D,QAAK,MAAM,KAAK,OAAQ,KAAI,IAAI,EAAE;;EAGpC,MAAM,iBAAiB,CAAC,GAAG,iBAAiB,QAAQ,CAAC,CAAC,QAAQ,KAAK,MAAM,MAAM,EAAE,MAAM,EAAE;AACzF,IAAE,IAAI,KAAK,GAAG,eAAe,0DAA0D;AACvF,OAAK,MAAM,CAAC,OAAO,WAAW,iBAC5B,GAAE,IAAI,QAAQ,KAAK,MAAM,IAAI,CAAC,GAAG,OAAO,CAAC,KAAK,KAAK,GAAG;;AAI1D,KAAI,SAAS,WAAW,GAAG;AACzB,IAAE,IAAI,KAAK,uBAAuB;AAClC;;CAIF,MAAM,yBAAS,IAAI,KAAwD;AAC3E,MAAK,MAAM,QAAQ,UAAU;EAC3B,MAAM,CAAC,QAAQ,QAAQ,KAAK,MAAM,SAAS,KAAK,GAC5C,KAAK,MAAM,MAAM,MAAM,EAAE,GACzB,CAAC,SAAS,KAAK,MAAM;AACzB,YAAU,QAAQ,cAAe,EAAE,CAAC,CAAC,KAAK;GAAQ;GAAO,SAAS,KAAK;GAAS,CAAC;;CAGnF,MAAM,eAAe,UACnB,MAAM,KAAI,MAAK,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,KAAK;AAE1E,GAAE,IAAI,KAAK,eAAe,SAAS,OAAO,SAAS;AACnD,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,GAAE,IAAI,QAAQ,KAAK,OAAO,IAAI,YAAY,MAAM,GAAG;AAGrD,KAAI,CAAC,KAAK,OAAO,eAAe,EAAE;EAChC,MAAM,YAAY,MAAM,EAAE,QAAQ,EAChC,SAAS,2BACV,CAAC;AAEF,MAAI,EAAE,SAAS,UAAU,IAAI,CAAC,WAAW;AACvC,KAAE,OAAO,YAAY;AACrB;;;AAKJ,MAAK,MAAM,QAAQ,SACjB,QAAO,KAAK,MAAM;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAIrD,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,GAAE,IAAI,QAAQ,WAAW,OAAO,IAAI,YAAY,MAAM,GAAG;CAI3D,MAAM,aAAa,eAAgB,OAAO,KAAKA,QAAO;AACtD,MAAK,MAAM,QAAQ,qBACjB,MAAK,MAAM,SAAS,WAClB,KAAI,wBAAwB,OAAO,KAAK,EAAE;EACxC,MAAM,OAAOA,QAAO,OAAO;AAC3B,IAAE,IAAI,QAAQ,WAAW,OAAO;;AAMtC,KAAI,UAAU,MACZ,MAAK,MAAM,QAAQ,qBACjB,mBAAkB,KAAK;AAI3B,GAAE,MAAM,qBAAqB;;AAG/B,MAAa,sBAAsB,cAAc;CAC/C,MAAM;EAAE,MAAM;EAAa,aAAa;EAAsB;CAC9D,MAAM,EACJ,GAAG,YACJ;CACD,MAAM,IAAI,EAAE,QAAQ;AAClB,IAAE,MAAM,yCAAyC;AACjD,SAAO,iBAAiB;GACtB,OAAO,KAAK,SAAS,QAAQ,KAAA;GAC7B,OAAO,KAAK;GACZ,KAAK,KAAK;GACX,CAAC;;CAEL,CAAC"}
|
|
1
|
+
{"version":3,"file":"uninstall.mjs","names":["agents"],"sources":["../../src/commands/uninstall.ts"],"sourcesContent":["import type { AgentType } from '../agent/index.ts'\nimport { existsSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs'\nimport * as p from '@clack/prompts'\nimport { defineCommand } from 'citty'\nimport { join } from 'pathe'\nimport { agents } from '../agent/index.ts'\nimport { CACHE_DIR } from '../cache/index.ts'\nimport { isInteractive, sharedArgs } from '../cli-helpers.ts'\nimport { getRegisteredProjects, unregisterProject } from '../core/config.ts'\nimport { readLock } from '../core/lockfile.ts'\nimport { mapInsert, SHARED_SKILLS_DIR } from '../core/shared.ts'\nimport { SKILLD_MARKER_END, SKILLD_MARKER_START } from './sync.ts'\n\n/**\n * Remove the skilld marker block from an agent's instruction file.\n * For .mdc files (dedicated skilld files), delete the entire file.\n * Also cleans up legacy .cursorrules markers for backwards compat.\n */\nfunction removeAgentInstructions(agent: AgentType, projectPath: string): boolean {\n const agentConfig = agents[agent]\n if (!agentConfig.instructionFile)\n return false\n\n let removed = false\n\n // Handle current instruction file\n const filePath = join(projectPath, agentConfig.instructionFile)\n if (agentConfig.instructionFile.endsWith('.mdc')) {\n // MDC files are dedicated skilld files - just delete\n if (existsSync(filePath)) {\n rmSync(filePath)\n removed = true\n }\n // Also clean up legacy .cursorrules markers (cursor-specific)\n if (agent === 'cursor')\n removed = removeMarkerBlock(join(projectPath, '.cursorrules')) || removed\n }\n else if (existsSync(filePath)) {\n removed = removeMarkerBlock(filePath)\n }\n\n return removed\n}\n\nfunction removeMarkerBlock(filePath: string): boolean {\n if (!existsSync(filePath))\n return false\n\n const content = readFileSync(filePath, 'utf-8')\n const startIdx = content.indexOf(SKILLD_MARKER_START)\n if (startIdx === -1)\n return 
false\n\n const endIdx = content.indexOf(SKILLD_MARKER_END, startIdx)\n if (endIdx === -1)\n return false\n\n // Remove marker block plus surrounding blank lines\n const before = content.slice(0, startIdx).replace(/\\n+$/, '')\n const after = content.slice(endIdx + SKILLD_MARKER_END.length).replace(/^\\n+/, '')\n const updated = before + (before && after ? '\\n' : '') + after\n\n if (updated.trim() === '') {\n rmSync(filePath)\n }\n else {\n writeFileSync(filePath, updated.endsWith('\\n') ? updated : `${updated}\\n`)\n }\n return true\n}\n\nexport interface UninstallOptions {\n scope?: 'project' | 'all'\n agent?: AgentType\n yes: boolean\n}\n\n/**\n * Uninstall skilld skills by scope:\n * - project: Remove project skills (cwd)\n * - all: All registered projects + global skills + cache\n */\nexport async function uninstallCommand(opts: UninstallOptions): Promise<void> {\n let scope = opts.scope\n const registeredProjects = getRegisteredProjects()\n\n // Prompt for scope if not provided\n if (!scope) {\n if (!isInteractive()) {\n scope = 'project'\n }\n else {\n const allHint = registeredProjects.length > 0\n ? `${registeredProjects.length} projects + global + cache`\n : 'global skills + cache'\n\n const selected = await p.select({\n message: 'What do you want to uninstall?',\n options: [\n { label: 'This project', value: 'project', hint: 'current project only' },\n { label: 'Everything', value: 'all', hint: allHint },\n ],\n })\n\n if (p.isCancel(selected)) {\n p.cancel('Cancelled')\n return\n }\n scope = selected as 'project' | 'all'\n }\n }\n\n interface RemoveItem { label: string, path: string, version?: string }\n const toRemove: RemoveItem[] = []\n const seenPaths = new Set<string>()\n const projectsToUnregister: string[] = []\n const agentFilter = opts.agent ? 
[opts.agent] : undefined\n\n const addToRemove = (label: string, path: string, version?: string) => {\n if (seenPaths.has(path))\n return\n seenPaths.add(path)\n toRemove.push({ label, path, version })\n }\n\n // Helper to add skills from a lockfile\n const addSkillsFromLock = (skillsDir: string, label: string): string[] => {\n const trackedNames: string[] = []\n const lock = readLock(skillsDir)\n\n if (lock?.skills) {\n for (const [skillName, info] of Object.entries(lock.skills)) {\n trackedNames.push(skillName)\n const skillDir = join(skillsDir, skillName)\n if (existsSync(skillDir)) {\n const version = info.version ? `${info.version.split('.').slice(0, 2).join('.')}.x` : undefined\n addToRemove(`${label}: ${skillName}`, skillDir, version)\n }\n }\n\n // Also add the lockfile itself\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n if (existsSync(lockPath)) {\n addToRemove(`${label}: skilld-lock.yaml`, lockPath)\n }\n }\n\n return trackedNames\n }\n\n // Helper to find untracked skills in a directory\n const findUntrackedSkills = (skillsDir: string, trackedNames: string[]): string[] => {\n if (!existsSync(skillsDir))\n return []\n const tracked = new Set(trackedNames)\n return readdirSync(skillsDir)\n .filter(f => !f.startsWith('.') && f !== 'skilld-lock.yaml' && !tracked.has(f))\n }\n\n // Track untracked skills per directory (dedupe by path)\n const untrackedByDir = new Map<string, { label: string, skills: string[] }>()\n const processedDirs = new Set<string>()\n\n // Helper to process a skills directory (with deduping)\n const processSkillsDir = (skillsDir: string, label: string) => {\n if (processedDirs.has(skillsDir))\n return\n processedDirs.add(skillsDir)\n\n const tracked = addSkillsFromLock(skillsDir, label)\n const untracked = findUntrackedSkills(skillsDir, tracked)\n if (untracked.length > 0) {\n untrackedByDir.set(skillsDir, { label, skills: untracked })\n }\n }\n\n // Project skills\n if (scope === 'project') {\n // Shared dir\n const 
sharedDir = join(process.cwd(), SHARED_SKILLS_DIR)\n if (existsSync(sharedDir))\n processSkillsDir(sharedDir, 'project (.skills)')\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n processSkillsDir(join(process.cwd(), agent.skillsDir), 'project')\n }\n projectsToUnregister.push(process.cwd())\n }\n\n // All registered projects + global\n if (scope === 'all') {\n const projectPaths = registeredProjects.length > 0 ? registeredProjects : [process.cwd()]\n\n // Show which projects will be affected\n if (registeredProjects.length > 0) {\n p.log.info('Projects to uninstall from:')\n for (const proj of projectPaths) {\n p.log.message(` ${proj}`)\n }\n }\n\n // Project skills from lockfiles\n for (const projectPath of projectPaths) {\n if (!existsSync(projectPath))\n continue\n\n const shortPath = projectPath.replace(process.env.HOME || '', '~')\n\n // Shared dir\n const sharedDir = join(projectPath, SHARED_SKILLS_DIR)\n if (existsSync(sharedDir))\n processSkillsDir(sharedDir, `${shortPath} (.skills)`)\n\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n processSkillsDir(join(projectPath, agent.skillsDir), shortPath)\n }\n\n projectsToUnregister.push(projectPath)\n }\n\n // Global skills from lockfiles\n for (const [name, agent] of Object.entries(agents)) {\n if (agentFilter && !agentFilter.includes(name as AgentType))\n continue\n if (!agent.globalSkillsDir)\n continue\n processSkillsDir(agent.globalSkillsDir, 'user')\n }\n\n // Cache directory\n if (existsSync(CACHE_DIR)) {\n addToRemove('~/.skilld cache', CACHE_DIR)\n }\n }\n\n // Warn about untracked skills that will remain (grouped by label, deduped)\n if (untrackedByDir.size > 0) {\n const groupedUntracked = new Map<string, Set<string>>()\n for (const [_dir, { label, skills }] of untrackedByDir) {\n const set = mapInsert(groupedUntracked, label, () => new 
Set())\n for (const s of skills) set.add(s)\n }\n\n const totalUntracked = [...groupedUntracked.values()].reduce((sum, s) => sum + s.size, 0)\n p.log.warn(`${totalUntracked} untracked skill(s) will remain (not managed by skilld):`)\n for (const [label, skills] of groupedUntracked) {\n p.log.message(` ${label}: ${[...skills].join(', ')}`)\n }\n }\n\n if (toRemove.length === 0) {\n p.log.info('Nothing to uninstall')\n return\n }\n\n // Group by prefix for display\n const groups = new Map<string, Array<{ name: string, version?: string }>>()\n for (const item of toRemove) {\n const [prefix, name] = item.label.includes(': ')\n ? item.label.split(': ', 2)\n : ['other', item.label]\n mapInsert(groups, prefix!, () => []).push({ name: name!, version: item.version })\n }\n\n const formatGroup = (items: Array<{ name: string, version?: string }>) =>\n items.map(i => i.version ? `${i.name}@${i.version}` : i.name).join(', ')\n\n p.log.info(`Will remove ${toRemove.length} items:`)\n for (const [prefix, items] of groups) {\n p.log.message(` ${prefix}: ${formatGroup(items)}`)\n }\n\n if (!opts.yes && isInteractive()) {\n const confirmed = await p.confirm({\n message: 'Proceed with uninstall?',\n })\n\n if (p.isCancel(confirmed) || !confirmed) {\n p.cancel('Cancelled')\n return\n }\n }\n\n // Remove all items\n for (const item of toRemove) {\n rmSync(item.path, { recursive: true, force: true })\n }\n\n // Show grouped removal summary\n for (const [prefix, items] of groups) {\n p.log.success(`Removed ${prefix}: ${formatGroup(items)}`)\n }\n\n // Remove skilld instructions from agent instruction files\n const agentTypes = agentFilter || (Object.keys(agents) as AgentType[])\n for (const proj of projectsToUnregister) {\n for (const agent of agentTypes) {\n if (removeAgentInstructions(agent, proj)) {\n const file = agents[agent].instructionFile!\n p.log.success(`Cleaned ${file}`)\n }\n }\n }\n\n // Unregister projects from config (skip if cache dir was removed — config is gone)\n if 
(scope !== 'all') {\n for (const proj of projectsToUnregister) {\n unregisterProject(proj)\n }\n }\n\n p.outro('skilld uninstalled')\n}\n\nexport const uninstallCommandDef = defineCommand({\n meta: { name: 'uninstall', description: 'Remove skilld data' },\n args: {\n ...sharedArgs,\n },\n async run({ args }) {\n p.intro(`\\x1B[1m\\x1B[35mskilld\\x1B[0m uninstall`)\n return uninstallCommand({\n scope: args.global ? 'all' : undefined,\n agent: args.agent as AgentType | undefined,\n yes: args.yes,\n })\n },\n})\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkBA,SAAS,wBAAwB,OAAkB,aAA8B;CAC/E,MAAM,cAAcA,QAAO;AAC3B,KAAI,CAAC,YAAY,gBACf,QAAO;CAET,IAAI,UAAU;CAGd,MAAM,WAAW,KAAK,aAAa,YAAY,gBAAgB;AAC/D,KAAI,YAAY,gBAAgB,SAAS,OAAO,EAAE;AAEhD,MAAI,WAAW,SAAS,EAAE;AACxB,UAAO,SAAS;AAChB,aAAU;;AAGZ,MAAI,UAAU,SACZ,WAAU,kBAAkB,KAAK,aAAa,eAAe,CAAC,IAAI;YAE7D,WAAW,SAAS,CAC3B,WAAU,kBAAkB,SAAS;AAGvC,QAAO;;AAGT,SAAS,kBAAkB,UAA2B;AACpD,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CAET,MAAM,UAAU,aAAa,UAAU,QAAQ;CAC/C,MAAM,WAAW,QAAQ,QAAQ,oBAAoB;AACrD,KAAI,aAAa,GACf,QAAO;CAET,MAAM,SAAS,QAAQ,QAAQ,mBAAmB,SAAS;AAC3D,KAAI,WAAW,GACb,QAAO;CAGT,MAAM,SAAS,QAAQ,MAAM,GAAG,SAAS,CAAC,QAAQ,QAAQ,GAAG;CAC7D,MAAM,QAAQ,QAAQ,MAAM,SAAS,kBAAkB,OAAO,CAAC,QAAQ,QAAQ,GAAG;CAClF,MAAM,UAAU,UAAU,UAAU,QAAQ,OAAO,MAAM;AAEzD,KAAI,QAAQ,MAAM,KAAK,GACrB,QAAO,SAAS;KAGhB,eAAc,UAAU,QAAQ,SAAS,KAAK,GAAG,UAAU,GAAG,QAAQ,IAAI;AAE5E,QAAO;;;;;;;AAcT,eAAsB,iBAAiB,MAAuC;CAC5E,IAAI,QAAQ,KAAK;CACjB,MAAM,qBAAqB,uBAAuB;AAGlD,KAAI,CAAC,MACH,KAAI,CAAC,eAAe,CAClB,SAAQ;MAEL;EACH,MAAM,UAAU,mBAAmB,SAAS,IACxC,GAAG,mBAAmB,OAAO,8BAC7B;EAEJ,MAAM,WAAW,MAAM,EAAE,OAAO;GAC9B,SAAS;GACT,SAAS,CACP;IAAE,OAAO;IAAgB,OAAO;IAAW,MAAM;IAAwB,EACzE;IAAE,OAAO;IAAc,OAAO;IAAO,MAAM;IAAS,CAAA;GAEvD,CAAC;AAEF,MAAI,EAAE,SAAS,SAAS,EAAE;AACxB,KAAE,OAAO,YAAY;AACrB;;AAEF,UAAQ;;CAKZ,MAAM,WAAyB,EAAE;CACjC,MAAM,4BAAY,IAAI,KAAa;CACnC,MAAM,uBAAiC,EAAE;CACzC,MAAM,cAAc,KAAK,QAAQ,CAAC,KAAK,MAAM,GAAG,KAAA;CAEhD,MAAM,eAAe,OAAe,MAAc,YAAqB;AACrE,MAAI,UAAU,IAAI,KAAK,CACrB;AACF,YAAU,IAAI,KAAK;AACnB,WAAS,K
AAK;GAAE;GAAO;GAAM;GAAS,CAAC;;CAIzC,MAAM,qBAAqB,WAAmB,UAA4B;EACxE,MAAM,eAAyB,EAAE;EACjC,MAAM,OAAO,SAAS,UAAU;AAEhC,MAAI,MAAM,QAAQ;AAChB,QAAK,MAAM,CAAC,WAAW,SAAS,OAAO,QAAQ,KAAK,OAAO,EAAE;AAC3D,iBAAa,KAAK,UAAU;IAC5B,MAAM,WAAW,KAAK,WAAW,UAAU;AAC3C,QAAI,WAAW,SAAS,EAAE;KACxB,MAAM,UAAU,KAAK,UAAU,GAAG,KAAK,QAAQ,MAAM,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,IAAI,CAAC,MAAM,KAAA;AACtF,iBAAY,GAAG,MAAM,IAAI,aAAa,UAAU,QAAQ;;;GAK5D,MAAM,WAAW,KAAK,WAAW,mBAAmB;AACpD,OAAI,WAAW,SAAS,CACtB,aAAY,GAAG,MAAM,qBAAqB,SAAS;;AAIvD,SAAO;;CAIT,MAAM,uBAAuB,WAAmB,iBAAqC;AACnF,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO,EAAE;EACX,MAAM,UAAU,IAAI,IAAI,aAAa;AACrC,SAAO,YAAY,UAAU,CAC1B,QAAO,MAAK,CAAC,EAAE,WAAW,IAAI,IAAI,MAAM,sBAAsB,CAAC,QAAQ,IAAI,EAAE,CAAC;;CAInF,MAAM,iCAAiB,IAAI,KAAkD;CAC7E,MAAM,gCAAgB,IAAI,KAAa;CAGvC,MAAM,oBAAoB,WAAmB,UAAkB;AAC7D,MAAI,cAAc,IAAI,UAAU,CAC9B;AACF,gBAAc,IAAI,UAAU;EAG5B,MAAM,YAAY,oBAAoB,WADtB,kBAAkB,WAAW,MAAM,CACM;AACzD,MAAI,UAAU,SAAS,EACrB,gBAAe,IAAI,WAAW;GAAE;GAAO,QAAQ;GAAW,CAAC;;AAK/D,KAAI,UAAU,WAAW;EAEvB,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,kBAAkB;AACxD,MAAI,WAAW,UAAU,CACvB,kBAAiB,WAAW,oBAAoB;AAClD,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,OAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,oBAAiB,KAAK,QAAQ,KAAK,EAAE,MAAM,UAAU,EAAE,UAAU;;AAEnE,uBAAqB,KAAK,QAAQ,KAAK,CAAC;;AAI1C,KAAI,UAAU,OAAO;EACnB,MAAM,eAAe,mBAAmB,SAAS,IAAI,qBAAqB,CAAC,QAAQ,KAAK,CAAC;AAGzF,MAAI,mBAAmB,SAAS,GAAG;AACjC,KAAE,IAAI,KAAK,8BAA8B;AACzC,QAAK,MAAM,QAAQ,aACjB,GAAE,IAAI,QAAQ,KAAK,OAAO;;AAK9B,OAAK,MAAM,eAAe,cAAc;AACtC,OAAI,CAAC,WAAW,YAAY,CAC1B;GAEF,MAAM,YAAY,YAAY,QAAQ,QAAQ,IAAI,QAAQ,IAAI,IAAI;GAGlE,MAAM,YAAY,KAAK,aAAa,kBAAkB;AACtD,OAAI,WAAW,UAAU,CACvB,kBAAiB,WAAW,GAAG,UAAU,YAAY;AAEvD,QAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,QAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,qBAAiB,KAAK,aAAa,MAAM,UAAU,EAAE,UAAU;;AAGjE,wBAAqB,KAAK,YAAY;;AAIxC,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQA,QAAO,EAAE;AAClD,OAAI,eAAe,CAAC,YAAY,SAAS,KAAkB,CACzD;AACF,OAAI,CAAC,MAAM,gBACT;AACF,oBAAiB,MAAM,iBAAiB,OAAO;;AAIjD,MAAI,WAAW,UAAU,CACvB,aAAY,mBAAmB,
UAAU;;AAK7C,KAAI,eAAe,OAAO,GAAG;EAC3B,MAAM,mCAAmB,IAAI,KAA0B;AACvD,OAAK,MAAM,CAAC,MAAM,EAAE,OAAO,aAAa,gBAAgB;GACtD,MAAM,MAAM,UAAU,kBAAkB,6BAAa,IAAI,KAAK,CAAC;AAC/D,QAAK,MAAM,KAAK,OAAQ,KAAI,IAAI,EAAE;;EAGpC,MAAM,iBAAiB,CAAC,GAAG,iBAAiB,QAAQ,CAAC,CAAC,QAAQ,KAAK,MAAM,MAAM,EAAE,MAAM,EAAE;AACzF,IAAE,IAAI,KAAK,GAAG,eAAe,0DAA0D;AACvF,OAAK,MAAM,CAAC,OAAO,WAAW,iBAC5B,GAAE,IAAI,QAAQ,KAAK,MAAM,IAAI,CAAC,GAAG,OAAO,CAAC,KAAK,KAAK,GAAG;;AAI1D,KAAI,SAAS,WAAW,GAAG;AACzB,IAAE,IAAI,KAAK,uBAAuB;AAClC;;CAIF,MAAM,yBAAS,IAAI,KAAwD;AAC3E,MAAK,MAAM,QAAQ,UAAU;EAC3B,MAAM,CAAC,QAAQ,QAAQ,KAAK,MAAM,SAAS,KAAK,GAC5C,KAAK,MAAM,MAAM,MAAM,EAAE,GACzB,CAAC,SAAS,KAAK,MAAM;AACzB,YAAU,QAAQ,cAAe,EAAE,CAAC,CAAC,KAAK;GAAQ;GAAO,SAAS,KAAK;GAAS,CAAC;;CAGnF,MAAM,eAAe,UACnB,MAAM,KAAI,MAAK,EAAE,UAAU,GAAG,EAAE,KAAK,GAAG,EAAE,YAAY,EAAE,KAAK,CAAC,KAAK,KAAK;AAE1E,GAAE,IAAI,KAAK,eAAe,SAAS,OAAO,SAAS;AACnD,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,GAAE,IAAI,QAAQ,KAAK,OAAO,IAAI,YAAY,MAAM,GAAG;AAGrD,KAAI,CAAC,KAAK,OAAO,eAAe,EAAE;EAChC,MAAM,YAAY,MAAM,EAAE,QAAQ,EAChC,SAAS,2BACV,CAAC;AAEF,MAAI,EAAE,SAAS,UAAU,IAAI,CAAC,WAAW;AACvC,KAAE,OAAO,YAAY;AACrB;;;AAKJ,MAAK,MAAM,QAAQ,SACjB,QAAO,KAAK,MAAM;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAIrD,MAAK,MAAM,CAAC,QAAQ,UAAU,OAC5B,GAAE,IAAI,QAAQ,WAAW,OAAO,IAAI,YAAY,MAAM,GAAG;CAI3D,MAAM,aAAa,eAAgB,OAAO,KAAKA,QAAO;AACtD,MAAK,MAAM,QAAQ,qBACjB,MAAK,MAAM,SAAS,WAClB,KAAI,wBAAwB,OAAO,KAAK,EAAE;EACxC,MAAM,OAAOA,QAAO,OAAO;AAC3B,IAAE,IAAI,QAAQ,WAAW,OAAO;;AAMtC,KAAI,UAAU,MACZ,MAAK,MAAM,QAAQ,qBACjB,mBAAkB,KAAK;AAI3B,GAAE,MAAM,qBAAqB;;AAG/B,MAAa,sBAAsB,cAAc;CAC/C,MAAM;EAAE,MAAM;EAAa,aAAa;EAAsB;CAC9D,MAAM,EACJ,GAAG,YACJ;CACD,MAAM,IAAI,EAAE,QAAQ;AAClB,IAAE,MAAM,yCAAyC;AACjD,SAAO,iBAAiB;GACtB,OAAO,KAAK,SAAS,QAAQ,KAAA;GAC7B,OAAO,KAAK;GACZ,KAAK,KAAK;GACX,CAAC;;CAEL,CAAC"}
|
package/dist/_chunks/wizard.mjs
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { c as getOAuthProviderList, i as getAvailableModels, l as loginOAuthProvider, o as getModelName } from "./agent.mjs";
|
|
2
1
|
import { a as targets } from "./detect.mjs";
|
|
2
|
+
import { c as getOAuthProviderList, i as getAvailableModels, l as loginOAuthProvider, o as getModelName } from "./agent.mjs";
|
|
3
3
|
import { g as pickModel, j as updateConfig, n as NO_MODELS_MESSAGE, p as isInteractive, r as OAUTH_NOTE, w as defaultFeatures } from "./cli-helpers.mjs";
|
|
4
4
|
import { execSync } from "node:child_process";
|
|
5
5
|
import * as p from "@clack/prompts";
|