skilld 0.6.1 → 0.6.2

This diff shows the changes between package versions as published to their respective public registries. It covers only publicly released versions and is provided for informational purposes.
@@ -246,7 +246,8 @@ function generateIssueIndex(issues) {
   for (const issue of group) {
     const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : "";
     const state = issue.state === "open" ? "" : " [closed]";
-    sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state}`);
+    const date = isoDate(issue.createdAt);
+    sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state} (${date})`);
   }
   sections.push("");
 }
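
Note: isoDate is the helper defined in src/sources/github-common.ts (its source is inlined in the package's source map below); it takes the YYYY-MM-DD prefix of an ISO timestamp. A minimal sketch of the new index-line format, using a hypothetical sample issue:

    // From src/sources/github-common.ts — extract YYYY-MM-DD from an ISO timestamp
    const isoDate = (iso: string) => iso.split('T')[0]

    // Hypothetical sample issue, for illustration only
    const issue = { number: 42, title: 'Crash on startup', reactions: 3, state: 'closed', createdAt: '2025-01-15T09:30:00Z' }

    // 0.6.1 rendered: - [#42](./issue-42.md): Crash on startup (+3) [closed]
    // 0.6.2 renders:  - [#42](./issue-42.md): Crash on startup (+3) [closed] (2025-01-15)
    console.log(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title} (+${issue.reactions}) [closed] (${isoDate(issue.createdAt)})`)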
@@ -708,7 +709,8 @@ function generateDiscussionIndex(discussions) {
   for (const d of group) {
     const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : "";
     const answered = d.answer ? " [answered]" : "";
-    sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered}`);
+    const date = isoDate(d.createdAt);
+    sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`);
   }
   sections.push("");
 }
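
The discussion index gets the same treatment. A hypothetical answered discussion (#7, 5 upvotes, created 2025-02-03) would now render as:

    - [#7](./discussion-7.md): How should caching be configured? (+5) [answered] (2025-02-03)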
@@ -1 +1 @@
1
- {"version":3,"file":"npm.mjs","names":["fsExistsSync","fsReadFileSync"],"sources":["../../src/sources/github-common.ts","../../src/sources/issues.ts","../../src/sources/utils.ts","../../src/sources/releases.ts","../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/github.ts","../../src/sources/llms.ts","../../src/sources/npm.ts"],"sourcesContent":["/**\n * Shared constants and helpers for GitHub source modules (issues, discussions, releases)\n */\n\nexport const BOT_USERS = new Set([\n 'renovate[bot]',\n 'dependabot[bot]',\n 'renovate-bot',\n 'dependabot',\n 'github-actions[bot]',\n])\n\n/** Extract YYYY-MM-DD date from an ISO timestamp */\nexport const isoDate = (iso: string) => iso.split('T')[0]\n\n/** Build YAML frontmatter from a key-value object, auto-quoting strings with special chars */\nexport function buildFrontmatter(fields: Record<string, string | number | boolean | undefined>): string {\n const lines = ['---']\n for (const [k, v] of Object.entries(fields)) {\n if (v !== undefined)\n lines.push(`${k}: ${typeof v === 'string' && /[:\"[\\]]/.test(v) ? `\"${v.replace(/\"/g, '\\\\\"')}\"` : v}`)\n }\n lines.push('---')\n return lines.join('\\n')\n}\n","/**\n * GitHub issues fetching via gh CLI Search API\n * Sorted by reactions (upvotes), 75% open / 25% closed (within last year)\n * Categorized by labels, noise filtered out\n */\n\nimport { spawnSync } from 'node:child_process'\n\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common'\n\nexport type IssueType = 'bug' | 'question' | 'docs' | 'feature' | 'other'\n\nexport interface IssueComment {\n body: string\n author: string\n reactions: number\n}\n\nexport interface GitHubIssue {\n number: number\n title: string\n state: string\n labels: string[]\n body: string\n createdAt: string\n url: string\n reactions: number\n comments: number\n type: IssueType\n topComments: IssueComment[]\n}\n\nlet _ghAvailable: boolean | undefined\n\n/**\n * Check if gh CLI is installed and authenticated (cached)\n */\nexport function isGhAvailable(): boolean {\n if (_ghAvailable !== undefined)\n return _ghAvailable\n const { status } = spawnSync('gh', ['auth', 'status'], { stdio: 'ignore' })\n return (_ghAvailable = status === 0)\n}\n\n/** Labels that indicate noise — filter these out entirely */\nconst NOISE_LABELS = new Set([\n 'duplicate',\n 'stale',\n 'invalid',\n 'wontfix',\n 'won\\'t fix',\n 'spam',\n 'off-topic',\n 'needs triage',\n 'triage',\n])\n\n/** Labels that indicate feature requests — deprioritize */\nconst FEATURE_LABELS = new Set([\n 'enhancement',\n 'feature',\n 'feature request',\n 'feature-request',\n 'proposal',\n 'rfc',\n 'idea',\n 'suggestion',\n])\n\nconst BUG_LABELS = new Set([\n 'bug',\n 'defect',\n 'regression',\n 'error',\n 'crash',\n 'fix',\n 'confirmed',\n 'verified',\n])\n\nconst QUESTION_LABELS = new Set([\n 'question',\n 'help wanted',\n 'support',\n 'usage',\n 'how-to',\n 'help',\n 'assistance',\n])\n\nconst DOCS_LABELS = new Set([\n 'documentation',\n 'docs',\n 'doc',\n 'typo',\n])\n\n/**\n * Classify an issue by its labels into a type useful for skill generation\n */\nexport function classifyIssue(labels: string[]): IssueType {\n const lower = labels.map(l => l.toLowerCase())\n if (lower.some(l => BUG_LABELS.has(l)))\n return 'bug'\n if (lower.some(l => QUESTION_LABELS.has(l)))\n return 'question'\n if (lower.some(l => DOCS_LABELS.has(l)))\n return 'docs'\n if (lower.some(l => 
FEATURE_LABELS.has(l)))\n return 'feature'\n return 'other'\n}\n\n/**\n * Check if an issue should be filtered out entirely\n */\nfunction isNoiseIssue(issue: { labels: string[], title: string, body: string }): boolean {\n const lower = issue.labels.map(l => l.toLowerCase())\n if (lower.some(l => NOISE_LABELS.has(l)))\n return true\n // Tracking/umbrella issues — low signal for skill generation\n if (issue.title.startsWith('☂️') || issue.title.startsWith('[META]') || issue.title.startsWith('[Tracking]'))\n return true\n return false\n}\n\n/**\n * Body truncation limit based on reactions — high-reaction issues deserve more space\n */\nfunction bodyLimit(reactions: number): number {\n if (reactions >= 10)\n return 2000\n if (reactions >= 5)\n return 1500\n return 800\n}\n\n/**\n * Fetch issues for a state using GitHub Search API sorted by reactions\n */\nfunction fetchIssuesByState(\n owner: string,\n repo: string,\n state: 'open' | 'closed',\n count: number,\n releasedAt?: string,\n): GitHubIssue[] {\n const fetchCount = Math.min(count * 3, 100)\n let datePart = ''\n if (state === 'closed') {\n if (releasedAt) {\n // For older versions, include issues closed up to 6 months after release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+closed:<=${isoDate(date.toISOString())}`\n }\n else {\n datePart = `+closed:>${oneYearAgo()}`\n }\n }\n else if (releasedAt) {\n // For older versions, only include issues created around or before release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+created:<=${isoDate(date.toISOString())}`\n }\n\n const q = `repo:${owner}/${repo}+is:issue+is:${state}${datePart}`\n\n const { stdout: result } = spawnSync('gh', [\n 'api',\n `search/issues?q=${q}&sort=reactions&order=desc&per_page=${fetchCount}`,\n '-q',\n '.items[] | {number, title, state, labels: [.labels[]?.name], body, createdAt: .created_at, url: .html_url, reactions: .reactions[\"+1\"], comments: .comments, user: .user.login, userType: .user.type}',\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return []\n\n return result\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(line => JSON.parse(line) as GitHubIssue & { user: string, userType: string })\n .filter(issue => !BOT_USERS.has(issue.user) && issue.userType !== 'Bot')\n .filter(issue => !isNoiseIssue(issue))\n .map(({ user: _, userType: __, ...issue }) => ({\n ...issue,\n type: classifyIssue(issue.labels),\n topComments: [] as IssueComment[],\n }))\n // Deprioritize feature requests — push to end\n .sort((a, b) => (a.type === 'feature' ? 1 : 0) - (b.type === 'feature' ? 
1 : 0))\n .slice(0, count)\n}\n\nfunction oneYearAgo(): string {\n const d = new Date()\n d.setFullYear(d.getFullYear() - 1)\n return isoDate(d.toISOString())!\n}\n\n/**\n * Batch-fetch top comments for issues via GraphQL.\n * Enriches the top N highest-reaction issues with their most-reacted comments.\n */\nfunction enrichWithComments(owner: string, repo: string, issues: GitHubIssue[], topN = 10): void {\n // Only fetch comments for issues worth enriching\n const worth = issues\n .filter(i => i.comments > 0 && (i.type === 'bug' || i.type === 'question' || i.reactions >= 3))\n .sort((a, b) => b.reactions - a.reactions)\n .slice(0, topN)\n\n if (worth.length === 0)\n return\n\n // Build a single GraphQL query fetching comments for all selected issues\n const fragments = worth.map((issue, i) =>\n `i${i}: issue(number: ${issue.number}) { comments(first: 3) { nodes { body author { login } reactions { totalCount } } } }`,\n ).join(' ')\n\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { ${fragments} } }`\n\n try {\n const { stdout: result } = spawnSync('gh', [\n 'api',\n 'graphql',\n '-f',\n `query=${query}`,\n '-f',\n `owner=${owner}`,\n '-f',\n `repo=${repo}`,\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return\n\n const data = JSON.parse(result)\n const repo_ = data?.data?.repository\n if (!repo_)\n return\n\n for (let i = 0; i < worth.length; i++) {\n const nodes = repo_[`i${i}`]?.comments?.nodes\n if (!Array.isArray(nodes))\n continue\n worth[i]!.topComments = nodes\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .map((c: any) => ({\n body: c.body || '',\n author: c.author.login,\n reactions: c.reactions?.totalCount || 0,\n }))\n }\n }\n catch {\n // Non-critical — issues still useful without comments\n }\n}\n\n/**\n * Fetch issues from a GitHub repo sorted by reactions (upvotes).\n * Returns 75% open issues + 25% recently closed issues (within last year).\n * Filters noise (duplicates, stale, tracking) and deprioritizes feature requests.\n * Enriches top issues with their most-reacted comments via GraphQL.\n */\nexport async function fetchGitHubIssues(\n owner: string,\n repo: string,\n limit = 30,\n releasedAt?: string,\n): Promise<GitHubIssue[]> {\n if (!isGhAvailable())\n return []\n\n const openCount = Math.ceil(limit * 0.75)\n const closedCount = limit - openCount\n\n try {\n const open = fetchIssuesByState(owner, repo, 'open', openCount, releasedAt)\n const closed = fetchIssuesByState(owner, repo, 'closed', closedCount, releasedAt)\n const all = [...open, ...closed]\n enrichWithComments(owner, repo, all)\n return all\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single issue as markdown with YAML frontmatter\n */\nexport function formatIssueAsMarkdown(issue: GitHubIssue): string {\n const limit = bodyLimit(issue.reactions)\n const fmFields: Record<string, string | number | boolean | undefined> = {\n number: issue.number,\n title: issue.title,\n type: issue.type,\n state: issue.state,\n created: isoDate(issue.createdAt),\n url: issue.url,\n reactions: issue.reactions,\n comments: issue.comments,\n }\n if (issue.labels.length > 0)\n fmFields.labels = `[${issue.labels.join(', ')}]`\n const fm = buildFrontmatter(fmFields)\n\n const lines = [fm, '', `# ${issue.title}`]\n\n if (issue.body) {\n const body = issue.body.length > limit\n ? 
`${issue.body.slice(0, limit)}...`\n : issue.body\n lines.push('', body)\n }\n\n if (issue.topComments.length > 0) {\n lines.push('', '---', '', '## Top Comments')\n for (const c of issue.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const commentBody = c.body.length > 600\n ? `${c.body.slice(0, 600)}...`\n : c.body\n lines.push('', `**@${c.author}**${reactions}:`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all issues for quick LLM scanning.\n * Groups by type so the LLM can quickly find bugs vs questions.\n */\nexport function generateIssueIndex(issues: GitHubIssue[]): string {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n const list = byType.get(issue.type) || []\n list.push(issue)\n byType.set(issue.type, list)\n }\n\n const typeLabels: Record<IssueType, string> = {\n bug: 'Bugs & Regressions',\n question: 'Questions & Usage Help',\n docs: 'Documentation',\n feature: 'Feature Requests',\n other: 'Other',\n }\n\n const typeOrder: IssueType[] = ['bug', 'question', 'docs', 'other', 'feature']\n\n const fm = [\n '---',\n `total: ${issues.length}`,\n `open: ${issues.filter(i => i.state === 'open').length}`,\n `closed: ${issues.filter(i => i.state !== 'open').length}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Issues Index', '']\n\n for (const type of typeOrder) {\n const group = byType.get(type)\n if (!group?.length)\n continue\n sections.push(`## ${typeLabels[type]} (${group.length})`, '')\n for (const issue of group) {\n const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : ''\n const state = issue.state === 'open' ? '' : ' [closed]'\n sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state}`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Shared utilities for doc resolution\n */\n\nimport { ofetch } from 'ofetch'\n\nexport const $fetch = ofetch.create({\n retry: 3,\n retryDelay: 500,\n timeout: 15_000,\n headers: { 'User-Agent': 'skilld/1.0' },\n})\n\n/**\n * Fetch text content from URL\n */\nexport async function fetchText(url: string): Promise<string | null> {\n return $fetch(url, { responseType: 'text' }).catch(() => null)\n}\n\n/**\n * Verify URL exists and is not HTML (likely 404 page)\n */\nexport async function verifyUrl(url: string): Promise<boolean> {\n const res = await $fetch.raw(url, { method: 'HEAD' }).catch(() => null)\n if (!res)\n return false\n const contentType = res.headers.get('content-type') || ''\n return !contentType.includes('text/html')\n}\n\n/**\n * Check if URL points to a social media or package registry site (not real docs)\n */\nconst USELESS_HOSTS = new Set([\n 'twitter.com',\n 'x.com',\n 'facebook.com',\n 'linkedin.com',\n 'youtube.com',\n 'instagram.com',\n 'npmjs.com',\n 'www.npmjs.com',\n 'yarnpkg.com',\n])\n\nexport function isUselessDocsUrl(url: string): boolean {\n try {\n const { hostname } = new URL(url)\n return USELESS_HOSTS.has(hostname)\n }\n catch { return false }\n}\n\n/**\n * Check if URL is a GitHub repo URL (not a docs site)\n */\nexport function isGitHubRepoUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n return parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com'\n }\n catch {\n return false\n }\n}\n\n/**\n * Parse owner/repo from GitHub URL\n */\nexport function parseGitHubUrl(url: string): { owner: string, repo: string } | null {\n const match = 
url.match(/github\\.com\\/([^/]+)\\/([^/]+?)(?:\\.git)?(?:[/#]|$)/)\n if (!match)\n return null\n return { owner: match[1]!, repo: match[2]! }\n}\n\n/**\n * Normalize git repo URL to https\n */\nexport function normalizeRepoUrl(url: string): string {\n return url\n .replace(/^git\\+/, '')\n .replace(/#.*$/, '')\n .replace(/\\.git$/, '')\n .replace(/^git:\\/\\//, 'https://')\n .replace(/^ssh:\\/\\/git@github\\.com/, 'https://github.com')\n // SSH format: git@github.com:owner/repo\n .replace(/^git@github\\.com:/, 'https://github.com/')\n}\n\n/**\n * Extract branch hint from URL fragment (e.g. \"git+https://...#main\" → \"main\")\n */\nexport function extractBranchHint(url: string): string | undefined {\n const hash = url.indexOf('#')\n if (hash === -1)\n return undefined\n const fragment = url.slice(hash + 1)\n // Ignore non-branch fragments like \"readme\"\n if (!fragment || fragment === 'readme')\n return undefined\n return fragment\n}\n","/**\n * GitHub release notes fetching via gh CLI (preferred) with ungh.cc fallback\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { isoDate } from './github-common'\nimport { isGhAvailable } from './issues'\nimport { $fetch } from './utils'\n\nexport interface GitHubRelease {\n id: number\n tag: string\n name: string\n prerelease: boolean\n createdAt: string\n publishedAt: string\n markdown: string\n}\n\ninterface UnghReleasesResponse {\n releases: GitHubRelease[]\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\nexport interface SemVer {\n major: number\n minor: number\n patch: number\n raw: string\n}\n\nexport function parseSemver(version: string): SemVer | null {\n const clean = version.replace(/^v/, '')\n const match = clean.match(/^(\\d+)(?:\\.(\\d+))?(?:\\.(\\d+))?/)\n if (!match)\n return null\n return {\n major: +match[1]!,\n minor: match[2] ? +match[2] : 0,\n patch: match[3] ? 
+match[3] : 0,\n raw: clean,\n }\n}\n\n/**\n * Extract version from a release tag, handling monorepo formats:\n * - `pkg@1.2.3` → `1.2.3`\n * - `pkg-v1.2.3` → `1.2.3`\n * - `v1.2.3` → `1.2.3`\n * - `1.2.3` → `1.2.3`\n */\nfunction extractVersion(tag: string, packageName?: string): string | null {\n if (packageName) {\n // Monorepo: pkg@version or pkg-vversion\n const atMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}@(.+)$`))\n if (atMatch)\n return atMatch[1]!\n const dashMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}-v?(.+)$`))\n if (dashMatch)\n return dashMatch[1]!\n }\n // Standard: v1.2.3 or 1.2.3\n return tag.replace(/^v/, '')\n}\n\nfunction escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n/**\n * Check if a release tag belongs to a specific package\n */\nfunction tagMatchesPackage(tag: string, packageName: string): boolean {\n // Exact match: pkg@version or pkg-vversion\n return tag.startsWith(`${packageName}@`) || tag.startsWith(`${packageName}-v`) || tag.startsWith(`${packageName}-`)\n}\n\nexport function compareSemver(a: SemVer, b: SemVer): number {\n if (a.major !== b.major)\n return a.major - b.major\n if (a.minor !== b.minor)\n return a.minor - b.minor\n return a.patch - b.patch\n}\n\n/**\n * Fetch releases via gh CLI (fast, authenticated, paginated)\n */\nfunction fetchReleasesViaGh(owner: string, repo: string): GitHubRelease[] {\n try {\n const { stdout: json } = spawnSync('gh', [\n 'api',\n `repos/${owner}/${repo}/releases?per_page=100`,\n '--jq',\n '[.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}]',\n ], { encoding: 'utf-8', timeout: 15_000, stdio: ['ignore', 'pipe', 'ignore'] })\n if (!json)\n return []\n return JSON.parse(json) as GitHubRelease[]\n }\n catch {\n return []\n }\n}\n\n/**\n * Fetch all releases from a GitHub repo via ungh.cc (fallback)\n */\nasync function fetchReleasesViaUngh(owner: string, repo: string): Promise<GitHubRelease[]> {\n const data = await $fetch<UnghReleasesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n { signal: AbortSignal.timeout(15_000) },\n ).catch(() => null)\n return data?.releases ?? []\n}\n\n/**\n * Fetch all releases — gh CLI first, ungh.cc fallback\n */\nasync function fetchAllReleases(owner: string, repo: string): Promise<GitHubRelease[]> {\n if (isGhAvailable()) {\n const releases = fetchReleasesViaGh(owner, repo)\n if (releases.length > 0)\n return releases\n }\n return fetchReleasesViaUngh(owner, repo)\n}\n\n/**\n * Select last 20 stable releases for a package, sorted newest first.\n * For monorepos, filters to package-specific tags (pkg@version).\n * Falls back to generic tags (v1.2.3) only if no package-specific found.\n * If installedVersion is provided, filters out releases newer than it.\n */\nexport function selectReleases(releases: GitHubRelease[], packageName?: string, installedVersion?: string): GitHubRelease[] {\n // Check if this looks like a monorepo (has package-prefixed tags)\n const hasMonorepoTags = packageName && releases.some(r => tagMatchesPackage(r.tag, packageName))\n const installedSv = installedVersion ? parseSemver(installedVersion) : null\n\n const filtered = releases.filter((r) => {\n if (r.prerelease)\n return false\n\n const ver = extractVersion(r.tag, hasMonorepoTags ? 
packageName : undefined)\n if (!ver)\n return false\n\n const sv = parseSemver(ver)\n if (!sv)\n return false\n\n // Monorepo: only include tags for this package\n if (hasMonorepoTags && packageName && !tagMatchesPackage(r.tag, packageName))\n return false\n\n // Filter out releases newer than installed version\n if (installedSv && compareSemver(sv, installedSv) > 0)\n return false\n\n return true\n })\n\n return filtered\n .sort((a, b) => {\n const verA = extractVersion(a.tag, hasMonorepoTags ? packageName : undefined)\n const verB = extractVersion(b.tag, hasMonorepoTags ? packageName : undefined)\n if (!verA || !verB)\n return 0\n return compareSemver(parseSemver(verB)!, parseSemver(verA)!)\n })\n .slice(0, 20)\n}\n\n/**\n * Format a release as markdown with YAML frontmatter\n */\nfunction formatRelease(release: GitHubRelease, packageName?: string): string {\n const date = isoDate(release.publishedAt || release.createdAt)\n const version = extractVersion(release.tag, packageName) || release.tag\n\n const fm = [\n '---',\n `tag: ${release.tag}`,\n `version: ${version}`,\n `published: ${date}`,\n ]\n if (release.name && release.name !== release.tag)\n fm.push(`name: \"${release.name.replace(/\"/g, '\\\\\"')}\"`)\n fm.push('---')\n\n return `${fm.join('\\n')}\\n\\n# ${release.name || release.tag}\\n\\n${release.markdown}`\n}\n\nexport interface ReleaseIndexOptions {\n releases: GitHubRelease[]\n packageName?: string\n blogReleases?: Array<{ version: string, title: string, date: string }>\n hasChangelog?: boolean\n}\n\n/**\n * Generate a unified summary index of all releases for quick LLM scanning.\n * Includes GitHub releases, blog release posts, and CHANGELOG link.\n */\nexport function generateReleaseIndex(releasesOrOpts: GitHubRelease[] | ReleaseIndexOptions, packageName?: string): string {\n // Support both old signature and new options object\n const opts: ReleaseIndexOptions = Array.isArray(releasesOrOpts)\n ? { releases: releasesOrOpts, packageName }\n : releasesOrOpts\n\n const { releases, blogReleases, hasChangelog } = opts\n const pkg = opts.packageName\n\n const total = releases.length + (blogReleases?.length ?? 0)\n const fm = [\n '---',\n `total: ${total}`,\n `latest: ${releases[0]?.tag || 'unknown'}`,\n '---',\n ]\n\n const lines: string[] = [fm.join('\\n'), '', '# Releases Index', '']\n\n // Blog release posts (major version announcements)\n if (blogReleases && blogReleases.length > 0) {\n lines.push('## Blog Releases', '')\n for (const b of blogReleases) {\n lines.push(`- [${b.version}](./blog-${b.version}.md): ${b.title} (${b.date})`)\n }\n lines.push('')\n }\n\n // GitHub release notes\n if (releases.length > 0) {\n if (blogReleases && blogReleases.length > 0)\n lines.push('## Release Notes', '')\n for (const r of releases) {\n const date = isoDate(r.publishedAt || r.createdAt)\n const filename = r.tag.includes('@') || r.tag.startsWith('v') ? r.tag : `v${r.tag}`\n const version = extractVersion(r.tag, pkg) || r.tag\n const sv = parseSemver(version)\n const label = sv?.patch === 0 && sv.minor === 0 ? ' **[MAJOR]**' : sv?.patch === 0 ? 
' **[MINOR]**' : ''\n lines.push(`- [${r.tag}](./${filename}.md): ${r.name || r.tag} (${date})${label}`)\n }\n lines.push('')\n }\n\n // CHANGELOG link\n if (hasChangelog) {\n lines.push('## Changelog', '')\n lines.push('- [CHANGELOG.md](./CHANGELOG.md)')\n lines.push('')\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Detect if releases are just short stubs redirecting to CHANGELOG.md.\n * Samples up to 3 releases — if all are short (<500 chars) and mention CHANGELOG, it's a redirect pattern.\n */\nexport function isChangelogRedirectPattern(releases: GitHubRelease[]): boolean {\n const sample = releases.slice(0, 3)\n if (sample.length === 0)\n return false\n return sample.every((r) => {\n const body = (r.markdown || '').trim()\n return body.length < 500 && /changelog\\.md/i.test(body)\n })\n}\n\n/**\n * Fetch CHANGELOG.md from a GitHub repo at a specific ref as fallback\n */\nasync function fetchChangelog(owner: string, repo: string, ref: string): Promise<string | null> {\n for (const filename of ['CHANGELOG.md', 'changelog.md', 'CHANGES.md']) {\n const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${filename}`\n const content = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (content)\n return content\n }\n return null\n}\n\n/**\n * Fetch release notes for a package. Returns CachedDoc[] with releases/{tag}.md files.\n *\n * Strategy:\n * 1. Fetch GitHub releases, filter to package-specific tags for monorepos\n * 2. If no releases found, try CHANGELOG.md as fallback\n */\nexport async function fetchReleaseNotes(\n owner: string,\n repo: string,\n installedVersion: string,\n gitRef?: string,\n packageName?: string,\n): Promise<CachedDoc[]> {\n const releases = await fetchAllReleases(owner, repo)\n const selected = selectReleases(releases, packageName, installedVersion)\n\n if (selected.length > 0) {\n // Detect changelog-redirect pattern: short stubs that just link to CHANGELOG.md\n // Sample up to 3 releases to check\n if (isChangelogRedirectPattern(selected)) {\n const ref = gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref)\n if (changelog)\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n }\n\n const docs = selected.map((r) => {\n const filename = r.tag.includes('@') || r.tag.startsWith('v')\n ? 
r.tag\n : `v${r.tag}`\n return {\n path: `releases/${filename}.md`,\n content: formatRelease(r, packageName),\n }\n })\n\n // Also fetch CHANGELOG.md alongside individual releases (unless redirect pattern)\n const ref = gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref)\n if (changelog && changelog.length < 500_000) {\n docs.push({ path: 'releases/CHANGELOG.md', content: changelog })\n }\n\n return docs\n }\n\n // Fallback: CHANGELOG.md (indexed as single file)\n const ref = gitRef || 'main'\n const changelog = await fetchChangelog(owner, repo, ref)\n if (!changelog)\n return []\n\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n}\n","/**\n * Blog release notes fetching for packages with curated blog releases\n * Supports version filtering and extensible for multiple packages\n */\n\nimport type { BlogRelease } from './package-registry'\nimport { htmlToMarkdown } from 'mdream'\nimport { getBlogPreset } from './package-registry'\nimport { compareSemver, parseSemver } from './releases'\nimport { $fetch } from './utils'\n\nexport interface BlogReleasePost {\n version: string\n title: string\n date: string\n markdown: string\n url: string\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\n/**\n * Parse version from blog URL\n * Handles: https://blog.vuejs.org/posts/vue-3-5 → 3.5\n */\nfunction parseVersionFromUrl(url: string): string | null {\n const match = url.match(/\\/posts\\/\\w+-(\\d+)-(\\d+)/)\n if (match)\n return `${match[1]}.${match[2]}`\n return null\n}\n\n/**\n * Format a blog release as markdown with YAML frontmatter\n */\nfunction formatBlogRelease(release: BlogReleasePost): string {\n const fm = [\n '---',\n `version: ${release.version}`,\n `title: \"${release.title.replace(/\"/g, '\\\\\"')}\"`,\n `date: ${release.date}`,\n `url: ${release.url}`,\n `source: blog-release`,\n '---',\n ]\n\n return `${fm.join('\\n')}\\n\\n# ${release.title}\\n\\n${release.markdown}`\n}\n\n/**\n * Fetch and parse a single blog post\n */\nasync function fetchBlogPost(url: string): Promise<BlogReleasePost | null> {\n try {\n const html = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (!html)\n return null\n\n // Extract version from URL\n const version = parseVersionFromUrl(url)\n if (!version)\n return null\n\n // Extract title from <h1> or <title>\n let title = ''\n const titleMatch = html.match(/<h1[^>]*>([^<]+)<\\/h1>/)\n if (titleMatch)\n title = titleMatch[1]!.trim()\n\n // If no h1, try meta title\n if (!title) {\n const metaTitleMatch = html.match(/<title>([^<]+)<\\/title>/)\n if (metaTitleMatch)\n title = metaTitleMatch[1]!.trim()\n }\n\n // Extract date from article metadata or ISO date pattern\n let date = new Date().toISOString().split('T')[0]!\n const dateMatch = html.match(/(?:published|date|posted)[\"']?\\s*:\\s*[\"']?(\\d{4}-\\d{2}-\\d{2})/)\n if (dateMatch)\n date = dateMatch[1]!\n\n // Convert HTML to markdown using mdream\n const markdown = htmlToMarkdown(html)\n if (!markdown)\n return null\n\n return {\n version,\n title: title || `Release ${version}`,\n date,\n markdown,\n url,\n }\n }\n catch {\n return null\n }\n}\n\n/**\n * Filter blog releases by installed version\n * Only includes releases where version <= installedVersion\n * Returns all releases if version parsing fails (fail-safe)\n */\nfunction filterBlogsByVersion(entries: BlogRelease[], installedVersion: string): BlogRelease[] {\n const installedSv = parseSemver(installedVersion)\n if 
(!installedSv)\n return entries // Fail-safe: include all if version parsing fails\n\n return entries.filter((entry) => {\n const entrySv = parseSemver(entry.version)\n if (!entrySv)\n return false\n // Include only releases where version <= installed version\n return compareSemver(entrySv, installedSv) <= 0\n })\n}\n\n/**\n * Fetch blog release notes from package presets\n * Filters to only releases matching or older than the installed version\n * Returns CachedDoc[] with releases/blog-{version}.md files\n */\nexport async function fetchBlogReleases(\n packageName: string,\n installedVersion: string,\n): Promise<CachedDoc[]> {\n const preset = getBlogPreset(packageName)\n if (!preset)\n return []\n\n const filteredReleases = filterBlogsByVersion(preset.releases, installedVersion)\n if (filteredReleases.length === 0)\n return []\n\n const releases: BlogReleasePost[] = []\n\n // Fetch all blog posts in parallel with 3 concurrent requests\n const batchSize = 3\n for (let i = 0; i < filteredReleases.length; i += batchSize) {\n const batch = filteredReleases.slice(i, i + batchSize)\n const results = await Promise.all(batch.map(entry => fetchBlogPost(entry.url)))\n for (const result of results) {\n if (result)\n releases.push(result)\n }\n }\n\n if (releases.length === 0)\n return []\n\n // Sort by version descending (newest first)\n releases.sort((a, b) => {\n const aVer = a.version.split('.').map(Number)\n const bVer = b.version.split('.').map(Number)\n for (let i = 0; i < Math.max(aVer.length, bVer.length); i++) {\n const diff = (bVer[i] ?? 0) - (aVer[i] ?? 0)\n if (diff !== 0)\n return diff\n }\n return 0\n })\n\n // Format as cached docs — stored in releases/ alongside regular releases\n return releases.map(r => ({\n path: `releases/blog-${r.version}.md`,\n content: formatBlogRelease(r),\n }))\n}\n","/**\n * GitHub discussions fetching via gh CLI GraphQL\n * Prioritizes Q&A and Help categories, includes accepted answers\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common'\nimport { isGhAvailable } from './issues'\n\n/** Categories most useful for skill generation (in priority order) */\nconst HIGH_VALUE_CATEGORIES = new Set([\n 'q&a',\n 'help',\n 'troubleshooting',\n 'support',\n])\n\nconst LOW_VALUE_CATEGORIES = new Set([\n 'show and tell',\n 'ideas',\n 'polls',\n])\n\nexport interface DiscussionComment {\n body: string\n author: string\n}\n\nexport interface GitHubDiscussion {\n number: number\n title: string\n body: string\n category: string\n createdAt: string\n url: string\n upvoteCount: number\n comments: number\n answer?: string\n topComments: DiscussionComment[]\n}\n\n/**\n * Fetch discussions from a GitHub repo using gh CLI GraphQL.\n * Prioritizes Q&A and Help categories. Includes accepted answer body for answered discussions.\n */\nexport async function fetchGitHubDiscussions(\n owner: string,\n repo: string,\n limit = 20,\n releasedAt?: string,\n): Promise<GitHubDiscussion[]> {\n if (!isGhAvailable())\n return []\n\n // GraphQL discussions endpoint doesn't support date filtering,\n // so we fetch latest N and filter client-side. Skip entirely\n // if the cutoff is in the past — results would be empty anyway.\n if (releasedAt) {\n const cutoff = new Date(releasedAt)\n cutoff.setMonth(cutoff.getMonth() + 6)\n if (cutoff < new Date())\n return []\n }\n\n try {\n // Fetch more to compensate for filtering\n const fetchCount = Math.min(limit * 3, 80)\n const query = `query($owner: String!, $repo: String!) 
{ repository(owner: $owner, name: $repo) { discussions(first: ${fetchCount}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 3) { totalCount nodes { body author { login } } } answer { body } author { login } } } } }`\n\n const { stdout: result } = spawnSync('gh', ['api', 'graphql', '-f', `query=${query}`, '-f', `owner=${owner}`, '-f', `repo=${repo}`], {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024,\n })\n if (!result)\n return []\n\n const data = JSON.parse(result)\n const nodes = data?.data?.repository?.discussions?.nodes\n if (!Array.isArray(nodes))\n return []\n\n const discussions = nodes\n .filter((d: any) => d.author && !BOT_USERS.has(d.author.login))\n .filter((d: any) => {\n const cat = (d.category?.name || '').toLowerCase()\n return !LOW_VALUE_CATEGORIES.has(cat)\n })\n .map((d: any) => ({\n number: d.number,\n title: d.title,\n body: d.body || '',\n category: d.category?.name || '',\n createdAt: d.createdAt,\n url: d.url,\n upvoteCount: d.upvoteCount || 0,\n comments: d.comments?.totalCount || 0,\n answer: d.answer?.body || undefined,\n topComments: (d.comments?.nodes || [])\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .map((c: any) => ({ body: c.body || '', author: c.author.login })),\n }))\n // Prioritize high-value categories, then sort by engagement\n .sort((a: GitHubDiscussion, b: GitHubDiscussion) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.category.toLowerCase()) ? 1 : 0\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.category.toLowerCase()) ? 1 : 0\n if (aHigh !== bHigh)\n return bHigh - aHigh\n return (b.upvoteCount + b.comments) - (a.upvoteCount + a.comments)\n })\n .slice(0, limit)\n\n return discussions\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single discussion as markdown with YAML frontmatter\n */\nexport function formatDiscussionAsMarkdown(d: GitHubDiscussion): string {\n const fm = buildFrontmatter({\n number: d.number,\n title: d.title,\n category: d.category,\n created: isoDate(d.createdAt),\n url: d.url,\n upvotes: d.upvoteCount,\n comments: d.comments,\n answered: !!d.answer,\n })\n\n const bodyLimit = d.upvoteCount >= 5 ? 1500 : 800\n const lines = [fm, '', `# ${d.title}`]\n\n if (d.body) {\n const body = d.body.length > bodyLimit\n ? `${d.body.slice(0, bodyLimit)}...`\n : d.body\n lines.push('', body)\n }\n\n if (d.answer) {\n const answerLimit = 1000\n const answer = d.answer.length > answerLimit\n ? `${d.answer.slice(0, answerLimit)}...`\n : d.answer\n lines.push('', '---', '', '## Accepted Answer', '', answer)\n }\n else if (d.topComments.length > 0) {\n // No accepted answer — include top comments as context\n lines.push('', '---', '', '## Top Comments')\n for (const c of d.topComments) {\n const commentBody = c.body.length > 600\n ? 
`${c.body.slice(0, 600)}...`\n : c.body\n lines.push('', `**@${c.author}:**`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all discussions for quick LLM scanning.\n * Groups by category so the LLM can quickly find Q&A vs general discussions.\n */\nexport function generateDiscussionIndex(discussions: GitHubDiscussion[]): string {\n const byCategory = new Map<string, GitHubDiscussion[]>()\n for (const d of discussions) {\n const cat = d.category || 'Uncategorized'\n const list = byCategory.get(cat) || []\n list.push(d)\n byCategory.set(cat, list)\n }\n\n const answered = discussions.filter(d => d.answer).length\n\n const fm = [\n '---',\n `total: ${discussions.length}`,\n `answered: ${answered}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Discussions Index', '']\n\n // Sort categories: high-value first\n const cats = [...byCategory.keys()].sort((a, b) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1\n return aHigh - bHigh || a.localeCompare(b)\n })\n\n for (const cat of cats) {\n const group = byCategory.get(cat)!\n sections.push(`## ${cat} (${group.length})`, '')\n for (const d of group) {\n const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : ''\n const answered = d.answer ? ' [answered]' : ''\n sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered}`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Globs .d.ts type definition files from a package for search indexing.\n * Only types — source code is too verbose.\n */\nimport { existsSync, readFileSync } from 'node:fs'\nimport { globby } from 'globby'\nimport { join } from 'pathe'\n\nexport interface EntryFile {\n path: string\n content: string\n type: 'types' | 'source'\n}\n\nconst SKIP_DIRS = [\n 'node_modules',\n '_vendor',\n '__tests__',\n '__mocks__',\n '__fixtures__',\n 'test',\n 'tests',\n 'fixture',\n 'fixtures',\n 'locales',\n 'locale',\n 'i18n',\n '.git',\n]\n\nconst SKIP_PATTERNS = [\n '*.min.*',\n '*.prod.*',\n '*.global.*',\n '*.browser.*',\n '*.map',\n '*.map.js',\n 'CHANGELOG*',\n 'LICENSE*',\n 'README*',\n]\n\nconst MAX_FILE_SIZE = 500 * 1024 // 500KB per file\n\n/**\n * Glob .d.ts type definition files from a package directory, skipping junk.\n */\nexport async function resolveEntryFiles(packageDir: string): Promise<EntryFile[]> {\n if (!existsSync(join(packageDir, 'package.json')))\n return []\n\n const ignore = [\n ...SKIP_DIRS.map(d => `**/${d}/**`),\n ...SKIP_PATTERNS,\n ]\n\n const files = await globby(['**/*.d.{ts,mts,cts}'], {\n cwd: packageDir,\n ignore,\n absolute: false,\n })\n\n const entries: EntryFile[] = []\n\n for (const file of files) {\n const absPath = join(packageDir, file)\n let content: string\n try {\n content = readFileSync(absPath, 'utf-8')\n }\n catch {\n continue\n }\n\n if (content.length > MAX_FILE_SIZE)\n continue\n\n entries.push({ path: file, content, type: 'types' })\n }\n\n return entries\n}\n","/**\n * Git repo skill source — parse inputs + fetch pre-authored skills from repos\n *\n * Supports GitHub shorthand (owner/repo), full URLs, SSH, GitLab, and local paths.\n * Skills are pre-authored SKILL.md files — no doc resolution or LLM generation needed.\n */\n\nimport { existsSync, readdirSync, readFileSync } from 'node:fs'\nimport pLimit from 'p-limit'\nimport { resolve } from 'pathe'\nimport { yamlParseKV } from '../core/yaml'\nimport { $fetch, 
normalizeRepoUrl, parseGitHubUrl } from './utils'\n\nexport interface GitSkillSource {\n type: 'github' | 'gitlab' | 'git-ssh' | 'local'\n owner?: string\n repo?: string\n /** Direct path to a specific skill (from /tree/ref/path URLs) */\n skillPath?: string\n /** Branch/tag parsed from URL */\n ref?: string\n /** Absolute path for local sources */\n localPath?: string\n}\n\nexport interface RemoteSkill {\n /** From SKILL.md frontmatter `name` field, or directory name */\n name: string\n /** From SKILL.md frontmatter `description` field */\n description: string\n /** Path within repo (e.g., \"skills/web-design-guidelines\") */\n path: string\n /** Full SKILL.md content */\n content: string\n /** Supporting files (scripts/, references/, assets/) */\n files: Array<{ path: string, content: string }>\n}\n\n/**\n * Detect whether an input string is a git skill source.\n * Returns null for npm package names (including scoped @scope/pkg).\n */\nexport function parseGitSkillInput(input: string): GitSkillSource | null {\n const trimmed = input.trim()\n\n // Scoped npm packages → not git\n if (trimmed.startsWith('@'))\n return null\n\n // Local paths\n if (trimmed.startsWith('./') || trimmed.startsWith('../') || trimmed.startsWith('/') || trimmed.startsWith('~')) {\n const localPath = trimmed.startsWith('~')\n ? resolve(process.env.HOME || '', trimmed.slice(1))\n : resolve(trimmed)\n return { type: 'local', localPath }\n }\n\n // SSH format: git@github.com:owner/repo\n if (trimmed.startsWith('git@')) {\n const normalized = normalizeRepoUrl(trimmed)\n const gh = parseGitHubUrl(normalized)\n if (gh)\n return { type: 'github', owner: gh.owner, repo: gh.repo }\n return null\n }\n\n // Full URLs\n if (trimmed.startsWith('https://') || trimmed.startsWith('http://')) {\n return parseGitUrl(trimmed)\n }\n\n // GitHub shorthand: owner/repo (exactly one slash, no spaces, no commas)\n if (/^[\\w.-]+\\/[\\w.-]+$/.test(trimmed)) {\n return { type: 'github', owner: trimmed.split('/')[0], repo: trimmed.split('/')[1] }\n }\n\n // Everything else → npm\n return null\n}\n\nfunction parseGitUrl(url: string): GitSkillSource | null {\n try {\n const parsed = new URL(url)\n\n if (parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n\n // Handle /tree/ref/path URLs → extract specific skill path\n if (parts[2] === 'tree' && parts.length >= 4) {\n const ref = parts[3]\n const skillPath = parts.length > 4 ? 
parts.slice(4).join('/') : undefined\n return { type: 'github', owner, repo, ref, skillPath }\n }\n\n return { type: 'github', owner, repo }\n }\n\n if (parsed.hostname === 'gitlab.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n return { type: 'gitlab', owner, repo }\n }\n\n return null\n }\n catch {\n return null\n }\n}\n\n/**\n * Parse name and description from SKILL.md frontmatter.\n */\nexport function parseSkillFrontmatterName(content: string): { name?: string, description?: string } {\n const match = content.match(/^---\\n([\\s\\S]*?)\\n---/)\n if (!match)\n return {}\n\n const result: { name?: string, description?: string } = {}\n for (const line of match[1].split('\\n')) {\n const kv = yamlParseKV(line)\n if (!kv)\n continue\n if (kv[0] === 'name')\n result.name = kv[1]\n if (kv[0] === 'description')\n result.description = kv[1]\n }\n return result\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/** Supporting file dirs within a skill directory */\nconst SUPPORTING_DIRS = ['scripts', 'references', 'assets']\n\n/**\n * Fetch skills from a git source. Returns list of discovered skills + commit SHA.\n */\nexport async function fetchGitSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n if (source.type === 'local')\n return fetchLocalSkills(source)\n if (source.type === 'github')\n return fetchGitHubSkills(source, onProgress)\n if (source.type === 'gitlab')\n return fetchGitLabSkills(source, onProgress)\n return { skills: [] }\n}\n\n// ── Local ──\n\nfunction fetchLocalSkills(source: GitSkillSource): { skills: RemoteSkill[] } {\n const base = source.localPath!\n if (!existsSync(base))\n return { skills: [] }\n\n const skills: RemoteSkill[] = []\n\n // Check for skills/ subdirectory\n const skillsDir = resolve(base, 'skills')\n if (existsSync(skillsDir)) {\n for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {\n if (!entry.isDirectory())\n continue\n const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`)\n if (skill)\n skills.push(skill)\n }\n }\n\n // Check for root SKILL.md\n if (skills.length === 0) {\n const skill = readLocalSkill(base, '')\n if (skill)\n skills.push(skill)\n }\n\n return { skills }\n}\n\nfunction readLocalSkill(dir: string, repoPath: string): RemoteSkill | null {\n const skillMdPath = resolve(dir, 'SKILL.md')\n if (!existsSync(skillMdPath))\n return null\n\n const content = readFileSync(skillMdPath, 'utf-8')\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = dir.split('/').pop()!\n const name = frontmatter.name || dirName\n\n const files: Array<{ path: string, content: string }> = []\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPath = resolve(dir, subdir)\n if (!existsSync(subdirPath))\n continue\n for (const file of readdirSync(subdirPath, { withFileTypes: true })) {\n if (!file.isFile())\n continue\n files.push({\n path: `${subdir}/${file.name}`,\n content: readFileSync(resolve(subdirPath, file.name), 'utf-8'),\n })\n }\n }\n\n return {\n name,\n description: frontmatter.description || '',\n path: repoPath,\n content,\n files,\n }\n}\n\n// ── GitHub ──\n\nasync function fetchGitHubSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: 
RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n\n if (!data?.files?.length) {\n // Try 'master' fallback if default ref failed\n if (ref === 'main') {\n const fallback = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/master`,\n ).catch(() => null)\n if (fallback?.files?.length)\n return extractGitHubSkills(owner!, repo!, 'master', fallback, source.skillPath, onProgress)\n }\n return { skills: [] }\n }\n\n return extractGitHubSkills(owner!, repo!, ref, data, source.skillPath, onProgress)\n}\n\nasync function extractGitHubSkills(\n owner: string,\n repo: string,\n ref: string,\n data: UnghFilesResponse,\n skillPath?: string,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const allFiles = data.files.map(f => f.path)\n const commitSha = data.meta?.sha\n\n // Find SKILL.md files\n let skillMdPaths: string[]\n\n if (skillPath) {\n // Direct skill path: look for SKILL.md at that path\n const candidates = [\n `${skillPath}/SKILL.md`,\n // In case they linked directly to the SKILL.md\n skillPath.endsWith('/SKILL.md') ? skillPath : null,\n ].filter(Boolean) as string[]\n\n skillMdPaths = allFiles.filter(f => candidates.includes(f))\n }\n else {\n // Discover: skills/*/SKILL.md or root SKILL.md\n skillMdPaths = allFiles.filter(f =>\n f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md',\n )\n }\n\n if (skillMdPaths.length === 0)\n return { skills: [], commitSha }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitHub(owner, repo, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitHub(owner, repo, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills, commitSha }\n}\n\nasync function fetchRawGitHub(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n\n// ── GitLab ──\n\ninterface GitLabTreeEntry {\n id: string\n name: string\n type: string\n path: string\n mode: string\n}\n\nasync function fetchGitLabSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n const projectId = encodeURIComponent(`${owner}/${repo}`)\n\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const tree = await $fetch<GitLabTreeEntry[]>(\n `https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=${ref}&recursive=true&per_page=100`,\n ).catch(() => null)\n\n if (!tree?.length)\n return { skills: [] }\n\n const allFiles = tree.filter(e => e.type === 'blob').map(e => e.path)\n\n // Find SKILL.md files\n const skillMdPaths = source.skillPath\n ? allFiles.filter(f => f === `${source.skillPath}/SKILL.md`)\n : allFiles.filter(f => f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md')\n\n if (skillMdPaths.length === 0)\n return { skills: [] }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitLab(owner!, repo!, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo!\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitLab(owner!, repo!, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills }\n}\n\nasync function fetchRawGitLab(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://gitlab.com/${owner}/${repo}/-/raw/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n","/**\n * GitHub/ungh README resolution + versioned docs\n */\n\nimport type { LlmsLink } from './types'\nimport { spawnSync } from 'node:child_process'\nimport { existsSync as fsExistsSync, readFileSync as fsReadFileSync } from 'node:fs'\nimport { fileURLToPath } from 'node:url'\nimport { isGhAvailable } from './issues'\nimport { getDocOverride } from './package-registry'\nimport { $fetch, extractBranchHint, fetchText, parseGitHubUrl } from './utils'\n\n/** Minimum git-doc file count to prefer over llms.txt */\nexport const MIN_GIT_DOCS = 5\n\n/** True when git-docs exist but are too few to be useful (< MIN_GIT_DOCS) */\nexport const isShallowGitDocs = (n: number) => n > 0 && n < MIN_GIT_DOCS\n\nexport interface GitDocsResult {\n /** URL pattern for fetching docs (use with ref) */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of doc file paths relative to repo root */\n files: string[]\n /** Prefix to strip when normalizing paths to docs/ (e.g. 'apps/evalite-docs/src/content/') for nested monorepo docs */\n docsPrefix?: string\n /** Full repo file tree — only set when discoverDocFiles() heuristic was used (not standard docs/ prefix) */\n allFiles?: string[]\n /** True when ref is a branch (main/master) rather than a version-specific tag */\n fallback?: boolean\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/**\n * List files at a git ref using ungh (no rate limits)\n */\nasync function listFilesAtRef(owner: string, repo: string, ref: string): Promise<string[]> {\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n return data?.files?.map(f => f.path) ?? 
[]\n}\n\ninterface TagResult {\n ref: string\n files: string[]\n /** True when ref is a branch fallback (main/master) rather than a version tag */\n fallback?: boolean\n}\n\n/**\n * Find git tag for a version by checking if ungh can list files at that ref.\n * Tries v{version}, {version}, and optionally {packageName}@{version} (changeset convention).\n */\nasync function findGitTag(owner: string, repo: string, version: string, packageName?: string, branchHint?: string): Promise<TagResult | null> {\n const candidates = [`v${version}`, version]\n if (packageName)\n candidates.push(`${packageName}@${version}`)\n\n for (const tag of candidates) {\n const files = await listFilesAtRef(owner, repo, tag)\n if (files.length > 0)\n return { ref: tag, files }\n }\n\n // Fallback: find latest release tag matching {packageName}@* (version mismatch in monorepos)\n if (packageName) {\n const latestTag = await findLatestReleaseTag(owner, repo, packageName)\n if (latestTag) {\n const files = await listFilesAtRef(owner, repo, latestTag)\n if (files.length > 0)\n return { ref: latestTag, files }\n }\n }\n\n // Last resort: try default branch (prefer hint from repo URL fragment)\n const branches = branchHint\n ? [branchHint, ...['main', 'master'].filter(b => b !== branchHint)]\n : ['main', 'master']\n for (const branch of branches) {\n const files = await listFilesAtRef(owner, repo, branch)\n if (files.length > 0)\n return { ref: branch, files, fallback: true }\n }\n\n return null\n}\n\n/**\n * Find the latest release tag matching `{packageName}@*` via ungh releases API.\n * Handles monorepos where npm version doesn't match git tag version.\n */\nasync function findLatestReleaseTag(owner: string, repo: string, packageName: string): Promise<string | null> {\n const data = await $fetch<{ releases?: Array<{ tag: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n const prefix = `${packageName}@`\n return data?.releases?.find(r => r.tag.startsWith(prefix))?.tag ?? null\n}\n\n/**\n * Filter file paths by prefix and md/mdx extension\n */\nfunction filterDocFiles(files: string[], pathPrefix: string): string[] {\n return files.filter(f => f.startsWith(pathPrefix) && /\\.(?:md|mdx)$/.test(f))\n}\n\n/** Known noise paths to exclude from doc discovery */\nconst NOISE_PATTERNS = [\n /^\\.changeset\\//,\n /CHANGELOG\\.md$/i,\n /CONTRIBUTING\\.md$/i,\n /^\\.github\\//,\n]\n\n/** Directories to exclude from \"best directory\" heuristic */\nconst EXCLUDE_DIRS = new Set([\n 'test',\n 'tests',\n '__tests__',\n 'fixtures',\n 'fixture',\n 'examples',\n 'example',\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n 'e2e',\n 'spec',\n 'mocks',\n '__mocks__',\n])\n\n/** Directory names that suggest documentation */\nconst DOC_DIR_BONUS = new Set([\n 'docs',\n 'documentation',\n 'pages',\n 'content',\n 'website',\n 'guide',\n 'guides',\n 'wiki',\n 'manual',\n 'api',\n])\n\ninterface DiscoveredDocs {\n files: string[]\n /** Prefix before 'docs/' to strip when normalizing (e.g. 
'apps/evalite-docs/src/content/') */\n prefix: string\n}\n\n/**\n * Check if a path contains any excluded directory\n */\nfunction hasExcludedDir(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => EXCLUDE_DIRS.has(p.toLowerCase()))\n}\n\n/**\n * Get the depth of a path (number of directory levels)\n */\nfunction getPathDepth(path: string): number {\n return path.split('/').filter(Boolean).length\n}\n\n/**\n * Check if path contains a doc-related directory name\n */\nfunction hasDocDirBonus(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => DOC_DIR_BONUS.has(p.toLowerCase()))\n}\n\n/**\n * Score a directory for doc likelihood.\n * Higher = better. Formula: count * nameBonus / depth\n */\nfunction scoreDocDir(dir: string, fileCount: number): number {\n const depth = getPathDepth(dir) || 1\n const nameBonus = hasDocDirBonus(dir) ? 1.5 : 1\n return (fileCount * nameBonus) / depth\n}\n\n/**\n * Discover doc files in non-standard locations.\n * First tries to find clusters of md/mdx files in paths containing /docs/.\n * Falls back to finding the directory with the most markdown files (≥5).\n */\nfunction discoverDocFiles(allFiles: string[]): DiscoveredDocs | null {\n const mdFiles = allFiles\n .filter(f => /\\.(?:md|mdx)$/.test(f))\n .filter(f => !NOISE_PATTERNS.some(p => p.test(f)))\n .filter(f => f.includes('/'))\n\n // Strategy 1: Look for /docs/ clusters (existing behavior)\n const docsGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n const docsIdx = file.lastIndexOf('/docs/')\n if (docsIdx === -1)\n continue\n\n const prefix = file.slice(0, docsIdx + '/docs/'.length)\n const group = docsGroups.get(prefix) || []\n group.push(file)\n docsGroups.set(prefix, group)\n }\n\n if (docsGroups.size > 0) {\n const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0]!\n if (largest[1].length >= 3) {\n const fullPrefix = largest[0]\n const docsIdx = fullPrefix.lastIndexOf('docs/')\n const stripPrefix = docsIdx > 0 ? fullPrefix.slice(0, docsIdx) : ''\n return { files: largest[1], prefix: stripPrefix }\n }\n }\n\n // Strategy 2: Find best directory by file count (for non-standard structures)\n const dirGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n if (hasExcludedDir(file))\n continue\n\n // Group by immediate parent directory\n const lastSlash = file.lastIndexOf('/')\n if (lastSlash === -1)\n continue\n\n const dir = file.slice(0, lastSlash + 1)\n const group = dirGroups.get(dir) || []\n group.push(file)\n dirGroups.set(dir, group)\n }\n\n if (dirGroups.size === 0)\n return null\n\n // Score and sort directories\n const scored = [...dirGroups.entries()]\n .map(([dir, files]) => ({ dir, files, score: scoreDocDir(dir, files.length) }))\n .filter(d => d.files.length >= 5) // Minimum threshold\n .sort((a, b) => b.score - a.score)\n\n if (scored.length === 0)\n return null\n\n const best = scored[0]!\n\n // For non-docs paths, the prefix is everything up to (but not including) the final dir\n // e.g. 
'website/pages/' would give prefix 'website/' and files like 'pages/...',\n // but we keep the full directory as the prefix so downstream can strip it entirely\n return { files: best.files, prefix: best.dir }\n}\n\n/**\n * List markdown files in a folder at a specific git ref\n */\nasync function listDocsAtRef(owner: string, repo: string, ref: string, pathPrefix = 'docs/'): Promise<string[]> {\n const files = await listFilesAtRef(owner, repo, ref)\n return filterDocFiles(files, pathPrefix)\n}\n\n/**\n * Fetch versioned docs from GitHub repo's docs/ folder.\n * Pass packageName to check doc overrides (e.g. vue -> vuejs/docs).\n */\nexport async function fetchGitDocs(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitDocsResult | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override) {\n const ref = override.ref || 'main'\n const fallback = !override.ref\n const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`)\n if (files.length === 0)\n return null\n return {\n baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,\n ref,\n files,\n fallback,\n }\n }\n\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n let docs = filterDocFiles(tag.files, 'docs/')\n let docsPrefix: string | undefined\n let allFiles: string[] | undefined\n\n // Fallback: discover docs in nested paths (monorepos, content collections)\n if (docs.length === 0) {\n const discovered = discoverDocFiles(tag.files)\n if (discovered) {\n docs = discovered.files\n docsPrefix = discovered.prefix || undefined\n allFiles = tag.files\n }\n }\n\n if (docs.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files: docs,\n docsPrefix,\n allFiles,\n fallback: tag.fallback,\n }\n}\n\n/**\n * Strip file extension (.md, .mdx) and leading slash from a path\n */\nfunction normalizePath(p: string): string {\n return p.replace(/^\\//, '').replace(/\\.(?:md|mdx)$/, '')\n}\n\n/**\n * Validate that discovered git docs are relevant by cross-referencing llms.txt links\n * against the repo file tree. 
Uses extensionless suffix matching to handle monorepo nesting.\n *\n * Returns { isValid, matchRatio } where isValid = matchRatio >= 0.3\n */\nexport function validateGitDocsWithLlms(\n llmsLinks: LlmsLink[],\n repoFiles: string[],\n): { isValid: boolean, matchRatio: number } {\n if (llmsLinks.length === 0)\n return { isValid: true, matchRatio: 1 }\n\n // Sample up to 10 links\n const sample = llmsLinks.slice(0, 10)\n\n // Normalize llms link paths\n const normalizedLinks = sample.map((link) => {\n let path = link.url\n // Strip absolute URL to pathname\n if (path.startsWith('http')) {\n try {\n path = new URL(path).pathname\n }\n catch { /* keep as-is */ }\n }\n return normalizePath(path)\n })\n\n // Pre-process repo files: strip extensions to get extensionless paths\n const repoNormalized = new Set(repoFiles.map(normalizePath))\n\n let matches = 0\n for (const linkPath of normalizedLinks) {\n // Check if any repo file ends with this path (suffix matching for monorepo nesting)\n for (const repoPath of repoNormalized) {\n if (repoPath === linkPath || repoPath.endsWith(`/${linkPath}`)) {\n matches++\n break\n }\n }\n }\n\n const matchRatio = matches / sample.length\n return { isValid: matchRatio >= 0.3, matchRatio }\n}\n\n/**\n * Verify a GitHub repo is the source for an npm package by checking package.json name field.\n * Checks root first, then common monorepo paths (packages/{shortName}, packages/{name}).\n */\nasync function verifyNpmRepo(owner: string, repo: string, packageName: string): Promise<boolean> {\n const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`\n const shortName = packageName.replace(/^@.*\\//, '')\n const paths = [\n 'package.json',\n `packages/${shortName}/package.json`,\n `packages/${packageName.replace(/^@/, '').replace('/', '-')}/package.json`,\n ]\n for (const path of paths) {\n const text = await fetchText(`${base}/${path}`)\n if (!text)\n continue\n try {\n const pkg = JSON.parse(text) as { name?: string }\n if (pkg.name === packageName)\n return true\n }\n catch {}\n }\n return false\n}\n\nexport async function searchGitHubRepo(packageName: string): Promise<string | null> {\n // Try ungh heuristic first — check if repo name matches package name\n const shortName = packageName.replace(/^@.*\\//, '')\n for (const candidate of [packageName.replace(/^@/, ''), shortName]) {\n // Only try if it looks like owner/repo\n if (!candidate.includes('/')) {\n // Try common patterns: {name}/{name}\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${shortName}/${shortName}`\n continue\n }\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${candidate}`\n }\n\n // Try gh CLI — strip @ to avoid GitHub search syntax issues\n const searchTerm = packageName.replace(/^@/, '')\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['search', 'repos', searchTerm, '--json', 'fullName', '--limit', '5'], {\n encoding: 'utf-8',\n timeout: 15_000,\n })\n if (!json)\n throw new Error('no output')\n const repos = JSON.parse(json) as Array<{ fullName: string }>\n // Prefer exact suffix match\n const match = repos.find(r =>\n r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.fullName}`\n // Validate remaining 
results via package.json\n for (const candidate of repos) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.fullName}`\n }\n }\n catch {\n // fall through to REST API\n }\n }\n\n // Fallback: GitHub REST search API (no auth needed, but rate-limited)\n const query = encodeURIComponent(`${searchTerm} in:name`)\n const data = await $fetch<{ items?: Array<{ full_name: string }> }>(\n `https://api.github.com/search/repositories?q=${query}&per_page=5`,\n ).catch(() => null)\n if (!data?.items?.length)\n return null\n\n // Prefer exact suffix match\n const match = data.items.find(r =>\n r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.full_name}`\n\n // Validate remaining results via package.json\n for (const candidate of data.items) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.full_name}`\n }\n\n return null\n}\n\n/**\n * Fetch GitHub repo metadata to get website URL.\n * Pass packageName to check doc overrides first (avoids API call).\n */\nexport async function fetchGitHubRepoMeta(owner: string, repo: string, packageName?: string): Promise<{ homepage?: string } | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override?.homepage)\n return { homepage: override.homepage }\n\n // Prefer gh CLI to avoid rate limits\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '-q', '{homepage}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (!json)\n throw new Error('no output')\n const data = JSON.parse(json) as { homepage?: string }\n return data?.homepage ? { homepage: data.homepage } : null\n }\n catch {\n // fall through to fetch\n }\n }\n\n const data = await $fetch<{ homepage?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n return data?.homepage ? { homepage: data.homepage } : null\n}\n\n/**\n * Resolve README URL for a GitHub repo, returns ungh:// pseudo-URL or raw URL\n */\nexport async function fetchReadme(owner: string, repo: string, subdir?: string, ref?: string): Promise<string | null> {\n const branch = ref || 'main'\n\n // Try ungh first\n const unghUrl = subdir\n ? `https://ungh.cc/repos/${owner}/${repo}/files/${branch}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ''}`\n\n const unghRes = await $fetch.raw(unghUrl).catch(() => null)\n\n if (unghRes?.ok) {\n return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ''}${ref ? `@${ref}` : ''}`\n }\n\n // Fallback to raw.githubusercontent.com — use GET instead of HEAD\n // because raw.githubusercontent.com sometimes returns HTML on HEAD for valid URLs\n const basePath = subdir ? `${subdir}/` : ''\n const branches = ref ? 
[ref] : ['main', 'master']\n for (const b of branches) {\n for (const filename of ['README.md', 'Readme.md', 'readme.md']) {\n const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`\n const res = await $fetch.raw(readmeUrl).catch(() => null)\n if (res?.ok)\n return readmeUrl\n }\n }\n\n return null\n}\n\n/**\n * Result of fetching versioned source files from a GitHub repo\n */\nexport interface GitSourceResult {\n /** URL pattern for fetching source */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of source file paths relative to repo root */\n files: string[]\n}\n\n/** Source file extensions to include */\nconst SOURCE_EXTENSIONS = new Set([\n '.ts',\n '.tsx',\n '.mts',\n '.cts',\n '.js',\n '.jsx',\n '.mjs',\n '.cjs',\n '.vue',\n '.svelte',\n '.astro',\n])\n\n/** Paths/patterns to exclude */\nconst EXCLUDE_PATTERNS = [\n /\\.test\\./,\n /\\.spec\\./,\n /\\.d\\.ts$/,\n /__tests__/,\n /__mocks__/,\n /\\.config\\./,\n /fixtures?\\//,\n]\n\n/**\n * Filter source files from a file list\n */\nfunction filterSourceFiles(files: string[]): string[] {\n return files.filter((path) => {\n if (!path.startsWith('src/'))\n return false\n\n const ext = path.slice(path.lastIndexOf('.'))\n if (!SOURCE_EXTENSIONS.has(ext))\n return false\n if (EXCLUDE_PATTERNS.some(p => p.test(path)))\n return false\n\n return true\n })\n}\n\n/**\n * Fetch source files from GitHub repo's src/ folder\n */\nexport async function fetchGitSource(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitSourceResult | null> {\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n const files = filterSourceFiles(tag.files)\n if (files.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files,\n }\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport async function fetchReadmeContent(url: string): Promise<string | null> {\n // Local file\n if (url.startsWith('file://')) {\n const filePath = fileURLToPath(url)\n if (!fsExistsSync(filePath))\n return null\n return fsReadFileSync(filePath, 'utf-8')\n }\n\n if (url.startsWith('ungh://')) {\n let path = url.replace('ungh://', '')\n let ref = 'main'\n\n // Parse ref from owner/repo/subdir@ref\n const atIdx = path.lastIndexOf('@')\n if (atIdx !== -1) {\n ref = path.slice(atIdx + 1)\n path = path.slice(0, atIdx)\n }\n\n const parts = path.split('/')\n const owner = parts[0]\n const repo = parts[1]\n const subdir = parts.slice(2).join('/')\n\n const unghUrl = subdir\n ? 
`https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`\n\n const text = await $fetch(unghUrl, { responseType: 'text' }).catch(() => null)\n if (!text)\n return null\n\n try {\n const json = JSON.parse(text) as { markdown?: string, file?: { contents?: string } }\n return json.markdown || json.file?.contents || null\n }\n catch {\n return text\n }\n }\n\n return fetchText(url)\n}\n","/**\n * llms.txt fetching and parsing\n */\n\nimport type { FetchedDoc, LlmsContent, LlmsLink } from './types'\nimport pLimit from 'p-limit'\nimport { fetchText, verifyUrl } from './utils'\n\n/**\n * Check for llms.txt at a docs URL, returns the llms.txt URL if found\n */\nexport async function fetchLlmsUrl(docsUrl: string): Promise<string | null> {\n const origin = new URL(docsUrl).origin\n const llmsUrl = `${origin}/llms.txt`\n if (await verifyUrl(llmsUrl))\n return llmsUrl\n return null\n}\n\n/**\n * Fetch and parse llms.txt content\n */\nexport async function fetchLlmsTxt(url: string): Promise<LlmsContent | null> {\n const content = await fetchText(url)\n if (!content || content.length < 50)\n return null\n\n return {\n raw: content,\n links: parseMarkdownLinks(content),\n }\n}\n\n/**\n * Parse markdown links from llms.txt to get .md file paths\n */\nexport function parseMarkdownLinks(content: string): LlmsLink[] {\n const links: LlmsLink[] = []\n const seen = new Set<string>()\n const linkRegex = /\\[([^\\]]+)\\]\\(([^)]+\\.md)\\)/g\n for (let match = linkRegex.exec(content); match !== null; match = linkRegex.exec(content)) {\n const url = match[2]!\n if (!seen.has(url)) {\n seen.add(url)\n links.push({ title: match[1]!, url })\n }\n }\n\n return links\n}\n\n/** Reject non-https URLs and private/link-local IPs */\nfunction isSafeUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n if (parsed.protocol !== 'https:')\n return false\n const host = parsed.hostname\n // Reject private/link-local/loopback\n if (host === 'localhost' || host === '127.0.0.1' || host === '::1')\n return false\n if (host === '169.254.169.254') // cloud metadata\n return false\n if (/^(?:10\\.|172\\.(?:1[6-9]|2\\d|3[01])\\.|192\\.168\\.)/.test(host))\n return false\n if (host.startsWith('[')) // IPv6 link-local\n return false\n return true\n }\n catch { return false }\n}\n\n/**\n * Download all .md files referenced in llms.txt\n */\nexport async function downloadLlmsDocs(\n llmsContent: LlmsContent,\n baseUrl: string,\n onProgress?: (url: string, index: number, total: number) => void,\n): Promise<FetchedDoc[]> {\n const limit = pLimit(5)\n let completed = 0\n\n const results = await Promise.all(\n llmsContent.links.map(link => limit(async () => {\n const url = link.url.startsWith('http')\n ? link.url\n : `${baseUrl.replace(/\/$/, '')}${link.url.startsWith('/') ? 
'' : '/'}${link.url}`\n\n if (!isSafeUrl(url))\n return null\n\n onProgress?.(link.url, completed++, llmsContent.links.length)\n\n const content = await fetchText(url)\n if (content && content.length > 100)\n return { url: link.url, title: link.title, content } as FetchedDoc\n return null\n })),\n )\n\n return results.filter((d): d is FetchedDoc => d !== null)\n}\n\n/**\n * Normalize llms.txt links to relative paths for local access\n * Handles: absolute URLs, root-relative paths, and relative paths\n */\nexport function normalizeLlmsLinks(content: string, baseUrl?: string): string {\n let normalized = content\n\n // Handle absolute URLs: https://example.com/docs/foo.md → ./docs/foo.md\n if (baseUrl) {\n const base = baseUrl.replace(/\\/$/, '')\n const escaped = base.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n normalized = normalized.replace(\n new RegExp(`\\\\]\\\\(${escaped}(/[^)]+\\\\.md)\\\\)`, 'g'),\n '](./docs$1)',\n )\n }\n\n // Handle root-relative paths: /foo.md → ./docs/foo.md\n normalized = normalized.replace(/\\]\\(\\/([^)]+\\.md)\\)/g, '](./docs/$1)')\n\n return normalized\n}\n\n/**\n * Extract sections from llms-full.txt by URL patterns\n * Format: ---\\nurl: /path.md\\n---\\n<content>\\n\\n---\\nurl: ...\n */\nexport function extractSections(content: string, patterns: string[]): string | null {\n const sections: string[] = []\n const parts = content.split(/\\n---\\n/)\n\n for (const part of parts) {\n const urlMatch = part.match(/^url: *(\\S.*)$/m)\n if (!urlMatch)\n continue\n\n const url = urlMatch[1]!\n if (patterns.some(p => url.includes(p))) {\n const contentStart = part.indexOf('\\n', part.indexOf('url:'))\n if (contentStart > -1) {\n sections.push(part.slice(contentStart + 1))\n }\n }\n }\n\n if (sections.length === 0)\n return null\n return sections.join('\\n\\n---\\n\\n')\n}\n","/**\n * NPM registry lookup\n */\n\nimport type { LocalDependency, NpmPackageInfo, ResolveAttempt, ResolvedPackage, ResolveResult } from './types'\nimport { spawnSync } from 'node:child_process'\nimport { createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, unlinkSync } from 'node:fs'\nimport { Writable } from 'node:stream'\nimport { pathToFileURL } from 'node:url'\nimport { resolvePathSync } from 'mlly'\nimport { basename, dirname, join, resolve } from 'pathe'\nimport { getCacheDir } from '../cache/version'\nimport { fetchGitDocs, fetchGitHubRepoMeta, fetchReadme, searchGitHubRepo, validateGitDocsWithLlms } from './github'\nimport { fetchLlmsTxt, fetchLlmsUrl } from './llms'\nimport { $fetch, isGitHubRepoUrl, isUselessDocsUrl, normalizeRepoUrl, parseGitHubUrl } from './utils'\n\n/**\n * Search npm registry for packages matching a query.\n * Used as a fallback when direct package lookup fails.\n */\nexport async function searchNpmPackages(query: string, size = 5): Promise<Array<{ name: string, description?: string, version: string }>> {\n const data = await $fetch<{\n objects: Array<{ package: { name: string, description?: string, version: string } }>\n }>(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null)\n\n if (!data?.objects?.length)\n return []\n\n return data.objects.map(o => ({\n name: o.package.name,\n description: o.package.description,\n version: o.package.version,\n }))\n}\n\n/**\n * Fetch package info from npm registry\n */\nexport async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo | null> {\n // Try unpkg first (faster, CDN)\n const data = await 
$fetch<NpmPackageInfo>(`https://unpkg.com/${packageName}/package.json`).catch(() => null)\n if (data)\n return data\n\n // Fallback to npm registry\n return $fetch<NpmPackageInfo>(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null)\n}\n\nexport interface DistTagInfo {\n version: string\n releasedAt?: string\n}\n\nexport interface NpmRegistryMeta {\n releasedAt?: string\n distTags?: Record<string, DistTagInfo>\n}\n\n/**\n * Fetch release date and dist-tags from npm registry\n */\nexport async function fetchNpmRegistryMeta(packageName: string, version: string): Promise<NpmRegistryMeta> {\n const data = await $fetch<{\n 'time'?: Record<string, string>\n 'dist-tags'?: Record<string, string>\n }>(`https://registry.npmjs.org/${packageName}`).catch(() => null)\n\n if (!data)\n return {}\n\n // Enrich dist-tags with release dates\n const distTags: Record<string, DistTagInfo> | undefined = data['dist-tags']\n ? Object.fromEntries(\n Object.entries(data['dist-tags']).map(([tag, ver]) => [\n tag,\n { version: ver, releasedAt: data.time?.[ver] },\n ]),\n )\n : undefined\n\n return {\n releasedAt: data.time?.[version] || undefined,\n distTags,\n }\n}\n\nexport type ResolveStep = 'npm' | 'github-docs' | 'github-meta' | 'github-search' | 'readme' | 'llms.txt' | 'local'\n\nexport interface ResolveOptions {\n /** User's installed version - used to fetch versioned git docs */\n version?: string\n /** Current working directory - for local readme fallback */\n cwd?: string\n /** Progress callback - called before each resolution step */\n onProgress?: (step: ResolveStep) => void\n}\n\n/**\n * Shared GitHub resolution cascade: git docs → repo meta (homepage) → README.\n * Used for both \"repo URL found in package.json\" and \"repo URL found via search\" paths.\n */\nasync function resolveGitHub(\n gh: { owner: string, repo: string },\n targetVersion: string | undefined,\n pkg: { name: string },\n result: ResolvedPackage,\n attempts: ResolveAttempt[],\n onProgress?: (step: ResolveStep) => void,\n opts?: { rawRepoUrl?: string, subdir?: string },\n): Promise<string[] | undefined> {\n let allFiles: string[] | undefined\n\n // Try versioned git docs first (docs/**/*.md at git tag)\n if (targetVersion) {\n onProgress?.('github-docs')\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name, opts?.rawRepoUrl)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n allFiles = gitDocs.allFiles\n attempts.push({\n source: 'github-docs',\n url: gitDocs.baseUrl,\n status: 'success',\n message: gitDocs.fallback\n ? 
`Found ${gitDocs.files.length} docs at ${gitDocs.ref} (no tag for v${targetVersion})`\n : `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`,\n })\n }\n else {\n attempts.push({\n source: 'github-docs',\n url: `${result.repoUrl}/tree/v${targetVersion}/docs`,\n status: 'not-found',\n message: 'No docs/ folder found at version tag',\n })\n }\n }\n\n // If no docsUrl yet (npm had no homepage), try GitHub repo metadata\n if (!result.docsUrl) {\n onProgress?.('github-meta')\n const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name)\n if (repoMeta?.homepage && !isUselessDocsUrl(repoMeta.homepage)) {\n result.docsUrl = repoMeta.homepage\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'success',\n message: `Found homepage: ${repoMeta.homepage}`,\n })\n }\n else {\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'not-found',\n message: 'No homepage in repo metadata',\n })\n }\n }\n\n // README fallback via ungh\n onProgress?.('readme')\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, opts?.subdir, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n attempts.push({\n source: 'readme',\n url: readmeUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'readme',\n url: `${result.repoUrl}/README.md`,\n status: 'not-found',\n message: 'No README found',\n })\n }\n\n return allFiles\n}\n\n/**\n * Resolve documentation URL for a package (legacy - returns null on failure)\n */\nexport async function resolvePackageDocs(packageName: string, options: ResolveOptions = {}): Promise<ResolvedPackage | null> {\n const result = await resolvePackageDocsWithAttempts(packageName, options)\n return result.package\n}\n\n/**\n * Resolve documentation URL for a package with attempt tracking\n */\nexport async function resolvePackageDocsWithAttempts(packageName: string, options: ResolveOptions = {}): Promise<ResolveResult> {\n const attempts: ResolveAttempt[] = []\n const { onProgress } = options\n\n onProgress?.('npm')\n const pkg = await fetchNpmPackage(packageName)\n if (!pkg) {\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'not-found',\n message: 'Package not found on npm registry',\n })\n return { package: null, attempts }\n }\n\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'success',\n message: `Found ${pkg.name}@${pkg.version}`,\n })\n\n // Fetch release date and dist-tags for this version\n const registryMeta = pkg.version\n ? await fetchNpmRegistryMeta(packageName, pkg.version)\n : {}\n\n const result: ResolvedPackage = {\n name: pkg.name,\n version: pkg.version,\n releasedAt: registryMeta.releasedAt,\n description: pkg.description,\n dependencies: pkg.dependencies,\n distTags: registryMeta.distTags,\n }\n\n // Track allFiles from heuristic git doc discovery for llms.txt validation\n let gitDocsAllFiles: string[] | undefined\n\n // Extract repo URL (handle both object and shorthand string formats)\n let subdir: string | undefined\n let rawRepoUrl: string | undefined\n if (typeof pkg.repository === 'object' && pkg.repository?.url) {\n rawRepoUrl = pkg.repository.url\n const normalized = normalizeRepoUrl(rawRepoUrl)\n // Handle shorthand \"owner/repo\" in repository.url field (e.g. 
cac)\n if (!normalized.includes('://') && normalized.includes('/') && !normalized.includes(':'))\n result.repoUrl = `https://github.com/${normalized}`\n else\n result.repoUrl = normalized\n subdir = pkg.repository.directory\n }\n else if (typeof pkg.repository === 'string') {\n if (pkg.repository.includes('://')) {\n // Full URL string (e.g. \"https://github.com/org/repo/tree/main/packages/sub\")\n const gh = parseGitHubUrl(pkg.repository)\n if (gh)\n result.repoUrl = `https://github.com/${gh.owner}/${gh.repo}`\n }\n else {\n // Shorthand: \"owner/repo\" or \"github:owner/repo\"\n const repo = pkg.repository.replace(/^github:/, '')\n if (repo.includes('/') && !repo.includes(':'))\n result.repoUrl = `https://github.com/${repo}`\n }\n }\n\n // Use npm homepage early (skip GitHub repo URLs)\n if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage) && !isUselessDocsUrl(pkg.homepage)) {\n result.docsUrl = pkg.homepage\n }\n\n // GitHub repo handling - try versioned git docs first\n if (result.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(result.repoUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, { rawRepoUrl, subdir })\n }\n }\n else if (!result.repoUrl) {\n // No repo URL in package.json — try to find it via GitHub search\n onProgress?.('github-search')\n const searchedUrl = await searchGitHubRepo(pkg.name)\n if (searchedUrl) {\n result.repoUrl = searchedUrl\n attempts.push({\n source: 'github-search',\n url: searchedUrl,\n status: 'success',\n message: `Found via GitHub search: ${searchedUrl}`,\n })\n\n const gh = parseGitHubUrl(searchedUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress)\n }\n }\n else {\n attempts.push({\n source: 'github-search',\n status: 'not-found',\n message: 'No repository URL in package.json and GitHub search found no match',\n })\n }\n }\n\n // Check for llms.txt on docsUrl\n if (result.docsUrl) {\n onProgress?.('llms.txt')\n const llmsUrl = await fetchLlmsUrl(result.docsUrl)\n if (llmsUrl) {\n result.llmsUrl = llmsUrl\n attempts.push({\n source: 'llms.txt',\n url: llmsUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'llms.txt',\n url: `${new URL(result.docsUrl).origin}/llms.txt`,\n status: 'not-found',\n message: 'No llms.txt at docs URL',\n })\n }\n }\n\n // Validate heuristic git docs against llms.txt links\n if (result.gitDocsUrl && result.llmsUrl && gitDocsAllFiles) {\n const llmsContent = await fetchLlmsTxt(result.llmsUrl)\n if (llmsContent && llmsContent.links.length > 0) {\n const validation = validateGitDocsWithLlms(llmsContent.links, gitDocsAllFiles)\n if (!validation.isValid) {\n attempts.push({\n source: 'github-docs',\n url: result.gitDocsUrl,\n status: 'not-found',\n message: `Heuristic git docs don't match llms.txt links (${Math.round(validation.matchRatio * 100)}% match), preferring llms.txt`,\n })\n result.gitDocsUrl = undefined\n result.gitRef = undefined\n }\n }\n }\n\n // Fallback: check local node_modules readme when all else fails\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {\n onProgress?.('local')\n const pkgDir = join(options.cwd, 'node_modules', packageName)\n // Check common readme variations (case-insensitive)\n const readmeFile = existsSync(pkgDir) && readdirSync(pkgDir).find(f => /^readme\\.md$/i.test(f))\n if 
(readmeFile) {\n const readmePath = join(pkgDir, readmeFile)\n result.readmeUrl = pathToFileURL(readmePath).href\n attempts.push({\n source: 'readme',\n url: readmePath,\n status: 'success',\n message: 'Found local readme in node_modules',\n })\n }\n }\n\n // Must have at least one source\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl) {\n return { package: null, attempts }\n }\n\n return { package: result, attempts }\n}\n\n/**\n * Parse version specifier, handling protocols like link:, workspace:, npm:, file:\n */\nexport function parseVersionSpecifier(\n name: string,\n version: string,\n cwd: string,\n): LocalDependency | null {\n // link: - resolve local package.json\n if (version.startsWith('link:')) {\n const linkPath = resolve(cwd, version.slice(5))\n const linkedPkgPath = join(linkPath, 'package.json')\n if (existsSync(linkedPkgPath)) {\n const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, 'utf-8'))\n return {\n name: linkedPkg.name || name,\n version: linkedPkg.version || '0.0.0',\n }\n }\n return null // linked package doesn't exist\n }\n\n // npm: - extract aliased package name\n if (version.startsWith('npm:')) {\n const specifier = version.slice(4)\n const atIndex = specifier.startsWith('@')\n ? specifier.indexOf('@', 1)\n : specifier.indexOf('@')\n const realName = atIndex > 0 ? specifier.slice(0, atIndex) : specifier\n return { name: realName, version: resolveInstalledVersion(realName, cwd) || '*' }\n }\n\n // file: and git: - skip (local/custom sources)\n if (version.startsWith('file:') || version.startsWith('git:') || version.startsWith('git+')) {\n return null\n }\n\n // For everything else (semver, catalog:, workspace:, etc.)\n // resolve the actual installed version from node_modules\n const installed = resolveInstalledVersion(name, cwd)\n if (installed)\n return { name, version: installed }\n\n // Fallback: strip semver prefix if it looks like one\n if (/^[\\^~>=<\\d]/.test(version))\n return { name, version: version.replace(/^[\\^~>=<]/, '') }\n\n // catalog: and workspace: specifiers - include with wildcard version\n // so the dep isn't silently dropped from state.deps\n if (version.startsWith('catalog:') || version.startsWith('workspace:'))\n return { name, version: '*' }\n\n return null\n}\n\n/**\n * Resolve the actual installed version of a package by finding its package.json\n * via mlly's resolvePathSync. 
Works regardless of package manager or version protocol.\n */\nexport function resolveInstalledVersion(name: string, cwd: string): string | null {\n try {\n const resolved = resolvePathSync(`${name}/package.json`, { url: cwd })\n const pkg = JSON.parse(readFileSync(resolved, 'utf-8'))\n return pkg.version || null\n }\n catch {\n // Packages with `exports` that don't expose ./package.json\n // Resolve the entry point, then walk up to find package.json\n try {\n const entry = resolvePathSync(name, { url: cwd })\n let dir = dirname(entry)\n while (dir && basename(dir) !== 'node_modules') {\n const pkgPath = join(dir, 'package.json')\n if (existsSync(pkgPath)) {\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n return pkg.version || null\n }\n dir = dirname(dir)\n }\n }\n catch {}\n return null\n }\n}\n\n/**\n * Read package.json dependencies with versions\n */\nexport async function readLocalDependencies(cwd: string): Promise<LocalDependency[]> {\n const pkgPath = join(cwd, 'package.json')\n if (!existsSync(pkgPath)) {\n throw new Error('No package.json found in current directory')\n }\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n const deps: Record<string, string> = {\n ...pkg.dependencies,\n ...pkg.devDependencies,\n }\n\n const results: LocalDependency[] = []\n\n for (const [name, version] of Object.entries(deps)) {\n // Skip types and dev tools\n if (name.startsWith('@types/') || ['typescript', 'eslint', 'prettier', 'vitest', 'jest'].includes(name)) {\n continue\n }\n\n const parsed = parseVersionSpecifier(name, version, cwd)\n if (parsed) {\n results.push(parsed)\n }\n }\n\n return results\n}\n\nexport interface LocalPackageInfo {\n name: string\n version: string\n description?: string\n repoUrl?: string\n localPath: string\n}\n\n/**\n * Read package info from a local path (for link: deps)\n */\nexport function readLocalPackageInfo(localPath: string): LocalPackageInfo | null {\n const pkgPath = join(localPath, 'package.json')\n if (!existsSync(pkgPath))\n return null\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n\n let repoUrl: string | undefined\n if (pkg.repository?.url) {\n repoUrl = normalizeRepoUrl(pkg.repository.url)\n }\n else if (typeof pkg.repository === 'string') {\n repoUrl = normalizeRepoUrl(pkg.repository)\n }\n\n return {\n name: pkg.name,\n version: pkg.version || '0.0.0',\n description: pkg.description,\n repoUrl,\n localPath,\n }\n}\n\n/**\n * Resolve docs for a local package (link: dependency)\n */\nexport async function resolveLocalPackageDocs(localPath: string): Promise<ResolvedPackage | null> {\n const info = readLocalPackageInfo(localPath)\n if (!info)\n return null\n\n const result: ResolvedPackage = {\n name: info.name,\n version: info.version,\n description: info.description,\n repoUrl: info.repoUrl,\n }\n\n // Try GitHub if repo URL available\n if (info.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(info.repoUrl)\n if (gh) {\n // Try versioned git docs\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n }\n\n // README fallback via ungh\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, undefined, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n }\n }\n }\n\n // Fallback: read local readme (case-insensitive)\n if (!result.readmeUrl && !result.gitDocsUrl) {\n const readmeFile = readdirSync(localPath).find(f => 
/^readme\\.md$/i.test(f))\n if (readmeFile) {\n result.readmeUrl = pathToFileURL(join(localPath, readmeFile)).href\n }\n }\n\n if (!result.readmeUrl && !result.gitDocsUrl) {\n return null\n }\n\n return result\n}\n\n/**\n * Download and extract npm package tarball to cache directory.\n * Used when the package isn't available in node_modules.\n *\n * Extracts to: ~/.skilld/references/<pkg>@<version>/pkg/\n * Returns the extracted directory path, or null on failure.\n */\nexport async function fetchPkgDist(name: string, version: string): Promise<string | null> {\n const cacheDir = getCacheDir(name, version)\n const pkgDir = join(cacheDir, 'pkg')\n\n // Already extracted\n if (existsSync(join(pkgDir, 'package.json')))\n return pkgDir\n\n // Fetch version metadata to get tarball URL\n const data = await $fetch<{ dist?: { tarball?: string } }>(\n `https://registry.npmjs.org/${name}/${version}`,\n ).catch(() => null)\n if (!data)\n return null\n const tarballUrl = data.dist?.tarball\n if (!tarballUrl)\n return null\n\n // Download tarball to temp file\n const tarballRes = await fetch(tarballUrl, {\n headers: { 'User-Agent': 'skilld/1.0' },\n }).catch(() => null)\n\n if (!tarballRes?.ok || !tarballRes.body)\n return null\n\n mkdirSync(pkgDir, { recursive: true })\n\n const tmpTarball = join(cacheDir, '_pkg.tgz')\n const fileStream = createWriteStream(tmpTarball)\n\n // Stream response body to file\n const reader = tarballRes.body.getReader()\n await new Promise<void>((res, reject) => {\n const writable = new Writable({\n write(chunk, _encoding, callback) {\n fileStream.write(chunk, callback)\n },\n })\n writable.on('finish', () => {\n fileStream.end()\n res()\n })\n writable.on('error', reject)\n\n function pump() {\n reader.read().then(({ done, value }) => {\n if (done) {\n writable.end()\n return\n }\n writable.write(value, () => pump())\n }).catch(reject)\n }\n pump()\n })\n\n // Extract tarball — npm tarballs have a \"package/\" prefix\n const { status } = spawnSync('tar', ['xzf', tmpTarball, '--strip-components=1', '-C', pkgDir], { stdio: 'ignore' })\n if (status !== 0) {\n rmSync(pkgDir, { recursive: true, force: true })\n rmSync(tmpTarball, { force: true })\n return null\n }\n\n unlinkSync(tmpTarball)\n return pkgDir\n}\n\n/**\n * Fetch just the latest version string from npm (lightweight)\n */\nexport async function fetchLatestVersion(packageName: string): Promise<string | null> {\n const data = await $fetch<{ version?: string }>(\n `https://unpkg.com/${packageName}/package.json`,\n ).catch(() => null)\n return data?.version || null\n}\n\n/**\n * Get installed skill version from SKILL.md\n */\nexport function getInstalledSkillVersion(skillDir: string): string | null {\n const skillPath = join(skillDir, 'SKILL.md')\n if (!existsSync(skillPath))\n return null\n\n const content = readFileSync(skillPath, 'utf-8')\n const match = content.match(/^version:\\s*\"?([^\"\\n]+)\"?/m)\n return match?.[1] || 
null\n}\n"],"mappings":";;;;;;;;;;;;AAIA,MAAa,YAAY,IAAI,IAAI;CAC/B;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAa,WAAW,QAAgB,IAAI,MAAM,IAAI,CAAC;AAGvD,SAAgB,iBAAiB,QAAuE;CACtG,MAAM,QAAQ,CAAC,MAAM;AACrB,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,CACzC,KAAI,MAAM,KAAA,EACR,OAAM,KAAK,GAAG,EAAE,IAAI,OAAO,MAAM,YAAY,UAAU,KAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,KAAK,IAAI;AAEzG,OAAM,KAAK,MAAM;AACjB,QAAO,MAAM,KAAK,KAAK;;ACSzB,IAAI;AAKJ,SAAgB,gBAAyB;AACvC,KAAI,iBAAiB,KAAA,EACnB,QAAO;CACT,MAAM,EAAE,WAAW,UAAU,MAAM,CAAC,QAAQ,SAAS,EAAE,EAAE,OAAO,UAAU,CAAC;AAC3E,QAAQ,eAAe,WAAW;;AAIpC,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,iBAAiB,IAAI,IAAI;CAC7B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,aAAa,IAAI,IAAI;CACzB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,kBAAkB,IAAI,IAAI;CAC9B;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,cAAc,IAAI,IAAI;CAC1B;CACA;CACA;CACA;CACD,CAAC;AAKF,SAAgB,cAAc,QAA6B;CACzD,MAAM,QAAQ,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC;AAC9C,KAAI,MAAM,MAAK,MAAK,WAAW,IAAI,EAAE,CAAC,CACpC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,IAAI,EAAE,CAAC,CACzC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,YAAY,IAAI,EAAE,CAAC,CACrC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,eAAe,IAAI,EAAE,CAAC,CACxC,QAAO;AACT,QAAO;;AAMT,SAAS,aAAa,OAAmE;AAEvF,KADc,MAAM,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC,CAC1C,MAAK,MAAK,aAAa,IAAI,EAAE,CAAC,CACtC,QAAO;AAET,KAAI,MAAM,MAAM,WAAW,KAAK,IAAI,MAAM,MAAM,WAAW,SAAS,IAAI,MAAM,MAAM,WAAW,aAAa,CAC1G,QAAO;AACT,QAAO;;AAMT,SAAS,UAAU,WAA2B;AAC5C,KAAI,aAAa,GACf,QAAO;AACT,KAAI,aAAa,EACf,QAAO;AACT,QAAO;;AAMT,SAAS,mBACP,OACA,MACA,OACA,OACA,YACe;CACf,MAAM,aAAa,KAAK,IAAI,QAAQ,GAAG,IAAI;CAC3C,IAAI,WAAW;AACf,KAAI,UAAU,SACZ,KAAI,YAAY;EAEd,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,aAAa,QAAQ,KAAK,aAAa,CAAC;OAGnD,YAAW,YAAY,YAAY;UAG9B,YAAY;EAEnB,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,cAAc,QAAQ,KAAK,aAAa,CAAC;;CAKtD,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;EACzC;EACA,mBAJQ,QAAQ,MAAM,GAAG,KAAK,eAAe,QAAQ,WAIhC,sCAAsC;EAC3D;EACA;EACD,EAAE;EAAE,UAAU;EAAS,WAAW,KAAK,OAAO;EAAM,CAAC;AAEtD,KAAI,CAAC,OACH,QAAO,EAAE;AAEX,QAAO,OACJ,MAAM,CACN,MAAM,KAAK,CACX,OAAO,QAAQ,CACf,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAqD,CACjF,QAAO,UAAS,CAAC,UAAU,IAAI,MAAM,KAAK,IAAI,MAAM,aAAa,MAAM,CACvE,QAAO,UAAS,CAAC,aAAa,MAAM,CAAC,CACrC,KAAK,EAAE,MAAM,GAAG,UAAU,IAAI,GAAG,aAAa;EAC7C,GAAG;EACH,MAAM,cAAc,MAAM,OAAO;EACjC,aAAa,EAAA;EACd,EAAE,CAEF,MAAM,GAAG,OAAO,EAAE,SAAS,YAAY,IAAI,MAAM,EAAE,SAAS,YAAY,IAAI,GAAG,CAC/E,MAAM,GAAG,MAAM;;AAGpB,SAAS,aAAqB;CAC5B,MAAM,oBAAI,IAAI,MAAM;AACpB,GAAE,YAAY,EAAE,aAAa,GAAG,EAAE;AAClC,QAAO,QAAQ,EAAE,aAAa,CAAC;;AAOjC,SAAS,mBAAmB,OAAe,MAAc,QAAuB,OAAO,IAAU;CAE/F,MAAM,QAAQ,OACX,QAAO,MAAK,EAAE,WAAW,MAAM,EAAE,SAAS,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa,GAAG,CAC9F,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU,CACzC,MAAM,GAAG,KAAK;AAEjB,KAAI,MAAM,WAAW,EACnB;CAOF,MAAM,QAAQ,qFAJI,MAAM,KAAK,OAAO,MAClC,IAAI,EAAE,kBAAkB,MAAM,OAAO,uFACtC,CAAC,KAAK,IAAI,CAEkG;AAE7G,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA;GACA;GACA,SAAS;GACT;GACA,SAAS;GACT;GACA,QAAQ;GACT,EAAE;GAAE,UAAU;GAAS,WAAW,KAAK,OAAO;GAAM,CAAC;AAEtD,MAAI,CAAC,OACH;EAGF,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM;AAC1B,MAAI,CAAC,MACH;AAEF,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,UAAU;AACxC,OAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;AACF,SAAM,GAAI,cAAc,MACrB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,KAAK,OAAY;IAChB,MAAM,EAAE,QAAQ;IAChB,QAAQ,EAAE,OAAO;IACjB,WAAW,EAAE,WAAW,cAAc;IACvC,EAAE;;SAGH;;AAWR,eAAsB,kBACpB,OACA,MACA,QAAQ,IACR,YACwB;AACxB,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;CAEX,MAAM,YAAY,KAAK,KAAK,QAAQ,IAAK;C
ACzC,MAAM,cAAc,QAAQ;AAE5B,KAAI;EACF,MAAM,OAAO,mBAAmB,OAAO,MAAM,QAAQ,WAAW,WAAW;EAC3E,MAAM,SAAS,mBAAmB,OAAO,MAAM,UAAU,aAAa,WAAW;EACjF,MAAM,MAAM,CAAC,GAAG,MAAM,GAAG,OAAO;AAChC,qBAAmB,OAAO,MAAM,IAAI;AACpC,SAAO;SAEH;AACJ,SAAO,EAAE;;;AAOb,SAAgB,sBAAsB,OAA4B;CAChE,MAAM,QAAQ,UAAU,MAAM,UAAU;CACxC,MAAM,WAAkE;EACtE,QAAQ,MAAM;EACd,OAAO,MAAM;EACb,MAAM,MAAM;EACZ,OAAO,MAAM;EACb,SAAS,QAAQ,MAAM,UAAU;EACjC,KAAK,MAAM;EACX,WAAW,MAAM;EACjB,UAAU,MAAM;EACjB;AACD,KAAI,MAAM,OAAO,SAAS,EACxB,UAAS,SAAS,IAAI,MAAM,OAAO,KAAK,KAAK,CAAC;CAGhD,MAAM,QAAQ;EAFH,iBAAiB,SAAS;EAElB;EAAI,KAAK,MAAM;EAAQ;AAE1C,KAAI,MAAM,MAAM;EACd,MAAM,OAAO,MAAM,KAAK,SAAS,QAC7B,GAAG,MAAM,KAAK,MAAM,GAAG,MAAM,CAAC,OAC9B,MAAM;AACV,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,MAAM,YAAY,SAAS,GAAG;AAChC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,MAAM,aAAa;GACjC,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,cAAc,EAAE,KAAK,SAAS,MAChC,GAAG,EAAE,KAAK,MAAM,GAAG,IAAI,CAAC,OACxB,EAAE;AACN,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,UAAU,IAAI,IAAI,YAAY;;;AAIpE,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,mBAAmB,QAA+B;CAChE,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,OAAO,OAAO,IAAI,MAAM,KAAK,IAAI,EAAE;AACzC,OAAK,KAAK,MAAM;AAChB,SAAO,IAAI,MAAM,MAAM,KAAK;;CAG9B,MAAM,aAAwC;EAC5C,KAAK;EACL,UAAU;EACV,MAAM;EACN,SAAS;EACT,OAAO;EACR;CAED,MAAM,YAAyB;EAAC;EAAO;EAAY;EAAQ;EAAS;EAAU;CAU9E,MAAM,WAAqB;EARhB;GACT;GACA,UAAU,OAAO;GACjB,SAAS,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAChD,WAAW,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAClD;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAkB;EAAG;AAEpE,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,QAAQ,OAAO,IAAI,KAAK;AAC9B,MAAI,CAAC,OAAO,OACV;AACF,WAAS,KAAK,MAAM,WAAW,MAAM,IAAI,MAAM,OAAO,IAAI,GAAG;AAC7D,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,YAAY,MAAM,YAAY,IAAI,MAAM,MAAM,UAAU,KAAK;GACnE,MAAM,QAAQ,MAAM,UAAU,SAAS,KAAK;AAC5C,YAAS,KAAK,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO,QAAQ,MAAM,QAAQ,YAAY,QAAQ;;AAEvG,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;ACpX5B,MAAa,SAAS,OAAO,OAAO;CAClC,OAAO;CACP,YAAY;CACZ,SAAS;CACT,SAAS,EAAE,cAAc,cAAA;CAC1B,CAAC;AAKF,eAAsB,UAAU,KAAqC;AACnE,QAAO,OAAO,KAAK,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;;AAMhE,eAAsB,UAAU,KAA+B;CAC7D,MAAM,MAAM,MAAM,OAAO,IAAI,KAAK,EAAE,QAAQ,QAAQ,CAAC,CAAC,YAAY,KAAK;AACvE,KAAI,CAAC,IACH,QAAO;AAET,QAAO,EADa,IAAI,QAAQ,IAAI,eAAe,IAAI,IACnC,SAAS,YAAY;;AAM3C,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,iBAAiB,KAAsB;AACrD,KAAI;EACF,MAAM,EAAE,aAAa,IAAI,IAAI,IAAI;AACjC,SAAO,cAAc,IAAI,SAAS;SAE9B;AAAE,SAAO;;;AAMjB,SAAgB,gBAAgB,KAAsB;AACpD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,SAAO,OAAO,aAAa,gBAAgB,OAAO,aAAa;SAE3D;AACJ,SAAO;;;AAOX,SAAgB,eAAe,KAAqD;CAClF,MAAM,QAAQ,IAAI,MAAM,qDAAqD;AAC7E,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EAAE,OAAO,MAAM;EAAK,MAAM,MAAM;EAAK;;AAM9C,SAAgB,iBAAiB,KAAqB;AACpD,QAAO,IACJ,QAAQ,UAAU,GAAG,CACrB,QAAQ,QAAQ,GAAG,CACnB,QAAQ,UAAU,GAAG,CACrB,QAAQ,aAAa,WAAW,CAChC,QAAQ,4BAA4B,qBAAqB,CAEzD,QAAQ,qBAAqB,sBAAsB;;AAMxD,SAAgB,kBAAkB,KAAiC;CACjE,MAAM,OAAO,IAAI,QAAQ,IAAI;AAC7B,KAAI,SAAS,GACX,QAAO,KAAA;CACT,MAAM,WAAW,IAAI,MAAM,OAAO,EAAE;AAEpC,KAAI,CAAC,YAAY,aAAa,SAC5B,QAAO,KAAA;AACT,QAAO;;ACnET,SAAgB,YAAY,SAAgC;CAC1D,MAAM,QAAQ,QAAQ,QAAQ,MAAM,GAAG;CACvC,MAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EACL,OAAO,CAAC,MAAM;EACd,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,KAAK;EACN;;AAUH,SAAS,eAAe,KAAa,aAAqC;AACxE,KAAI,aAAa;EAEf,MAAM,UAAU,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,QAAQ,CAAC;AAC3E,MAAI,QACF,QAAO,QAAQ;EACjB,MAAM,YAAY,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,UAAU,CAAC;AAC/E,MAAI,UACF,QAAO,UAAU;;AAGrB,QAAO,IAAI,QAAQ,MAAM,GAAG;;AAG9B,SAAS,YAAY,KAAqB;AACxC,QAAO,IAAI,QAAQ,uBA
AuB,OAAO;;AAMnD,SAAS,kBAAkB,KAAa,aAA8B;AAEpE,QAAO,IAAI,WAAW,GAAG,YAAY,GAAG,IAAI,IAAI,WAAW,GAAG,YAAY,IAAI,IAAI,IAAI,WAAW,GAAG,YAAY,GAAG;;AAGrH,SAAgB,cAAc,GAAW,GAAmB;AAC1D,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,QAAO,EAAE,QAAQ,EAAE;;AAMrB,SAAS,mBAAmB,OAAe,MAA+B;AACxE,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GACvC;GACA,SAAS,MAAM,GAAG,KAAK;GACvB;GACA;GACD,EAAE;GAAE,UAAU;GAAS,SAAS;GAAQ,OAAO;IAAC;IAAU;IAAQ;;GAAW,CAAC;AAC/E,MAAI,CAAC,KACH,QAAO,EAAE;AACX,SAAO,KAAK,MAAM,KAAK;SAEnB;AACJ,SAAO,EAAE;;;AAOb,eAAe,qBAAqB,OAAe,MAAwC;AAKzF,SAJa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,YACvC,EAAE,QAAQ,YAAY,QAAQ,KAAO,EAAE,CACxC,CAAC,YAAY,KAAK,GACN,YAAY,EAAE;;AAM7B,eAAe,iBAAiB,OAAe,MAAwC;AACrF,KAAI,eAAe,EAAE;EACnB,MAAM,WAAW,mBAAmB,OAAO,KAAK;AAChD,MAAI,SAAS,SAAS,EACpB,QAAO;;AAEX,QAAO,qBAAqB,OAAO,KAAK;;AAS1C,SAAgB,eAAe,UAA2B,aAAsB,kBAA4C;CAE1H,MAAM,kBAAkB,eAAe,SAAS,MAAK,MAAK,kBAAkB,EAAE,KAAK,YAAY,CAAC;CAChG,MAAM,cAAc,mBAAmB,YAAY,iBAAiB,GAAG;AAyBvE,QAvBiB,SAAS,QAAQ,MAAM;AACtC,MAAI,EAAE,WACJ,QAAO;EAET,MAAM,MAAM,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC5E,MAAI,CAAC,IACH,QAAO;EAET,MAAM,KAAK,YAAY,IAAI;AAC3B,MAAI,CAAC,GACH,QAAO;AAGT,MAAI,mBAAmB,eAAe,CAAC,kBAAkB,EAAE,KAAK,YAAY,CAC1E,QAAO;AAGT,MAAI,eAAe,cAAc,IAAI,YAAY,GAAG,EAClD,QAAO;AAET,SAAO;GACP,CAGC,MAAM,GAAG,MAAM;EACd,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;EAC7E,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC7E,MAAI,CAAC,QAAQ,CAAC,KACZ,QAAO;AACT,SAAO,cAAc,YAAY,KAAK,EAAG,YAAY,KAAK,CAAE;GAC5D,CACD,MAAM,GAAG,GAAG;;AAMjB,SAAS,cAAc,SAAwB,aAA8B;CAC3E,MAAM,OAAO,QAAQ,QAAQ,eAAe,QAAQ,UAAU;CAC9D,MAAM,UAAU,eAAe,QAAQ,KAAK,YAAY,IAAI,QAAQ;CAEpE,MAAM,KAAK;EACT;EACA,QAAQ,QAAQ;EAChB,YAAY;EACZ,cAAc;EACf;AACD,KAAI,QAAQ,QAAQ,QAAQ,SAAS,QAAQ,IAC3C,IAAG,KAAK,UAAU,QAAQ,KAAK,QAAQ,MAAM,OAAM,CAAC,GAAG;AACzD,IAAG,KAAK,MAAM;AAEd,QAAO,GAAG,GAAG,KAAK,KAAK,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,IAAI,MAAM,QAAQ;;AAc5E,SAAgB,qBAAqB,gBAAuD,aAA8B;CAExH,MAAM,OAA4B,MAAM,QAAQ,eAAe,GAC3D;EAAE,UAAU;EAAgB;EAAa,GACzC;CAEJ,MAAM,EAAE,UAAU,cAAc,iBAAiB;CACjD,MAAM,MAAM,KAAK;CAUjB,MAAM,QAAkB;EAPb;GACT;GACA,UAHY,SAAS,UAAU,cAAc,UAAU;GAIvD,WAAW,SAAS,IAAI,OAAO;GAC/B;GACD,CAE2B,KAAK,KAAK;EAAE;EAAI;EAAoB;EAAG;AAGnE,KAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,QAAM,KAAK,oBAAoB,GAAG;AAClC,OAAK,MAAM,KAAK,aACd,OAAM,KAAK,MAAM,EAAE,QAAQ,WAAW,EAAE,QAAQ,QAAQ,EAAE,MAAM,IAAI,EAAE,KAAK,GAAG;AAEhF,QAAM,KAAK,GAAG;;AAIhB,KAAI,SAAS,SAAS,GAAG;AACvB,MAAI,gBAAgB,aAAa,SAAS,EACxC,OAAM,KAAK,oBAAoB,GAAG;AACpC,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,OAAO,QAAQ,EAAE,eAAe,EAAE,UAAU;GAClD,MAAM,WAAW,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GAAG,EAAE,MAAM,IAAI,EAAE;GAE9E,MAAM,KAAK,YADK,eAAe,EAAE,KAAK,IAAI,IAAI,EAAE,IACjB;GAC/B,MAAM,QAAQ,IAAI,UAAU,KAAK,GAAG,UAAU,IAAI,iBAAiB,IAAI,UAAU,IAAI,iBAAiB;AACtG,SAAM,KAAK,MAAM,EAAE,IAAI,MAAM,SAAS,QAAQ,EAAE,QAAQ,EAAE,IAAI,IAAI,KAAK,GAAG,QAAQ;;AAEpF,QAAM,KAAK,GAAG;;AAIhB,KAAI,cAAc;AAChB,QAAM,KAAK,gBAAgB,GAAG;AAC9B,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,2BAA2B,UAAoC;CAC7E,MAAM,SAAS,SAAS,MAAM,GAAG,EAAE;AACnC,KAAI,OAAO,WAAW,EACpB,QAAO;AACT,QAAO,OAAO,OAAO,MAAM;EACzB,MAAM,QAAQ,EAAE,YAAY,IAAI,MAAM;AACtC,SAAO,KAAK,SAAS,OAAO,iBAAiB,KAAK,KAAK;GACvD;;AAMJ,eAAe,eAAe,OAAe,MAAc,KAAqC;AAC9F,MAAK,MAAM,YAAY;EAAC;EAAgB;EAAgB;EAAa,EAAE;EAErE,MAAM,UAAU,MAAM,OADV,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,YACvC;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAClH,MAAI,QACF,QAAO;;AAEX,QAAO;;AAUT,eAAsB,kBACpB,OACA,MACA,kBACA,QACA,aACsB;CAEtB,MAAM,WAAW,eADA,MAAM,iBAAiB,OAAO,KAAK,EACV,aAAa,iBAAiB;AAExE,KAAI,SAAS,SAAS,GAAG;AAGvB,MAAI,2BAA2B,SAAS,EAAE;GAExC,MAA
M,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,SAAS,GAAI,IACqB;AACxD,OAAI,UACF,QAAO,CAAC;IAAE,MAAM;IAAyB,SAAS;IAAW,CAAC;;EAGlE,MAAM,OAAO,SAAS,KAAK,MAAM;AAI/B,UAAO;IACL,MAAM,YAJS,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GACzD,EAAE,MACF,IAAI,EAAE,MAEmB;IAC3B,SAAS,cAAc,GAAG,YAAA;IAC3B;IACD;EAIF,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,SAAS,GAAI,IACqB;AACxD,MAAI,aAAa,UAAU,SAAS,IAClC,MAAK,KAAK;GAAE,MAAM;GAAyB,SAAS;GAAW,CAAC;AAGlE,SAAO;;CAKT,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,OACkC;AACxD,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,CAAC;EAAE,MAAM;EAAyB,SAAS;EAAW,CAAC;;ACzThE,SAAS,oBAAoB,KAA4B;CACvD,MAAM,QAAQ,IAAI,MAAM,2BAA2B;AACnD,KAAI,MACF,QAAO,GAAG,MAAM,GAAG,GAAG,MAAM;AAC9B,QAAO;;AAMT,SAAS,kBAAkB,SAAkC;AAW3D,QAAO,GAVI;EACT;EACA,YAAY,QAAQ;EACpB,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAM,CAAC;EAC9C,SAAS,QAAQ;EACjB,QAAQ,QAAQ;EAChB;EACA;EACD,CAEY,KAAK,KAAK,CAAC,QAAQ,QAAQ,MAAM,MAAM,QAAQ;;AAM9D,eAAe,cAAc,KAA8C;AACzE,KAAI;EACF,MAAM,OAAO,MAAM,OAAO,KAAK;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAC/G,MAAI,CAAC,KACH,QAAO;EAGT,MAAM,UAAU,oBAAoB,IAAI;AACxC,MAAI,CAAC,QACH,QAAO;EAGT,IAAI,QAAQ;EACZ,MAAM,aAAa,KAAK,MAAM,yBAAyB;AACvD,MAAI,WACF,SAAQ,WAAW,GAAI,MAAM;AAG/B,MAAI,CAAC,OAAO;GACV,MAAM,iBAAiB,KAAK,MAAM,0BAA0B;AAC5D,OAAI,eACF,SAAQ,eAAe,GAAI,MAAM;;EAIrC,IAAI,wBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC;EAC/C,MAAM,YAAY,KAAK,MAAM,gEAAgE;AAC7F,MAAI,UACF,QAAO,UAAU;EAGnB,MAAM,WAAW,eAAe,KAAK;AACrC,MAAI,CAAC,SACH,QAAO;AAET,SAAO;GACL;GACA,OAAO,SAAS,WAAW;GAC3B;GACA;GACA;GACD;SAEG;AACJ,SAAO;;;AASX,SAAS,qBAAqB,SAAwB,kBAAyC;CAC7F,MAAM,cAAc,YAAY,iBAAiB;AACjD,KAAI,CAAC,YACH,QAAO;AAET,QAAO,QAAQ,QAAQ,UAAU;EAC/B,MAAM,UAAU,YAAY,MAAM,QAAQ;AAC1C,MAAI,CAAC,QACH,QAAO;AAET,SAAO,cAAc,SAAS,YAAY,IAAI;GAC9C;;AAQJ,eAAsB,kBACpB,aACA,kBACsB;CACtB,MAAM,SAAS,cAAc,YAAY;AACzC,KAAI,CAAC,OACH,QAAO,EAAE;CAEX,MAAM,mBAAmB,qBAAqB,OAAO,UAAU,iBAAiB;AAChF,KAAI,iBAAiB,WAAW,EAC9B,QAAO,EAAE;CAEX,MAAM,WAA8B,EAAE;CAGtC,MAAM,YAAY;AAClB,MAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK,WAAW;EAC3D,MAAM,QAAQ,iBAAiB,MAAM,GAAG,IAAI,UAAU;EACtD,MAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,KAAI,UAAS,cAAc,MAAM,IAAI,CAAC,CAAC;AAC/E,OAAK,MAAM,UAAU,QACnB,KAAI,OACF,UAAS,KAAK,OAAO;;AAI3B,KAAI,SAAS,WAAW,EACtB,QAAO,EAAE;AAGX,UAAS,MAAM,GAAG,MAAM;EACtB,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;EAC7C,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;AAC7C,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,KAAK,QAAQ,KAAK,OAAO,EAAE,KAAK;GAC3D,MAAM,QAAQ,KAAK,MAAM,MAAM,KAAK,MAAM;AAC1C,OAAI,SAAS,EACX,QAAO;;AAEX,SAAO;GACP;AAGF,QAAO,SAAS,KAAI,OAAM;EACxB,MAAM,iBAAiB,EAAE,QAAQ;EACjC,SAAS,kBAAkB,EAAA;EAC5B,EAAE;;ACjKL,MAAM,wBAAwB,IAAI,IAAI;CACpC;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,uBAAuB,IAAI,IAAI;CACnC;CACA;CACA;CACD,CAAC;AAwBF,eAAsB,uBACpB,OACA,MACA,QAAQ,IACR,YAC6B;AAC7B,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;AAKX,KAAI,YAAY;EACd,MAAM,SAAS,IAAI,KAAK,WAAW;AACnC,SAAO,SAAS,OAAO,UAAU,GAAG,EAAE;AACtC,MAAI,yBAAS,IAAI,MAAM,CACrB,QAAO,EAAE;;AAGb,KAAI;EAKF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GAAC;GAAO;GAAW;GAAM,SAFtD,wGADK,KAAK,IAAI,QAAQ,GAAG,GAAG,CACuF;GAE3C;GAAM,SAAS;GAAS;GAAM,QAAQ;GAAO,EAAE;GACnI,UAAU;GACV,WAAW,KAAK,OAAO;GACxB,CAAC;AACF,MAAI,CAAC,OACH,QAAO,EAAE;EAGX,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM,YAAY,aAAa;AACnD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,QAAO,EAAE;AAgCX,SA9BoB,MACjB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW;GAClB,MAAM,OAAO,EAAE,UAAU,QAAQ,IAAI,aAAa;AAClD,UAAO,CAAC,qBAAqB,IAAI,IAAI;IACrC,CACD,KAAK,OAAY;GAChB,QAAQ,EAAE;GACV,OAAO,EAAE;GACT,MAAM,EAAE,QAAQ;GAChB,UAAU,EAAE,UAAU,QAAQ;GAC9B,WAAW,EAAE;GACb,KAAK,EAAE;GACP,aAAa,EAAE,eAAe;GAC9B,UAAU,EAAE,UAAU,cAAc;GACpC,QAAQ,EAAE,QAAQ,QAAQ,KAAA;GAC1B,cAAc,EAAE,UAAU,SAAS,EAAE,EAClC,QAAQ,
MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,KAAK,OAAY;IAAE,MAAM,EAAE,QAAQ;IAAI,QAAQ,EAAE,OAAO;IAAO,EAAA;GACnE,EAAE,CAEF,MAAM,GAAqB,MAAwB;GAClD,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;GACxE,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;AACxE,OAAI,UAAU,MACZ,QAAO,QAAQ;AACjB,UAAQ,EAAE,cAAc,EAAE,YAAa,EAAE,cAAc,EAAE;IACzD,CACD,MAAM,GAAG,MAAM;SAId;AACJ,SAAO,EAAE;;;AAOb,SAAgB,2BAA2B,GAA6B;CACtE,MAAM,KAAK,iBAAiB;EAC1B,QAAQ,EAAE;EACV,OAAO,EAAE;EACT,UAAU,EAAE;EACZ,SAAS,QAAQ,EAAE,UAAU;EAC7B,KAAK,EAAE;EACP,SAAS,EAAE;EACX,UAAU,EAAE;EACZ,UAAU,CAAC,CAAC,EAAE;EACf,CAAC;CAEF,MAAM,YAAY,EAAE,eAAe,IAAI,OAAO;CAC9C,MAAM,QAAQ;EAAC;EAAI;EAAI,KAAK,EAAE;EAAQ;AAEtC,KAAI,EAAE,MAAM;EACV,MAAM,OAAO,EAAE,KAAK,SAAS,YACzB,GAAG,EAAE,KAAK,MAAM,GAAG,UAAU,CAAC,OAC9B,EAAE;AACN,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,EAAE,QAAQ;EACZ,MAAM,cAAc;EACpB,MAAM,SAAS,EAAE,OAAO,SAAS,cAC7B,GAAG,EAAE,OAAO,MAAM,GAAG,YAAY,CAAC,OAClC,EAAE;AACN,QAAM,KAAK,IAAI,OAAO,IAAI,sBAAsB,IAAI,OAAO;YAEpD,EAAE,YAAY,SAAS,GAAG;AAEjC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,EAAE,aAAa;GAC7B,MAAM,cAAc,EAAE,KAAK,SAAS,MAChC,GAAG,EAAE,KAAK,MAAM,GAAG,IAAI,CAAC,OACxB,EAAE;AACN,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,MAAM,IAAI,YAAY;;;AAIxD,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,wBAAwB,aAAyC;CAC/E,MAAM,6BAAa,IAAI,KAAiC;AACxD,MAAK,MAAM,KAAK,aAAa;EAC3B,MAAM,MAAM,EAAE,YAAY;EAC1B,MAAM,OAAO,WAAW,IAAI,IAAI,IAAI,EAAE;AACtC,OAAK,KAAK,EAAE;AACZ,aAAW,IAAI,KAAK,KAAK;;CAG3B,MAAM,WAAW,YAAY,QAAO,MAAK,EAAE,OAAO,CAAC;CASnD,MAAM,WAAqB;EAPhB;GACT;GACA,UAAU,YAAY;GACtB,aAAa;GACb;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAuB;EAAG;CAGzE,MAAM,OAAO,CAAC,GAAG,WAAW,MAAM,CAAC,CAAC,MAAM,GAAG,MAAM;AAGjD,UAFc,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACjD,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACvC,EAAE,cAAc,EAAE;GAC1C;AAEF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,WAAW,IAAI,IAAI;AACjC,WAAS,KAAK,MAAM,IAAI,IAAI,MAAM,OAAO,IAAI,GAAG;AAChD,OAAK,MAAM,KAAK,OAAO;GACrB,MAAM,UAAU,EAAE,cAAc,IAAI,MAAM,EAAE,YAAY,KAAK;GAC7D,MAAM,WAAW,EAAE,SAAS,gBAAgB;AAC5C,YAAS,KAAK,OAAO,EAAE,OAAO,iBAAiB,EAAE,OAAO,QAAQ,EAAE,QAAQ,UAAU,WAAW;;AAEjG,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;AChM5B,MAAM,YAAY;CAChB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB,MAAM;AAK5B,eAAsB,kBAAkB,YAA0C;AAChF,KAAI,CAAC,WAAW,KAAK,YAAY,eAAe,CAAC,CAC/C,QAAO,EAAE;CAOX,MAAM,QAAQ,MAAM,OAAO,CAAC,sBAAsB,EAAE;EAClD,KAAK;EACL,QAPa,CACb,GAAG,UAAU,KAAI,MAAK,MAAM,EAAE,KAAK,EACnC,GAAG,cACJ;EAKC,UAAU;EACX,CAAC;CAEF,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,YAAY,KAAK;EACtC,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,SAAS,QAAQ;UAEpC;AACJ;;AAGF,MAAI,QAAQ,SAAS,cACnB;AAEF,UAAQ,KAAK;GAAE,MAAM;GAAM;GAAS,MAAM;GAAS,CAAC;;AAGtD,QAAO;;ACtCT,SAAgB,mBAAmB,OAAsC;CACvE,MAAM,UAAU,MAAM,MAAM;AAG5B,KAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAGT,KAAI,QAAQ,WAAW,KAAK,IAAI,QAAQ,WAAW,MAAM,IAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CAI7G,QAAO;EAAE,MAAM;EAAS,WAHN,QAAQ,WAAW,IAAI,GACrC,QAAQ,QAAQ,IAAI,QAAQ,IAAI,QAAQ,MAAM,EAAE,CAAC,GACjD,QAAQ,QAAA;EACuB;AAIrC,KAAI,QAAQ,WAAW,OAAO,EAAE;EAE9B,MAAM,KAAK,eADQ,iBAAiB,QAAQ,CACP;AACrC,MAAI,GACF,QAAO;GAAE,MAAM;GAAU,OAAO,GAAG;GAAO,MAAM,GAAG;GAAM;AAC3D,SAAO;;AAIT,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,UAAU,CACjE,QAAO,YAAY,QAAQ;AAI7B,KAAI,qBAAqB,KAAK,QAAQ,CACpC,QAAO;EAAE,MAAM;EAAU,OAAO,QAAQ,MAAM,IAAI,CAAC;EAAI,MAAM,QAAQ,MAAM,IAAI,CAAC;EAAI;AAItF,QAAO;;AAGT,SAAS,YAAY,KAAoC;AACvD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAE3B,MAAI,OAAO,aAAa,gBAAgB,OAAO,aAAa,kBAAkB;GAC5E,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO
,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AAGT,OAAI,MAAM,OAAO,UAAU,MAAM,UAAU,EAGzC,QAAO;IAAE,MAAM;IAAU;IAAO;IAAM,KAF1B,MAAM;IAEyB,WADzB,MAAM,SAAS,IAAI,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI,GAAG,KAAA;IACV;AAGxD,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,MAAI,OAAO,aAAa,cAAc;GACpC,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AACT,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,SAAO;SAEH;AACJ,SAAO;;;AAOX,SAAgB,0BAA0B,SAA0D;CAClG,MAAM,QAAQ,QAAQ,MAAM,wBAAwB;AACpD,KAAI,CAAC,MACH,QAAO,EAAE;CAEX,MAAM,SAAkD,EAAE;AAC1D,MAAK,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;EACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,MAAI,CAAC,GACH;AACF,MAAI,GAAG,OAAO,OACZ,QAAO,OAAO,GAAG;AACnB,MAAI,GAAG,OAAO,cACZ,QAAO,cAAc,GAAG;;AAE5B,QAAO;;AAST,MAAM,kBAAkB;CAAC;CAAW;CAAc;CAAS;AAK3D,eAAsB,eACpB,QACA,YACwD;AACxD,KAAI,OAAO,SAAS,QAClB,QAAO,iBAAiB,OAAO;AACjC,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,QAAO,EAAE,QAAQ,EAAE,EAAE;;AAKvB,SAAS,iBAAiB,QAAmD;CAC3E,MAAM,OAAO,OAAO;AACpB,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,SAAwB,EAAE;CAGhC,MAAM,YAAY,QAAQ,MAAM,SAAS;AACzC,KAAI,WAAW,UAAU,CACvB,MAAK,MAAM,SAAS,YAAY,WAAW,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,MAAI,CAAC,MAAM,aAAa,CACtB;EACF,MAAM,QAAQ,eAAe,QAAQ,WAAW,MAAM,KAAK,EAAE,UAAU,MAAM,OAAO;AACpF,MAAI,MACF,QAAO,KAAK,MAAM;;AAKxB,KAAI,OAAO,WAAW,GAAG;EACvB,MAAM,QAAQ,eAAe,MAAM,GAAG;AACtC,MAAI,MACF,QAAO,KAAK,MAAM;;AAGtB,QAAO,EAAE,QAAQ;;AAGnB,SAAS,eAAe,KAAa,UAAsC;CACzE,MAAM,cAAc,QAAQ,KAAK,WAAW;AAC5C,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;CAET,MAAM,UAAU,aAAa,aAAa,QAAQ;CAClD,MAAM,cAAc,0BAA0B,QAAQ;CACtD,MAAM,UAAU,IAAI,MAAM,IAAI,CAAC,KAAK;CACpC,MAAM,OAAO,YAAY,QAAQ;CAEjC,MAAM,QAAkD,EAAE;AAC1D,MAAK,MAAM,UAAU,iBAAiB;EACpC,MAAM,aAAa,QAAQ,KAAK,OAAO;AACvC,MAAI,CAAC,WAAW,WAAW,CACzB;AACF,OAAK,MAAM,QAAQ,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,OAAI,CAAC,KAAK,QAAQ,CAChB;AACF,SAAM,KAAK;IACT,MAAM,GAAG,OAAO,GAAG,KAAK;IACxB,SAAS,aAAa,QAAQ,YAAY,KAAK,KAAK,EAAE,QAAA;IACvD,CAAC;;;AAIN,QAAO;EACL;EACA,aAAa,YAAY,eAAe;EACxC,MAAM;EACN;EACA;EACD;;AAKH,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;AAC1B,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OAAO,QAAQ;AAExB,MAAI,QAAQ,QAAQ;GAClB,MAAM,WAAW,MAAM,OACrB,yBAAyB,MAAM,GAAG,KAAK,eACxC,CAAC,YAAY,KAAK;AACnB,OAAI,UAAU,OAAO,OACnB,QAAO,oBAAoB,OAAQ,MAAO,UAAU,UAAU,OAAO,WAAW,WAAW;;AAE/F,SAAO,EAAE,QAAQ,EAAE,EAAE;;AAGvB,QAAO,oBAAoB,OAAQ,MAAO,KAAK,MAAM,OAAO,WAAW,WAAW;;AAGpF,eAAe,oBACb,OACA,MACA,KACA,MACA,WACA,YACwD;CACxD,MAAM,WAAW,KAAK,MAAM,KAAI,MAAK,EAAE,KAAK;CAC5C,MAAM,YAAY,KAAK,MAAM;CAG7B,IAAI;AAEJ,KAAI,WAAW;EAEb,MAAM,aAAa,CACjB,GAAG,UAAU,YAEb,UAAU,SAAS,YAAY,GAAG,YAAY,KAC/C,CAAC,OAAO,QAAQ;AAEjB,iBAAe,SAAS,QAAO,MAAK,WAAW,SAAS,EAAE,CAAC;OAI3D,gBAAe,SAAS,QAAO,MAC7B,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAChD;AAGH,KAAI,aAAa,WAAW,EAC1B,QAAO;EAAE,QAAQ,EAAE;EAAE;EAAW;CAElC,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAO,MAAM,KAAK,OAAO;AAC9D,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc
,MAAM,eAAe,OAAO,MAAM,KAAK,SAAS;AACpE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO;EAAE;EAAQ;EAAW;;AAG9B,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QAC7D,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;AAarB,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;CAC1B,MAAM,YAAY,mBAAmB,GAAG,MAAM,GAAG,OAAO;AAExD,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,sCAAsC,UAAU,uBAAuB,IAAI,8BAC5E,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OACT,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,WAAW,KAAK,QAAO,MAAK,EAAE,SAAS,OAAO,CAAC,KAAI,MAAK,EAAE,KAAK;CAGrE,MAAM,eAAe,OAAO,YACxB,SAAS,QAAO,MAAK,MAAM,GAAG,OAAO,UAAU,WAAW,GAC1D,SAAS,QAAO,MAAK,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAAW;AAEnF,KAAI,aAAa,WAAW,EAC1B,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAQ,MAAO,KAAK,OAAO;AAChE,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAQ,MAAO,KAAK,SAAS;AACtE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO,EAAE,QAAQ;;AAGnB,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,sBAAsB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,QACpD,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;ACjarB,MAAa,eAAe;AAG5B,MAAa,oBAAoB,MAAc,IAAI,KAAK,IAAI;AAyB5D,eAAe,eAAe,OAAe,MAAc,KAAgC;AAIzF,SAHa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK,GACN,OAAO,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE;;AAc5C,eAAe,WAAW,OAAe,MAAc,SAAiB,aAAsB,YAAgD;CAC5I,MAAM,aAAa,CAAC,IAAI,WAAW,QAAQ;AAC3C,KAAI,YACF,YAAW,KAAK,GAAG,YAAY,GAAG,UAAU;AAE9C,MAAK,MAAM,OAAO,YAAY;EAC5B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,IAAI;AACpD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAK;GAAO;;AAI9B,KAAI,aAAa;EACf,MAAM,YAAY,MAAM,qBAAqB,OAAO,MAAM,YAAY;AACtE,MAAI,WAAW;GACb,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,UAAU;AAC1D,OAAI,MAAM,SAAS,EACjB,QAAO;IAAE,KAAK;IAAW;IAAO;;;CAKtC,MAAM,WAAW,aACb,CAAC,YAAY,GAAG,CAAC,QAAQ,SAAS,CAAC,QAAO,MAAK,MAAM,WAAW,CAAC,GACjE,CAAC,QAAQ,SAAS;AACtB,MAAK,MAAM,UAAU,UAAU;EAC7B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,OAAO;AACvD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAQ;GAAO,UAAU;GAAM;;AAGjD,QAAO;;AAOT,eAAe,qBAAqB,OAAe,MAAc,aAA6C;CAC5G,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CACnB,MAAM,SAAS,GAAG,YAAY;AAC9B,QAAO,MAAM,UAAU,MAAK,MAAK,EAAE,IAAI,WAAW,OAAO,CAAC,EAAE,OAAO;;AAMrE,SAAS,eAAe,OAAiB,YAA8B;AACrE,QAAO,MAAM,QAAO,MAAK,EAAE,WAAW,WAAW,IAAI,gBAAgB,KAAK,EAAE,CAAC;;AAI/E,MAAM,iBAAiB;CACrB;CACA;CACA;CACA;CACD;AAGD,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAWF,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,aAAa,IAAI,EAAE,aAAa,CAAC,CAAC;;AAM3D,SAAS,aAAa,MAAsB;AAC1C,QAAO,KAAK,MAAM,IAAI,CAAC,OAAO,QAAQ,CAAC;;AAMzC,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,cAAc,IAAI,EAAE,aA
Aa,CAAC,CAAC;;AAO5D,SAAS,YAAY,KAAa,WAA2B;CAC3D,MAAM,QAAQ,aAAa,IAAI,IAAI;AAEnC,QAAQ,aADU,eAAe,IAAI,GAAG,MAAM,KACb;;AAQnC,SAAS,iBAAiB,UAA2C;CACnE,MAAM,UAAU,SACb,QAAO,MAAK,gBAAgB,KAAK,EAAE,CAAC,CACpC,QAAO,MAAK,CAAC,eAAe,MAAK,MAAK,EAAE,KAAK,EAAE,CAAC,CAAC,CACjD,QAAO,MAAK,EAAE,SAAS,IAAI,CAAC;CAG/B,MAAM,6BAAa,IAAI,KAAuB;AAE9C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,UAAU,KAAK,YAAY,SAAS;AAC1C,MAAI,YAAY,GACd;EAEF,MAAM,SAAS,KAAK,MAAM,GAAG,UAAU,EAAgB;EACvD,MAAM,QAAQ,WAAW,IAAI,OAAO,IAAI,EAAE;AAC1C,QAAM,KAAK,KAAK;AAChB,aAAW,IAAI,QAAQ,MAAM;;AAG/B,KAAI,WAAW,OAAO,GAAG;EACvB,MAAM,UAAU,CAAC,GAAG,WAAW,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;AACpF,MAAI,QAAQ,GAAG,UAAU,GAAG;GAC1B,MAAM,aAAa,QAAQ;GAC3B,MAAM,UAAU,WAAW,YAAY,QAAQ;GAC/C,MAAM,cAAc,UAAU,IAAI,WAAW,MAAM,GAAG,QAAQ,GAAG;AACjE,UAAO;IAAE,OAAO,QAAQ;IAAI,QAAQ;IAAa;;;CAKrD,MAAM,4BAAY,IAAI,KAAuB;AAE7C,MAAK,MAAM,QAAQ,SAAS;AAC1B,MAAI,eAAe,KAAK,CACtB;EAGF,MAAM,YAAY,KAAK,YAAY,IAAI;AACvC,MAAI,cAAc,GAChB;EAEF,MAAM,MAAM,KAAK,MAAM,GAAG,YAAY,EAAE;EACxC,MAAM,QAAQ,UAAU,IAAI,IAAI,IAAI,EAAE;AACtC,QAAM,KAAK,KAAK;AAChB,YAAU,IAAI,KAAK,MAAM;;AAG3B,KAAI,UAAU,SAAS,EACrB,QAAO;CAGT,MAAM,SAAS,CAAC,GAAG,UAAU,SAAS,CAAC,CACpC,KAAK,CAAC,KAAK,YAAY;EAAE;EAAK;EAAO,OAAO,YAAY,KAAK,MAAM,OAAA;EAAS,EAAE,CAC9E,QAAO,MAAK,EAAE,MAAM,UAAU,EAAE,CAChC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAEpC,KAAI,OAAO,WAAW,EACpB,QAAO;CAET,MAAM,OAAO,OAAO;AAKpB,QAAO;EAAE,OAAO,KAAK;EAAO,QAAQ,KAAK;EAAK;;AAMhD,eAAe,cAAc,OAAe,MAAc,KAAa,aAAa,SAA4B;AAE9G,QAAO,eADO,MAAM,eAAe,OAAO,MAAM,IAAI,EACvB,WAAW;;AAO1C,eAAsB,aAAa,OAAe,MAAc,SAAiB,aAAsB,SAAiD;CACtJ,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU;EACZ,MAAM,MAAM,SAAS,OAAO;EAC5B,MAAM,WAAW,CAAC,SAAS;EAC3B,MAAM,QAAQ,MAAM,cAAc,SAAS,OAAO,SAAS,MAAM,KAAK,GAAG,SAAS,KAAK,GAAG;AAC1F,MAAI,MAAM,WAAW,EACnB,QAAO;AACT,SAAO;GACL,SAAS,qCAAqC,SAAS,MAAM,GAAG,SAAS,KAAK,GAAG;GACjF;GACA;GACA;GACD;;CAIH,MAAM,MAAM,MAAM,WAAW,OAAO,MAAM,SAAS,aADhC,UAAU,kBAAkB,QAAQ,GAAG,KAAA,EACiB;AAC3E,KAAI,CAAC,IACH,QAAO;CAET,IAAI,OAAO,eAAe,IAAI,OAAO,QAAQ;CAC7C,IAAI;CACJ,IAAI;AAGJ,KAAI,KAAK,WAAW,GAAG;EACrB,MAAM,aAAa,iBAAiB,IAAI,MAAM;AAC9C,MAAI,YAAY;AACd,UAAO,WAAW;AAClB,gBAAa,WAAW,UAAU,KAAA;AAClC,cAAW,IAAI;;;AAInB,KAAI,KAAK,WAAW,EAClB,QAAO;AAET,QAAO;EACL,SAAS,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI;EACnE,KAAK,IAAI;EACT,OAAO;EACP;EACA;EACA,UAAU,IAAI;EACf;;AAMH,SAAS,cAAc,GAAmB;AACxC,QAAO,EAAE,QAAQ,OAAO,GAAG,CAAC,QAAQ,iBAAiB,GAAG;;AAS1D,SAAgB,wBACd,WACA,WAC0C;AAC1C,KAAI,UAAU,WAAW,EACvB,QAAO;EAAE,SAAS;EAAM,YAAY;EAAG;CAGzC,MAAM,SAAS,UAAU,MAAM,GAAG,GAAG;CAGrC,MAAM,kBAAkB,OAAO,KAAK,SAAS;EAC3C,IAAI,OAAO,KAAK;AAEhB,MAAI,KAAK,WAAW,OAAO,CACzB,KAAI;AACF,UAAO,IAAI,IAAI,KAAK,CAAC;UAEjB;AAER,SAAO,cAAc,KAAK;GAC1B;CAGF,MAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,cAAc,CAAC;CAE5D,IAAI,UAAU;AACd,MAAK,MAAM,YAAY,gBAErB,MAAK,MAAM,YAAY,eACrB,KAAI,aAAa,YAAY,SAAS,SAAS,IAAI,WAAW,EAAE;AAC9D;AACA;;CAKN,MAAM,aAAa,UAAU,OAAO;AACpC,QAAO;EAAE,SAAS,cAAc;EAAK;EAAY;;AAOnD,eAAe,cAAc,OAAe,MAAc,aAAuC;CAC/F,MAAM,OAAO,qCAAqC,MAAM,GAAG,KAAK;CAEhE,MAAM,QAAQ;EACZ;EACA,YAHgB,YAAY,QAAQ,UAAU,GAAG,CAG3B;EACtB,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,CAAC;EAC7D;AACD,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,MAAM,UAAU,GAAG,KAAK,GAAG,OAAO;AAC/C,MAAI,CAAC,KACH;AACF,MAAI;AAEF,OADY,KAAK,MAAM,KAAK,CACpB,SAAS,YACf,QAAO;UAEL;;AAER,QAAO;;AAGT,eAAsB,iBAAiB,aAA6C;CAElF,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;AACnD,MAAK,MAAM,aAAa,CAAC,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,EAAE,UAAU,EAAE;AAEpF,MAAI,CAAC,UAAU,SAAS,IAAI,EAAE;AAG5B,QADgB,MAAM,OAAO,IAAI,yBAAyB,UAAU,GAAG,YAAY,CAAC,YAAY,KAAK,GACxF,GACX,QAAO,sBAAsB,UAAU,GAAG;AAC5C;;AAGF,OADgB,MAAM,OAAO,IAAI,yBAAyB,YAAY,CAAC,YAAY,KAAK,GAC3E,GACX,QAAO,
sBAAsB;;CAIjC,MAAM,aAAa,YAAY,QAAQ,MAAM,GAAG;AAChD,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAU;GAAS;GAAY;GAAU;GAAY;GAAW;GAAI,EAAE;GAC9G,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,QAAQ,KAAK,MAAM,KAAK;EAE9B,MAAM,QAAQ,MAAM,MAAK,MACvB,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAC/D,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACpE;AACD,MAAI,MACF,QAAO,sBAAsB,MAAM;AAErC,OAAK,MAAM,aAAa,OAAO;GAC7B,MAAM,KAAK,eAAe,sBAAsB,UAAU,WAAW;AACrE,OAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;SAGvC;CAOR,MAAM,OAAO,MAAM,OACjB,gDAFY,mBAAmB,GAAG,WAAW,UAAU,CAED,aACvD,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,MAAM,OAAO,OAChB,QAAO;CAGT,MAAM,QAAQ,KAAK,MAAM,MAAK,MAC5B,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAChE,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACrE;AACD,KAAI,MACF,QAAO,sBAAsB,MAAM;AAGrC,MAAK,MAAM,aAAa,KAAK,OAAO;EAClC,MAAM,KAAK,eAAe,sBAAsB,UAAU,YAAY;AACtE,MAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;AAG3C,QAAO;;AAOT,eAAsB,oBAAoB,OAAe,MAAc,aAA6D;CAClI,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU,SACZ,QAAO,EAAE,UAAU,SAAS,UAAU;AAGxC,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAM;GAAa,EAAE;GAC9F,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,SAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;SAElD;CAKR,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,QAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;;AAMxD,eAAsB,YAAY,OAAe,MAAc,QAAiB,KAAsC;CAIpH,MAAM,UAAU,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAJ5B,OAAO,OAIqC,GAAG,OAAO,cACjE,yBAAyB,MAAM,GAAG,KAAK,SAAS,MAAM,QAAQ,QAAQ;AAI1E,MAFgB,MAAM,OAAO,IAAI,QAAQ,CAAC,YAAY,KAAK,GAE9C,GACX,QAAO,UAAU,MAAM,GAAG,OAAO,SAAS,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ;CAKlF,MAAM,WAAW,SAAS,GAAG,OAAO,KAAK;CACzC,MAAM,WAAW,MAAM,CAAC,IAAI,GAAG,CAAC,QAAQ,SAAS;AACjD,MAAK,MAAM,KAAK,SACd,MAAK,MAAM,YAAY;EAAC;EAAa;EAAa;EAAY,EAAE;EAC9D,MAAM,YAAY,qCAAqC,MAAM,GAAG,KAAK,GAAG,EAAE,GAAG,WAAW;AAExF,OADY,MAAM,OAAO,IAAI,UAAU,CAAC,YAAY,KAAK,GAChD,GACP,QAAO;;AAIb,QAAO;;AAkFT,eAAsB,mBAAmB,KAAqC;AAE5E,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,MAAM,WAAW,cAAc,IAAI;AACnC,MAAI,CAACA,WAAa,SAAS,CACzB,QAAO;AACT,SAAOC,aAAe,UAAU,QAAQ;;AAG1C,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,IAAI,OAAO,IAAI,QAAQ,WAAW,GAAG;EACrC,IAAI,MAAM;EAGV,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,MAAI,UAAU,IAAI;AAChB,SAAM,KAAK,MAAM,QAAQ,EAAE;AAC3B,UAAO,KAAK,MAAM,GAAG,MAAM;;EAG7B,MAAM,QAAQ,KAAK,MAAM,IAAI;EAC7B,MAAM,QAAQ,MAAM;EACpB,MAAM,OAAO,MAAM;EACnB,MAAM,SAAS,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;EAMvC,MAAM,OAAO,MAAM,OAJH,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,OAAO,cAC9D,yBAAyB,MAAM,GAAG,KAAK,cAAc,OAEtB,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;AAC9E,MAAI,CAAC,KACH,QAAO;AAET,MAAI;GACF,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,UAAO,KAAK,YAAY,KAAK,MAAM,YAAY;UAE3C;AACJ,UAAO;;;AAIX,QAAO,UAAU,IAAI;;AC9oBvB,eAAsB,aAAa,SAAyC;CAE1E,MAAM,UAAU,GADD,IAAI,IAAI,QAAQ,CAAC,OACN;AAC1B,KAAI,MAAM,UAAU,QAAQ,CAC1B,QAAO;AACT,QAAO;;AAMT,eAAsB,aAAa,KAA0C;CAC3E,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,KAAI,CAAC,WAAW,QAAQ,SAAS,GAC/B,QAAO;AAET,QAAO;EACL,KAAK;EACL,OAAO,mBAAmB,QAAA;EAC3B;;AAMH,SAAgB,mBAAmB,SAA6B;CAC9D,MAAM,QAAoB,EAAE;CAC5B,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,YAAY;AAClB,MAAK,IAAI,QAAQ,UAAU,KAAK,QAAQ,EAAE,UAAU,MAAM,QAAQ,UAAU,KAAK,QAAQ,EAAE;EACzF,MAAM,MAAM,MAAM;AAClB,MAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,QAAK,IAAI,IAAI;AACb,SAAM,KAAK;IAAE,OAAO,MAAM;IAAK;IAAK,CAAC;;;AAIzC,QAAO;;AAOT,SAAS,UAAU,KAAsB;AACvC,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,MAAI,OAAO,aAAa,SACtB,QAAO;EACT,MAAM,OAAO,OAAO;AAEpB,MAAI,SAAS,eAAe,SAAS,eAAe,SAAS,MAC3D,QAAO;AACT,MAAI,SAAS,kBA
CX,QAAO;AACT,MAAI,mDAAmD,KAAK,KAAK,CAC/D,QAAO;AACT,MAAI,KAAK,WAAW,IAAI,CACtB,QAAO;AACT,SAAO;SAEH;AAAE,SAAO;;;AAGjB,eAAsB,iBACpB,aACA,SACA,YACuB;CACvB,MAAM,QAAQ,OAAO,EAAE;CACvB,IAAI,YAAY;AAoBhB,SAlBgB,MAAM,QAAQ,IAC5B,YAAY,MAAM,KAAI,SAAQ,MAAM,YAAY;EAC9C,MAAM,MAAM,KAAK,IAAI,WAAW,OAAO,GACnC,KAAK,MACL,GAAG,QAAQ,QAAQ,OAAO,GAAG,GAAG,KAAK,IAAI,WAAW,IAAI,GAAG,KAAK,MAAM,KAAK;AAE/E,MAAI,CAAC,UAAU,IAAI,CACjB,QAAO;AAET,eAAa,KAAK,KAAK,aAAa,YAAY,MAAM,OAAO;EAE7D,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,MAAI,WAAW,QAAQ,SAAS,IAC9B,QAAO;GAAE,KAAK,KAAK;GAAK,OAAO,KAAK;GAAO;GAAS;AACtD,SAAO;GACP,CAAC,CACJ,EAEc,QAAQ,MAAuB,MAAM,KAAK;;AAO3D,SAAgB,mBAAmB,SAAiB,SAA0B;CAC5E,IAAI,aAAa;AAGjB,KAAI,SAAS;EAEX,MAAM,UADO,QAAQ,QAAQ,OAAO,GAAG,CAClB,QAAQ,uBAAuB,OAAO;AAC3D,eAAa,WAAW,QACtB,IAAI,OAAO,SAAS,QAAQ,mBAAmB,IAAI,EACnD,cACD;;AAIH,cAAa,WAAW,QAAQ,wBAAwB,eAAe;AAEvE,QAAO;;AAOT,SAAgB,gBAAgB,SAAiB,UAAmC;CAClF,MAAM,WAAqB,EAAE;CAC7B,MAAM,QAAQ,QAAQ,MAAM,UAAU;AAEtC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,WAAW,KAAK,MAAM,kBAAkB;AAC9C,MAAI,CAAC,SACH;EAEF,MAAM,MAAM,SAAS;AACrB,MAAI,SAAS,MAAK,MAAK,IAAI,SAAS,EAAE,CAAC,EAAE;GACvC,MAAM,eAAe,KAAK,QAAQ,MAAM,KAAK,QAAQ,OAAO,CAAC;AAC7D,OAAI,eAAe,GACjB,UAAS,KAAK,KAAK,MAAM,eAAe,EAAE,CAAC;;;AAKjD,KAAI,SAAS,WAAW,EACtB,QAAO;AACT,QAAO,SAAS,KAAK,cAAc;;ACnIrC,eAAsB,kBAAkB,OAAe,OAAO,GAA4E;CACxI,MAAM,OAAO,MAAM,OAEhB,+CAA+C,mBAAmB,MAAM,CAAC,QAAQ,OAAO,CAAC,YAAY,KAAK;AAE7G,KAAI,CAAC,MAAM,SAAS,OAClB,QAAO,EAAE;AAEX,QAAO,KAAK,QAAQ,KAAI,OAAM;EAC5B,MAAM,EAAE,QAAQ;EAChB,aAAa,EAAE,QAAQ;EACvB,SAAS,EAAE,QAAQ;EACpB,EAAE;;AAML,eAAsB,gBAAgB,aAAqD;CAEzF,MAAM,OAAO,MAAM,OAAuB,qBAAqB,YAAY,eAAe,CAAC,YAAY,KAAK;AAC5G,KAAI,KACF,QAAO;AAGT,QAAO,OAAuB,8BAA8B,YAAY,SAAS,CAAC,YAAY,KAAK;;AAgBrG,eAAsB,qBAAqB,aAAqB,SAA2C;CACzG,MAAM,OAAO,MAAM,OAGhB,8BAA8B,cAAc,CAAC,YAAY,KAAK;AAEjE,KAAI,CAAC,KACH,QAAO,EAAE;CAGX,MAAM,WAAoD,KAAK,eAC3D,OAAO,YACL,OAAO,QAAQ,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK,SAAS,CACpD,KACA;EAAE,SAAS;EAAK,YAAY,KAAK,OAAO;EAAM,CAC/C,CAAC,CACH,GACD,KAAA;AAEJ,QAAO;EACL,YAAY,KAAK,OAAO,YAAY,KAAA;EACpC;EACD;;AAkBH,eAAe,cACb,IACA,eACA,KACA,QACA,UACA,YACA,MAC+B;CAC/B,IAAI;AAGJ,KAAI,eAAe;AACjB,eAAa,cAAc;EAC3B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,eAAe,IAAI,MAAM,MAAM,WAAW;AAChG,MAAI,SAAS;AACX,UAAO,aAAa,QAAQ;AAC5B,UAAO,SAAS,QAAQ;AACxB,UAAO,kBAAkB,QAAQ;AACjC,cAAW,QAAQ;AACnB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,QAAQ;IACb,QAAQ;IACR,SAAS,QAAQ,WACb,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ,IAAI,gBAAgB,cAAc,KACnF,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ;IACtD,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,OAAO,QAAQ,SAAS,cAAc;GAC9C,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,CAAC,OAAO,SAAS;AACnB,eAAa,cAAc;EAC3B,MAAM,WAAW,MAAM,oBAAoB,GAAG,OAAO,GAAG,MAAM,IAAI,KAAK;AACvE,MAAI,UAAU,YAAY,CAAC,iBAAiB,SAAS,SAAS,EAAE;AAC9D,UAAO,UAAU,SAAS;AAC1B,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,OAAO;IACZ,QAAQ;IACR,SAAS,mBAAmB,SAAS;IACtC,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,OAAO;GACZ,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,cAAa,SAAS;CACtB,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,MAAM,QAAQ,OAAO,OAAO;AACnF,KAAI,WAAW;AACb,SAAO,YAAY;AACnB,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK;GACL,QAAQ;GACT,CAAC;OAGF,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,GAAG,OAAO,QAAQ;EACvB,QAAQ;EACR,SAAS;EACV,CAAC;AAGJ,QAAO;;AAMT,eAAsB,mBAAmB,aAAqB,UAA0B,EAAE,EAAmC;AAE3H,SADe,MAAM,+BAA+B,aAAa,QAAQ,EAC3D;;AAMhB,eAAsB,+BAA+B,aAAqB,UAA0B,EAAE,EAA0B;CAC9H,MAAM,WAA6B,EAAE;CACrC,MAAM,EAAE,eAAe;AAEvB,cAAa,MAAM;CACnB,MAAM,MAAM,MAAM,gBAAgB,YAAY;AAC9C,KAAI,CAAC,KAAK;AACR,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK,8BAA8B,YAAY;GAC/C,QAAQ;GACR,SAAS;GACV,CAAC;AACF,SAAO;GAAE,SAAS;GAAM;GAAU;;AAGpC,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,8BAA8B,YAAY;EAC/C,QAAQ;EACR,SAAS,SAAS,IAAI,KAAK,GAAG,IAAI;EACnC,CAAC;CAGF,MAAM,eAAe,IAAI,UACrB,MAAM
,qBAAqB,aAAa,IAAI,QAAQ,GACpD,EAAE;CAEN,MAAM,SAA0B;EAC9B,MAAM,IAAI;EACV,SAAS,IAAI;EACb,YAAY,aAAa;EACzB,aAAa,IAAI;EACjB,cAAc,IAAI;EAClB,UAAU,aAAa;EACxB;CAGD,IAAI;CAGJ,IAAI;CACJ,IAAI;AACJ,KAAI,OAAO,IAAI,eAAe,YAAY,IAAI,YAAY,KAAK;AAC7D,eAAa,IAAI,WAAW;EAC5B,MAAM,aAAa,iBAAiB,WAAW;AAE/C,MAAI,CAAC,WAAW,SAAS,MAAM,IAAI,WAAW,SAAS,IAAI,IAAI,CAAC,WAAW,SAAS,IAAI,CACtF,QAAO,UAAU,sBAAsB;MAEvC,QAAO,UAAU;AACnB,WAAS,IAAI,WAAW;YAEjB,OAAO,IAAI,eAAe,SACjC,KAAI,IAAI,WAAW,SAAS,MAAM,EAAE;EAElC,MAAM,KAAK,eAAe,IAAI,WAAW;AACzC,MAAI,GACF,QAAO,UAAU,sBAAsB,GAAG,MAAM,GAAG,GAAG;QAErD;EAEH,MAAM,OAAO,IAAI,WAAW,QAAQ,YAAY,GAAG;AACnD,MAAI,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,SAAS,IAAI,CAC3C,QAAO,UAAU,sBAAsB;;AAK7C,KAAI,IAAI,YAAY,CAAC,gBAAgB,IAAI,SAAS,IAAI,CAAC,iBAAiB,IAAI,SAAS,CACnF,QAAO,UAAU,IAAI;AAIvB,KAAI,OAAO,SAAS,SAAS,aAAa,EAAE;EAC1C,MAAM,KAAK,eAAe,OAAO,QAAQ;AACzC,MAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,YAAY;GAAE;GAAY;GAAQ,CAAC;YAG9G,CAAC,OAAO,SAAS;AAExB,eAAa,gBAAgB;EAC7B,MAAM,cAAc,MAAM,iBAAiB,IAAI,KAAK;AACpD,MAAI,aAAa;AACf,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS,4BAA4B;IACtC,CAAC;GAEF,MAAM,KAAK,eAAe,YAAY;AACtC,OAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,WAAW;QAI7F,UAAS,KAAK;GACZ,QAAQ;GACR,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,SAAS;AAClB,eAAa,WAAW;EACxB,MAAM,UAAU,MAAM,aAAa,OAAO,QAAQ;AAClD,MAAI,SAAS;AACX,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACT,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,IAAI,IAAI,OAAO,QAAQ,CAAC,OAAO;GACvC,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,cAAc,OAAO,WAAW,iBAAiB;EAC1D,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ;AACtD,MAAI,eAAe,YAAY,MAAM,SAAS,GAAG;GAC/C,MAAM,aAAa,wBAAwB,YAAY,OAAO,gBAAgB;AAC9E,OAAI,CAAC,WAAW,SAAS;AACvB,aAAS,KAAK;KACZ,QAAQ;KACR,KAAK,OAAO;KACZ,QAAQ;KACR,SAAS,kDAAkD,KAAK,MAAM,WAAW,aAAa,IAAI,CAAC;KACpG,CAAC;AACF,WAAO,aAAa,KAAA;AACpB,WAAO,SAAS,KAAA;;;;AAMtB,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,cAAc,QAAQ,KAAK;AAChG,eAAa,QAAQ;EACrB,MAAM,SAAS,KAAK,QAAQ,KAAK,gBAAgB,YAAY;EAE7D,MAAM,aAAa,WAAW,OAAO,IAAI,YAAY,OAAO,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC/F,MAAI,YAAY;GACd,MAAM,aAAa,KAAK,QAAQ,WAAW;AAC3C,UAAO,YAAY,cAAc,WAAW,CAAC;AAC7C,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS;IACV,CAAC;;;AAKN,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,WACrE,QAAO;EAAE,SAAS;EAAM;EAAU;AAGpC,QAAO;EAAE,SAAS;EAAQ;EAAU;;AAMtC,SAAgB,sBACd,MACA,SACA,KACwB;AAExB,KAAI,QAAQ,WAAW,QAAQ,EAAE;EAE/B,MAAM,gBAAgB,KADL,QAAQ,KAAK,QAAQ,MAAM,EAAE,CAAC,EACV,eAAe;AACpD,MAAI,WAAW,cAAc,EAAE;GAC7B,MAAM,YAAY,KAAK,MAAM,aAAa,eAAe,QAAQ,CAAC;AAClE,UAAO;IACL,MAAM,UAAU,QAAQ;IACxB,SAAS,UAAU,WAAW;IAC/B;;AAEH,SAAO;;AAIT,KAAI,QAAQ,WAAW,OAAO,EAAE;EAC9B,MAAM,YAAY,QAAQ,MAAM,EAAE;EAClC,MAAM,UAAU,UAAU,WAAW,IAAI,GACrC,UAAU,QAAQ,KAAK,EAAE,GACzB,UAAU,QAAQ,IAAI;EAC1B,MAAM,WAAW,UAAU,IAAI,UAAU,MAAM,GAAG,QAAQ,GAAG;AAC7D,SAAO;GAAE,MAAM;GAAU,SAAS,wBAAwB,UAAU,IAAI,IAAI;GAAK;;AAInF,KAAI,QAAQ,WAAW,QAAQ,IAAI,QAAQ,WAAW,OAAO,IAAI,QAAQ,WAAW,OAAO,CACzF,QAAO;CAKT,MAAM,YAAY,wBAAwB,MAAM,IAAI;AACpD,KAAI,UACF,QAAO;EAAE;EAAM,SAAS;EAAW;AAGrC,KAAI,cAAc,KAAK,QAAQ,CAC7B,QAAO;EAAE;EAAM,SAAS,QAAQ,QAAQ,aAAa,GAAA;EAAK;AAI5D,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,aAAa,CACpE,QAAO;EAAE;EAAM,SAAS;EAAK;AAE/B,QAAO;;AAOT,SAAgB,wBAAwB,MAAc,KAA4B;AAChF,KAAI;EACF,MAAM,WAAW,gBAAgB,GAAG,KAAK,gBAAgB,EAAE,KAAK,KAAK,CAAC;AAEtE,SADY,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC,CAC5C,WAAW;SAElB;AAGJ,MAAI;GAEF,IAAI,MAAM,QADI,gBAAgB,MAAM,EAAE,KAAK,KAAK,CAAC,CACzB;AACxB,UAAO,OAAO,SAAS,IAAI,KAAK,gBAAgB;IAC9C,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,QAAI,WAAW,QAAQ,CAErB,QADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;A
AExB,UAAM,QAAQ,IAAI;;UAGhB;AACN,SAAO;;;AAOX,eAAsB,sBAAsB,KAAyC;CACnF,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,KAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CACtD,MAAM,OAA+B;EACnC,GAAG,IAAI;EACP,GAAG,IAAI;EACR;CAED,MAAM,UAA6B,EAAE;AAErC,MAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QAAQ,KAAK,EAAE;AAElD,MAAI,KAAK,WAAW,UAAU,IAAI;GAAC;GAAc;GAAU;GAAY;GAAU;GAAO,CAAC,SAAS,KAAK,CACrG;EAGF,MAAM,SAAS,sBAAsB,MAAM,SAAS,IAAI;AACxD,MAAI,OACF,SAAQ,KAAK,OAAO;;AAIxB,QAAO;;AAcT,SAAgB,qBAAqB,WAA4C;CAC/E,MAAM,UAAU,KAAK,WAAW,eAAe;AAC/C,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;CAET,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CAEtD,IAAI;AACJ,KAAI,IAAI,YAAY,IAClB,WAAU,iBAAiB,IAAI,WAAW,IAAI;UAEvC,OAAO,IAAI,eAAe,SACjC,WAAU,iBAAiB,IAAI,WAAW;AAG5C,QAAO;EACL,MAAM,IAAI;EACV,SAAS,IAAI,WAAW;EACxB,aAAa,IAAI;EACjB;EACA;EACD;;AAMH,eAAsB,wBAAwB,WAAoD;CAChG,MAAM,OAAO,qBAAqB,UAAU;AAC5C,KAAI,CAAC,KACH,QAAO;CAET,MAAM,SAA0B;EAC9B,MAAM,KAAK;EACX,SAAS,KAAK;EACd,aAAa,KAAK;EAClB,SAAS,KAAK;EACf;AAGD,KAAI,KAAK,SAAS,SAAS,aAAa,EAAE;EACxC,MAAM,KAAK,eAAe,KAAK,QAAQ;AACvC,MAAI,IAAI;GAEN,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,KAAK,SAAS,KAAK,KAAK;AAC9E,OAAI,SAAS;AACX,WAAO,aAAa,QAAQ;AAC5B,WAAO,SAAS,QAAQ;AACxB,WAAO,kBAAkB,QAAQ;;GAInC,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,KAAA,GAAW,OAAO,OAAO;AAChF,OAAI,UACF,QAAO,YAAY;;;AAMzB,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,YAAY;EAC3C,MAAM,aAAa,YAAY,UAAU,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC5E,MAAI,WACF,QAAO,YAAY,cAAc,KAAK,WAAW,WAAW,CAAC,CAAC;;AAIlE,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,WAC/B,QAAO;AAGT,QAAO;;AAUT,eAAsB,aAAa,MAAc,SAAyC;CACxF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,SAAS,KAAK,UAAU,MAAM;AAGpC,KAAI,WAAW,KAAK,QAAQ,eAAe,CAAC,CAC1C,QAAO;CAGT,MAAM,OAAO,MAAM,OACjB,8BAA8B,KAAK,GAAG,UACvC,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,KACH,QAAO;CACT,MAAM,aAAa,KAAK,MAAM;AAC9B,KAAI,CAAC,WACH,QAAO;CAGT,MAAM,aAAa,MAAM,MAAM,YAAY,EACzC,SAAS,EAAE,cAAc,cAAc,EACxC,CAAC,CAAC,YAAY,KAAK;AAEpB,KAAI,CAAC,YAAY,MAAM,CAAC,WAAW,KACjC,QAAO;AAET,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;CAEtC,MAAM,aAAa,KAAK,UAAU,WAAW;CAC7C,MAAM,aAAa,kBAAkB,WAAW;CAGhD,MAAM,SAAS,WAAW,KAAK,WAAW;AAC1C,OAAM,IAAI,SAAe,KAAK,WAAW;EACvC,MAAM,WAAW,IAAI,SAAS,EAC5B,MAAM,OAAO,WAAW,UAAU;AAChC,cAAW,MAAM,OAAO,SAAS;KAEpC,CAAC;AACF,WAAS,GAAG,gBAAgB;AAC1B,cAAW,KAAK;AAChB,QAAK;IACL;AACF,WAAS,GAAG,SAAS,OAAO;EAE5B,SAAS,OAAO;AACd,UAAO,MAAM,CAAC,MAAM,EAAE,MAAM,YAAY;AACtC,QAAI,MAAM;AACR,cAAS,KAAK;AACd;;AAEF,aAAS,MAAM,aAAa,MAAM,CAAC;KACnC,CAAC,MAAM,OAAO;;AAElB,QAAM;GACN;CAGF,MAAM,EAAE,WAAW,UAAU,OAAO;EAAC;EAAO;EAAY;EAAwB;EAAM;EAAO,EAAE,EAAE,OAAO,UAAU,CAAC;AACnH,KAAI,WAAW,GAAG;AAChB,SAAO,QAAQ;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAChD,SAAO,YAAY,EAAE,OAAO,MAAM,CAAC;AACnC,SAAO;;AAGT,YAAW,WAAW;AACtB,QAAO;;AAMT,eAAsB,mBAAmB,aAA6C;AAIpF,SAHa,MAAM,OACjB,qBAAqB,YAAY,eAClC,CAAC,YAAY,KAAK,GACN,WAAW;;AAM1B,SAAgB,yBAAyB,UAAiC;CACxE,MAAM,YAAY,KAAK,UAAU,WAAW;AAC5C,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;AAIT,QAFgB,aAAa,WAAW,QAAQ,CAC1B,MAAM,6BAA6B,GAC1C,MAAM"}
+ {"version":3,"file":"npm.mjs","names":["fsExistsSync","fsReadFileSync"],"sources":["../../src/sources/github-common.ts","../../src/sources/issues.ts","../../src/sources/utils.ts","../../src/sources/releases.ts","../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/github.ts","../../src/sources/llms.ts","../../src/sources/npm.ts"],"sourcesContent":["/**\n * Shared constants and helpers for GitHub source modules (issues, discussions, releases)\n */\n\nexport const BOT_USERS = new Set([\n 'renovate[bot]',\n 'dependabot[bot]',\n 'renovate-bot',\n 'dependabot',\n 'github-actions[bot]',\n])\n\n/** Extract YYYY-MM-DD date from an ISO timestamp */\nexport const isoDate = (iso: string) => iso.split('T')[0]\n\n/** Build YAML frontmatter from a key-value object, auto-quoting strings with special chars */\nexport function buildFrontmatter(fields: Record<string, string | number | boolean | undefined>): string {\n const lines = ['---']\n for (const [k, v] of Object.entries(fields)) {\n if (v !== undefined)\n lines.push(`${k}: ${typeof v === 'string' && /[:\"[\\]]/.test(v) ? `\"${v.replace(/\"/g, '\\\\\"')}\"` : v}`)\n }\n lines.push('---')\n return lines.join('\\n')\n}\n","/**\n * GitHub issues fetching via gh CLI Search API\n * Sorted by reactions (upvotes), 75% open / 25% closed (within last year)\n * Categorized by labels, noise filtered out\n */\n\nimport { spawnSync } from 'node:child_process'\n\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common'\n\nexport type IssueType = 'bug' | 'question' | 'docs' | 'feature' | 'other'\n\nexport interface IssueComment {\n body: string\n author: string\n reactions: number\n}\n\nexport interface GitHubIssue {\n number: number\n title: string\n state: string\n labels: string[]\n body: string\n createdAt: string\n url: string\n reactions: number\n comments: number\n type: IssueType\n topComments: IssueComment[]\n}\n\nlet _ghAvailable: boolean | undefined\n\n/**\n * Check if gh CLI is installed and authenticated (cached)\n */\nexport function isGhAvailable(): boolean {\n if (_ghAvailable !== undefined)\n return _ghAvailable\n const { status } = spawnSync('gh', ['auth', 'status'], { stdio: 'ignore' })\n return (_ghAvailable = status === 0)\n}\n\n/** Labels that indicate noise — filter these out entirely */\nconst NOISE_LABELS = new Set([\n 'duplicate',\n 'stale',\n 'invalid',\n 'wontfix',\n 'won\\'t fix',\n 'spam',\n 'off-topic',\n 'needs triage',\n 'triage',\n])\n\n/** Labels that indicate feature requests — deprioritize */\nconst FEATURE_LABELS = new Set([\n 'enhancement',\n 'feature',\n 'feature request',\n 'feature-request',\n 'proposal',\n 'rfc',\n 'idea',\n 'suggestion',\n])\n\nconst BUG_LABELS = new Set([\n 'bug',\n 'defect',\n 'regression',\n 'error',\n 'crash',\n 'fix',\n 'confirmed',\n 'verified',\n])\n\nconst QUESTION_LABELS = new Set([\n 'question',\n 'help wanted',\n 'support',\n 'usage',\n 'how-to',\n 'help',\n 'assistance',\n])\n\nconst DOCS_LABELS = new Set([\n 'documentation',\n 'docs',\n 'doc',\n 'typo',\n])\n\n/**\n * Classify an issue by its labels into a type useful for skill generation\n */\nexport function classifyIssue(labels: string[]): IssueType {\n const lower = labels.map(l => l.toLowerCase())\n if (lower.some(l => BUG_LABELS.has(l)))\n return 'bug'\n if (lower.some(l => QUESTION_LABELS.has(l)))\n return 'question'\n if (lower.some(l => DOCS_LABELS.has(l)))\n return 'docs'\n if (lower.some(l => 
FEATURE_LABELS.has(l)))\n return 'feature'\n return 'other'\n}\n\n/**\n * Check if an issue should be filtered out entirely\n */\nfunction isNoiseIssue(issue: { labels: string[], title: string, body: string }): boolean {\n const lower = issue.labels.map(l => l.toLowerCase())\n if (lower.some(l => NOISE_LABELS.has(l)))\n return true\n // Tracking/umbrella issues — low signal for skill generation\n if (issue.title.startsWith('☂️') || issue.title.startsWith('[META]') || issue.title.startsWith('[Tracking]'))\n return true\n return false\n}\n\n/**\n * Body truncation limit based on reactions — high-reaction issues deserve more space\n */\nfunction bodyLimit(reactions: number): number {\n if (reactions >= 10)\n return 2000\n if (reactions >= 5)\n return 1500\n return 800\n}\n\n/**\n * Fetch issues for a state using GitHub Search API sorted by reactions\n */\nfunction fetchIssuesByState(\n owner: string,\n repo: string,\n state: 'open' | 'closed',\n count: number,\n releasedAt?: string,\n): GitHubIssue[] {\n const fetchCount = Math.min(count * 3, 100)\n let datePart = ''\n if (state === 'closed') {\n if (releasedAt) {\n // For older versions, include issues closed up to 6 months after release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+closed:<=${isoDate(date.toISOString())}`\n }\n else {\n datePart = `+closed:>${oneYearAgo()}`\n }\n }\n else if (releasedAt) {\n // For older versions, only include issues created around or before release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+created:<=${isoDate(date.toISOString())}`\n }\n\n const q = `repo:${owner}/${repo}+is:issue+is:${state}${datePart}`\n\n const { stdout: result } = spawnSync('gh', [\n 'api',\n `search/issues?q=${q}&sort=reactions&order=desc&per_page=${fetchCount}`,\n '-q',\n '.items[] | {number, title, state, labels: [.labels[]?.name], body, createdAt: .created_at, url: .html_url, reactions: .reactions[\"+1\"], comments: .comments, user: .user.login, userType: .user.type}',\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return []\n\n return result\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(line => JSON.parse(line) as GitHubIssue & { user: string, userType: string })\n .filter(issue => !BOT_USERS.has(issue.user) && issue.userType !== 'Bot')\n .filter(issue => !isNoiseIssue(issue))\n .map(({ user: _, userType: __, ...issue }) => ({\n ...issue,\n type: classifyIssue(issue.labels),\n topComments: [] as IssueComment[],\n }))\n // Deprioritize feature requests — push to end\n .sort((a, b) => (a.type === 'feature' ? 1 : 0) - (b.type === 'feature' ? 
1 : 0))\n .slice(0, count)\n}\n\nfunction oneYearAgo(): string {\n const d = new Date()\n d.setFullYear(d.getFullYear() - 1)\n return isoDate(d.toISOString())!\n}\n\n/**\n * Batch-fetch top comments for issues via GraphQL.\n * Enriches the top N highest-reaction issues with their most-reacted comments.\n */\nfunction enrichWithComments(owner: string, repo: string, issues: GitHubIssue[], topN = 10): void {\n // Only fetch comments for issues worth enriching\n const worth = issues\n .filter(i => i.comments > 0 && (i.type === 'bug' || i.type === 'question' || i.reactions >= 3))\n .sort((a, b) => b.reactions - a.reactions)\n .slice(0, topN)\n\n if (worth.length === 0)\n return\n\n // Build a single GraphQL query fetching comments for all selected issues\n const fragments = worth.map((issue, i) =>\n `i${i}: issue(number: ${issue.number}) { comments(first: 3) { nodes { body author { login } reactions { totalCount } } } }`,\n ).join(' ')\n\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { ${fragments} } }`\n\n try {\n const { stdout: result } = spawnSync('gh', [\n 'api',\n 'graphql',\n '-f',\n `query=${query}`,\n '-f',\n `owner=${owner}`,\n '-f',\n `repo=${repo}`,\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return\n\n const data = JSON.parse(result)\n const repo_ = data?.data?.repository\n if (!repo_)\n return\n\n for (let i = 0; i < worth.length; i++) {\n const nodes = repo_[`i${i}`]?.comments?.nodes\n if (!Array.isArray(nodes))\n continue\n worth[i]!.topComments = nodes\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .map((c: any) => ({\n body: c.body || '',\n author: c.author.login,\n reactions: c.reactions?.totalCount || 0,\n }))\n }\n }\n catch {\n // Non-critical — issues still useful without comments\n }\n}\n\n/**\n * Fetch issues from a GitHub repo sorted by reactions (upvotes).\n * Returns 75% open issues + 25% recently closed issues (within last year).\n * Filters noise (duplicates, stale, tracking) and deprioritizes feature requests.\n * Enriches top issues with their most-reacted comments via GraphQL.\n */\nexport async function fetchGitHubIssues(\n owner: string,\n repo: string,\n limit = 30,\n releasedAt?: string,\n): Promise<GitHubIssue[]> {\n if (!isGhAvailable())\n return []\n\n const openCount = Math.ceil(limit * 0.75)\n const closedCount = limit - openCount\n\n try {\n const open = fetchIssuesByState(owner, repo, 'open', openCount, releasedAt)\n const closed = fetchIssuesByState(owner, repo, 'closed', closedCount, releasedAt)\n const all = [...open, ...closed]\n enrichWithComments(owner, repo, all)\n return all\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single issue as markdown with YAML frontmatter\n */\nexport function formatIssueAsMarkdown(issue: GitHubIssue): string {\n const limit = bodyLimit(issue.reactions)\n const fmFields: Record<string, string | number | boolean | undefined> = {\n number: issue.number,\n title: issue.title,\n type: issue.type,\n state: issue.state,\n created: isoDate(issue.createdAt),\n url: issue.url,\n reactions: issue.reactions,\n comments: issue.comments,\n }\n if (issue.labels.length > 0)\n fmFields.labels = `[${issue.labels.join(', ')}]`\n const fm = buildFrontmatter(fmFields)\n\n const lines = [fm, '', `# ${issue.title}`]\n\n if (issue.body) {\n const body = issue.body.length > limit\n ? 
`${issue.body.slice(0, limit)}...`\n : issue.body\n lines.push('', body)\n }\n\n if (issue.topComments.length > 0) {\n lines.push('', '---', '', '## Top Comments')\n for (const c of issue.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const commentBody = c.body.length > 600\n ? `${c.body.slice(0, 600)}...`\n : c.body\n lines.push('', `**@${c.author}**${reactions}:`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all issues for quick LLM scanning.\n * Groups by type so the LLM can quickly find bugs vs questions.\n */\nexport function generateIssueIndex(issues: GitHubIssue[]): string {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n const list = byType.get(issue.type) || []\n list.push(issue)\n byType.set(issue.type, list)\n }\n\n const typeLabels: Record<IssueType, string> = {\n bug: 'Bugs & Regressions',\n question: 'Questions & Usage Help',\n docs: 'Documentation',\n feature: 'Feature Requests',\n other: 'Other',\n }\n\n const typeOrder: IssueType[] = ['bug', 'question', 'docs', 'other', 'feature']\n\n const fm = [\n '---',\n `total: ${issues.length}`,\n `open: ${issues.filter(i => i.state === 'open').length}`,\n `closed: ${issues.filter(i => i.state !== 'open').length}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Issues Index', '']\n\n for (const type of typeOrder) {\n const group = byType.get(type)\n if (!group?.length)\n continue\n sections.push(`## ${typeLabels[type]} (${group.length})`, '')\n for (const issue of group) {\n const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : ''\n const state = issue.state === 'open' ? '' : ' [closed]'\n const date = isoDate(issue.createdAt)\n sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Shared utilities for doc resolution\n */\n\nimport { ofetch } from 'ofetch'\n\nexport const $fetch = ofetch.create({\n retry: 3,\n retryDelay: 500,\n timeout: 15_000,\n headers: { 'User-Agent': 'skilld/1.0' },\n})\n\n/**\n * Fetch text content from URL\n */\nexport async function fetchText(url: string): Promise<string | null> {\n return $fetch(url, { responseType: 'text' }).catch(() => null)\n}\n\n/**\n * Verify URL exists and is not HTML (likely 404 page)\n */\nexport async function verifyUrl(url: string): Promise<boolean> {\n const res = await $fetch.raw(url, { method: 'HEAD' }).catch(() => null)\n if (!res)\n return false\n const contentType = res.headers.get('content-type') || ''\n return !contentType.includes('text/html')\n}\n\n/**\n * Check if URL points to a social media or package registry site (not real docs)\n */\nconst USELESS_HOSTS = new Set([\n 'twitter.com',\n 'x.com',\n 'facebook.com',\n 'linkedin.com',\n 'youtube.com',\n 'instagram.com',\n 'npmjs.com',\n 'www.npmjs.com',\n 'yarnpkg.com',\n])\n\nexport function isUselessDocsUrl(url: string): boolean {\n try {\n const { hostname } = new URL(url)\n return USELESS_HOSTS.has(hostname)\n }\n catch { return false }\n}\n\n/**\n * Check if URL is a GitHub repo URL (not a docs site)\n */\nexport function isGitHubRepoUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n return parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com'\n }\n catch {\n return false\n }\n}\n\n/**\n * Parse owner/repo from GitHub URL\n */\nexport function parseGitHubUrl(url: string): { owner: string, repo: 
string } | null {\n const match = url.match(/github\\.com\\/([^/]+)\\/([^/]+?)(?:\\.git)?(?:[/#]|$)/)\n if (!match)\n return null\n return { owner: match[1]!, repo: match[2]! }\n}\n\n/**\n * Normalize git repo URL to https\n */\nexport function normalizeRepoUrl(url: string): string {\n return url\n .replace(/^git\\+/, '')\n .replace(/#.*$/, '')\n .replace(/\\.git$/, '')\n .replace(/^git:\\/\\//, 'https://')\n .replace(/^ssh:\\/\\/git@github\\.com/, 'https://github.com')\n // SSH format: git@github.com:owner/repo\n .replace(/^git@github\\.com:/, 'https://github.com/')\n}\n\n/**\n * Extract branch hint from URL fragment (e.g. \"git+https://...#main\" → \"main\")\n */\nexport function extractBranchHint(url: string): string | undefined {\n const hash = url.indexOf('#')\n if (hash === -1)\n return undefined\n const fragment = url.slice(hash + 1)\n // Ignore non-branch fragments like \"readme\"\n if (!fragment || fragment === 'readme')\n return undefined\n return fragment\n}\n","/**\n * GitHub release notes fetching via gh CLI (preferred) with ungh.cc fallback\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { isoDate } from './github-common'\nimport { isGhAvailable } from './issues'\nimport { $fetch } from './utils'\n\nexport interface GitHubRelease {\n id: number\n tag: string\n name: string\n prerelease: boolean\n createdAt: string\n publishedAt: string\n markdown: string\n}\n\ninterface UnghReleasesResponse {\n releases: GitHubRelease[]\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\nexport interface SemVer {\n major: number\n minor: number\n patch: number\n raw: string\n}\n\nexport function parseSemver(version: string): SemVer | null {\n const clean = version.replace(/^v/, '')\n const match = clean.match(/^(\\d+)(?:\\.(\\d+))?(?:\\.(\\d+))?/)\n if (!match)\n return null\n return {\n major: +match[1]!,\n minor: match[2] ? +match[2] : 0,\n patch: match[3] ? 
+match[3] : 0,\n raw: clean,\n }\n}\n\n/**\n * Extract version from a release tag, handling monorepo formats:\n * - `pkg@1.2.3` → `1.2.3`\n * - `pkg-v1.2.3` → `1.2.3`\n * - `v1.2.3` → `1.2.3`\n * - `1.2.3` → `1.2.3`\n */\nfunction extractVersion(tag: string, packageName?: string): string | null {\n if (packageName) {\n // Monorepo: pkg@version or pkg-vversion\n const atMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}@(.+)$`))\n if (atMatch)\n return atMatch[1]!\n const dashMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}-v?(.+)$`))\n if (dashMatch)\n return dashMatch[1]!\n }\n // Standard: v1.2.3 or 1.2.3\n return tag.replace(/^v/, '')\n}\n\nfunction escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n/**\n * Check if a release tag belongs to a specific package\n */\nfunction tagMatchesPackage(tag: string, packageName: string): boolean {\n // Exact match: pkg@version or pkg-vversion\n return tag.startsWith(`${packageName}@`) || tag.startsWith(`${packageName}-v`) || tag.startsWith(`${packageName}-`)\n}\n\nexport function compareSemver(a: SemVer, b: SemVer): number {\n if (a.major !== b.major)\n return a.major - b.major\n if (a.minor !== b.minor)\n return a.minor - b.minor\n return a.patch - b.patch\n}\n\n/**\n * Fetch releases via gh CLI (fast, authenticated, paginated)\n */\nfunction fetchReleasesViaGh(owner: string, repo: string): GitHubRelease[] {\n try {\n const { stdout: json } = spawnSync('gh', [\n 'api',\n `repos/${owner}/${repo}/releases?per_page=100`,\n '--jq',\n '[.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}]',\n ], { encoding: 'utf-8', timeout: 15_000, stdio: ['ignore', 'pipe', 'ignore'] })\n if (!json)\n return []\n return JSON.parse(json) as GitHubRelease[]\n }\n catch {\n return []\n }\n}\n\n/**\n * Fetch all releases from a GitHub repo via ungh.cc (fallback)\n */\nasync function fetchReleasesViaUngh(owner: string, repo: string): Promise<GitHubRelease[]> {\n const data = await $fetch<UnghReleasesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n { signal: AbortSignal.timeout(15_000) },\n ).catch(() => null)\n return data?.releases ?? []\n}\n\n/**\n * Fetch all releases — gh CLI first, ungh.cc fallback\n */\nasync function fetchAllReleases(owner: string, repo: string): Promise<GitHubRelease[]> {\n if (isGhAvailable()) {\n const releases = fetchReleasesViaGh(owner, repo)\n if (releases.length > 0)\n return releases\n }\n return fetchReleasesViaUngh(owner, repo)\n}\n\n/**\n * Select last 20 stable releases for a package, sorted newest first.\n * For monorepos, filters to package-specific tags (pkg@version).\n * Falls back to generic tags (v1.2.3) only if no package-specific found.\n * If installedVersion is provided, filters out releases newer than it.\n */\nexport function selectReleases(releases: GitHubRelease[], packageName?: string, installedVersion?: string): GitHubRelease[] {\n // Check if this looks like a monorepo (has package-prefixed tags)\n const hasMonorepoTags = packageName && releases.some(r => tagMatchesPackage(r.tag, packageName))\n const installedSv = installedVersion ? parseSemver(installedVersion) : null\n\n const filtered = releases.filter((r) => {\n if (r.prerelease)\n return false\n\n const ver = extractVersion(r.tag, hasMonorepoTags ? 
packageName : undefined)\n if (!ver)\n return false\n\n const sv = parseSemver(ver)\n if (!sv)\n return false\n\n // Monorepo: only include tags for this package\n if (hasMonorepoTags && packageName && !tagMatchesPackage(r.tag, packageName))\n return false\n\n // Filter out releases newer than installed version\n if (installedSv && compareSemver(sv, installedSv) > 0)\n return false\n\n return true\n })\n\n return filtered\n .sort((a, b) => {\n const verA = extractVersion(a.tag, hasMonorepoTags ? packageName : undefined)\n const verB = extractVersion(b.tag, hasMonorepoTags ? packageName : undefined)\n if (!verA || !verB)\n return 0\n return compareSemver(parseSemver(verB)!, parseSemver(verA)!)\n })\n .slice(0, 20)\n}\n\n/**\n * Format a release as markdown with YAML frontmatter\n */\nfunction formatRelease(release: GitHubRelease, packageName?: string): string {\n const date = isoDate(release.publishedAt || release.createdAt)\n const version = extractVersion(release.tag, packageName) || release.tag\n\n const fm = [\n '---',\n `tag: ${release.tag}`,\n `version: ${version}`,\n `published: ${date}`,\n ]\n if (release.name && release.name !== release.tag)\n fm.push(`name: \"${release.name.replace(/\"/g, '\\\\\"')}\"`)\n fm.push('---')\n\n return `${fm.join('\\n')}\\n\\n# ${release.name || release.tag}\\n\\n${release.markdown}`\n}\n\nexport interface ReleaseIndexOptions {\n releases: GitHubRelease[]\n packageName?: string\n blogReleases?: Array<{ version: string, title: string, date: string }>\n hasChangelog?: boolean\n}\n\n/**\n * Generate a unified summary index of all releases for quick LLM scanning.\n * Includes GitHub releases, blog release posts, and CHANGELOG link.\n */\nexport function generateReleaseIndex(releasesOrOpts: GitHubRelease[] | ReleaseIndexOptions, packageName?: string): string {\n // Support both old signature and new options object\n const opts: ReleaseIndexOptions = Array.isArray(releasesOrOpts)\n ? { releases: releasesOrOpts, packageName }\n : releasesOrOpts\n\n const { releases, blogReleases, hasChangelog } = opts\n const pkg = opts.packageName\n\n const total = releases.length + (blogReleases?.length ?? 0)\n const fm = [\n '---',\n `total: ${total}`,\n `latest: ${releases[0]?.tag || 'unknown'}`,\n '---',\n ]\n\n const lines: string[] = [fm.join('\\n'), '', '# Releases Index', '']\n\n // Blog release posts (major version announcements)\n if (blogReleases && blogReleases.length > 0) {\n lines.push('## Blog Releases', '')\n for (const b of blogReleases) {\n lines.push(`- [${b.version}](./blog-${b.version}.md): ${b.title} (${b.date})`)\n }\n lines.push('')\n }\n\n // GitHub release notes\n if (releases.length > 0) {\n if (blogReleases && blogReleases.length > 0)\n lines.push('## Release Notes', '')\n for (const r of releases) {\n const date = isoDate(r.publishedAt || r.createdAt)\n const filename = r.tag.includes('@') || r.tag.startsWith('v') ? r.tag : `v${r.tag}`\n const version = extractVersion(r.tag, pkg) || r.tag\n const sv = parseSemver(version)\n const label = sv?.patch === 0 && sv.minor === 0 ? ' **[MAJOR]**' : sv?.patch === 0 ? 
' **[MINOR]**' : ''\n lines.push(`- [${r.tag}](./${filename}.md): ${r.name || r.tag} (${date})${label}`)\n }\n lines.push('')\n }\n\n // CHANGELOG link\n if (hasChangelog) {\n lines.push('## Changelog', '')\n lines.push('- [CHANGELOG.md](./CHANGELOG.md)')\n lines.push('')\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Detect if releases are just short stubs redirecting to CHANGELOG.md.\n * Samples up to 3 releases — if all are short (<500 chars) and mention CHANGELOG, it's a redirect pattern.\n */\nexport function isChangelogRedirectPattern(releases: GitHubRelease[]): boolean {\n const sample = releases.slice(0, 3)\n if (sample.length === 0)\n return false\n return sample.every((r) => {\n const body = (r.markdown || '').trim()\n return body.length < 500 && /changelog\\.md/i.test(body)\n })\n}\n\n/**\n * Fetch CHANGELOG.md from a GitHub repo at a specific ref as fallback\n */\nasync function fetchChangelog(owner: string, repo: string, ref: string): Promise<string | null> {\n for (const filename of ['CHANGELOG.md', 'changelog.md', 'CHANGES.md']) {\n const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${filename}`\n const content = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (content)\n return content\n }\n return null\n}\n\n/**\n * Fetch release notes for a package. Returns CachedDoc[] with releases/{tag}.md files.\n *\n * Strategy:\n * 1. Fetch GitHub releases, filter to package-specific tags for monorepos\n * 2. If no releases found, try CHANGELOG.md as fallback\n */\nexport async function fetchReleaseNotes(\n owner: string,\n repo: string,\n installedVersion: string,\n gitRef?: string,\n packageName?: string,\n): Promise<CachedDoc[]> {\n const releases = await fetchAllReleases(owner, repo)\n const selected = selectReleases(releases, packageName, installedVersion)\n\n if (selected.length > 0) {\n // Detect changelog-redirect pattern: short stubs that just link to CHANGELOG.md\n // Sample up to 3 releases to check\n if (isChangelogRedirectPattern(selected)) {\n const ref = gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref)\n if (changelog)\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n }\n\n const docs = selected.map((r) => {\n const filename = r.tag.includes('@') || r.tag.startsWith('v')\n ? 
r.tag\n : `v${r.tag}`\n return {\n path: `releases/${filename}.md`,\n content: formatRelease(r, packageName),\n }\n })\n\n // Also fetch CHANGELOG.md alongside individual releases (unless redirect pattern)\n const ref = gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref)\n if (changelog && changelog.length < 500_000) {\n docs.push({ path: 'releases/CHANGELOG.md', content: changelog })\n }\n\n return docs\n }\n\n // Fallback: CHANGELOG.md (indexed as single file)\n const ref = gitRef || 'main'\n const changelog = await fetchChangelog(owner, repo, ref)\n if (!changelog)\n return []\n\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n}\n","/**\n * Blog release notes fetching for packages with curated blog releases\n * Supports version filtering and extensible for multiple packages\n */\n\nimport type { BlogRelease } from './package-registry'\nimport { htmlToMarkdown } from 'mdream'\nimport { getBlogPreset } from './package-registry'\nimport { compareSemver, parseSemver } from './releases'\nimport { $fetch } from './utils'\n\nexport interface BlogReleasePost {\n version: string\n title: string\n date: string\n markdown: string\n url: string\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\n/**\n * Parse version from blog URL\n * Handles: https://blog.vuejs.org/posts/vue-3-5 → 3.5\n */\nfunction parseVersionFromUrl(url: string): string | null {\n const match = url.match(/\\/posts\\/\\w+-(\\d+)-(\\d+)/)\n if (match)\n return `${match[1]}.${match[2]}`\n return null\n}\n\n/**\n * Format a blog release as markdown with YAML frontmatter\n */\nfunction formatBlogRelease(release: BlogReleasePost): string {\n const fm = [\n '---',\n `version: ${release.version}`,\n `title: \"${release.title.replace(/\"/g, '\\\\\"')}\"`,\n `date: ${release.date}`,\n `url: ${release.url}`,\n `source: blog-release`,\n '---',\n ]\n\n return `${fm.join('\\n')}\\n\\n# ${release.title}\\n\\n${release.markdown}`\n}\n\n/**\n * Fetch and parse a single blog post\n */\nasync function fetchBlogPost(url: string): Promise<BlogReleasePost | null> {\n try {\n const html = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (!html)\n return null\n\n // Extract version from URL\n const version = parseVersionFromUrl(url)\n if (!version)\n return null\n\n // Extract title from <h1> or <title>\n let title = ''\n const titleMatch = html.match(/<h1[^>]*>([^<]+)<\\/h1>/)\n if (titleMatch)\n title = titleMatch[1]!.trim()\n\n // If no h1, try meta title\n if (!title) {\n const metaTitleMatch = html.match(/<title>([^<]+)<\\/title>/)\n if (metaTitleMatch)\n title = metaTitleMatch[1]!.trim()\n }\n\n // Extract date from article metadata or ISO date pattern\n let date = new Date().toISOString().split('T')[0]!\n const dateMatch = html.match(/(?:published|date|posted)[\"']?\\s*:\\s*[\"']?(\\d{4}-\\d{2}-\\d{2})/)\n if (dateMatch)\n date = dateMatch[1]!\n\n // Convert HTML to markdown using mdream\n const markdown = htmlToMarkdown(html)\n if (!markdown)\n return null\n\n return {\n version,\n title: title || `Release ${version}`,\n date,\n markdown,\n url,\n }\n }\n catch {\n return null\n }\n}\n\n/**\n * Filter blog releases by installed version\n * Only includes releases where version <= installedVersion\n * Returns all releases if version parsing fails (fail-safe)\n */\nfunction filterBlogsByVersion(entries: BlogRelease[], installedVersion: string): BlogRelease[] {\n const installedSv = parseSemver(installedVersion)\n if 
(!installedSv)\n return entries // Fail-safe: include all if version parsing fails\n\n return entries.filter((entry) => {\n const entrySv = parseSemver(entry.version)\n if (!entrySv)\n return false\n // Include only releases where version <= installed version\n return compareSemver(entrySv, installedSv) <= 0\n })\n}\n\n/**\n * Fetch blog release notes from package presets\n * Filters to only releases matching or older than the installed version\n * Returns CachedDoc[] with releases/blog-{version}.md files\n */\nexport async function fetchBlogReleases(\n packageName: string,\n installedVersion: string,\n): Promise<CachedDoc[]> {\n const preset = getBlogPreset(packageName)\n if (!preset)\n return []\n\n const filteredReleases = filterBlogsByVersion(preset.releases, installedVersion)\n if (filteredReleases.length === 0)\n return []\n\n const releases: BlogReleasePost[] = []\n\n // Fetch all blog posts in parallel with 3 concurrent requests\n const batchSize = 3\n for (let i = 0; i < filteredReleases.length; i += batchSize) {\n const batch = filteredReleases.slice(i, i + batchSize)\n const results = await Promise.all(batch.map(entry => fetchBlogPost(entry.url)))\n for (const result of results) {\n if (result)\n releases.push(result)\n }\n }\n\n if (releases.length === 0)\n return []\n\n // Sort by version descending (newest first)\n releases.sort((a, b) => {\n const aVer = a.version.split('.').map(Number)\n const bVer = b.version.split('.').map(Number)\n for (let i = 0; i < Math.max(aVer.length, bVer.length); i++) {\n const diff = (bVer[i] ?? 0) - (aVer[i] ?? 0)\n if (diff !== 0)\n return diff\n }\n return 0\n })\n\n // Format as cached docs — stored in releases/ alongside regular releases\n return releases.map(r => ({\n path: `releases/blog-${r.version}.md`,\n content: formatBlogRelease(r),\n }))\n}\n","/**\n * GitHub discussions fetching via gh CLI GraphQL\n * Prioritizes Q&A and Help categories, includes accepted answers\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common'\nimport { isGhAvailable } from './issues'\n\n/** Categories most useful for skill generation (in priority order) */\nconst HIGH_VALUE_CATEGORIES = new Set([\n 'q&a',\n 'help',\n 'troubleshooting',\n 'support',\n])\n\nconst LOW_VALUE_CATEGORIES = new Set([\n 'show and tell',\n 'ideas',\n 'polls',\n])\n\nexport interface DiscussionComment {\n body: string\n author: string\n}\n\nexport interface GitHubDiscussion {\n number: number\n title: string\n body: string\n category: string\n createdAt: string\n url: string\n upvoteCount: number\n comments: number\n answer?: string\n topComments: DiscussionComment[]\n}\n\n/**\n * Fetch discussions from a GitHub repo using gh CLI GraphQL.\n * Prioritizes Q&A and Help categories. Includes accepted answer body for answered discussions.\n */\nexport async function fetchGitHubDiscussions(\n owner: string,\n repo: string,\n limit = 20,\n releasedAt?: string,\n): Promise<GitHubDiscussion[]> {\n if (!isGhAvailable())\n return []\n\n // GraphQL discussions endpoint doesn't support date filtering,\n // so we fetch latest N and filter client-side. Skip entirely\n // if the cutoff is in the past — results would be empty anyway.\n if (releasedAt) {\n const cutoff = new Date(releasedAt)\n cutoff.setMonth(cutoff.getMonth() + 6)\n if (cutoff < new Date())\n return []\n }\n\n try {\n // Fetch more to compensate for filtering\n const fetchCount = Math.min(limit * 3, 80)\n const query = `query($owner: String!, $repo: String!) 
{ repository(owner: $owner, name: $repo) { discussions(first: ${fetchCount}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 3) { totalCount nodes { body author { login } } } answer { body } author { login } } } } }`\n\n const { stdout: result } = spawnSync('gh', ['api', 'graphql', '-f', `query=${query}`, '-f', `owner=${owner}`, '-f', `repo=${repo}`], {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024,\n })\n if (!result)\n return []\n\n const data = JSON.parse(result)\n const nodes = data?.data?.repository?.discussions?.nodes\n if (!Array.isArray(nodes))\n return []\n\n const discussions = nodes\n .filter((d: any) => d.author && !BOT_USERS.has(d.author.login))\n .filter((d: any) => {\n const cat = (d.category?.name || '').toLowerCase()\n return !LOW_VALUE_CATEGORIES.has(cat)\n })\n .map((d: any) => ({\n number: d.number,\n title: d.title,\n body: d.body || '',\n category: d.category?.name || '',\n createdAt: d.createdAt,\n url: d.url,\n upvoteCount: d.upvoteCount || 0,\n comments: d.comments?.totalCount || 0,\n answer: d.answer?.body || undefined,\n topComments: (d.comments?.nodes || [])\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .map((c: any) => ({ body: c.body || '', author: c.author.login })),\n }))\n // Prioritize high-value categories, then sort by engagement\n .sort((a: GitHubDiscussion, b: GitHubDiscussion) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.category.toLowerCase()) ? 1 : 0\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.category.toLowerCase()) ? 1 : 0\n if (aHigh !== bHigh)\n return bHigh - aHigh\n return (b.upvoteCount + b.comments) - (a.upvoteCount + a.comments)\n })\n .slice(0, limit)\n\n return discussions\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single discussion as markdown with YAML frontmatter\n */\nexport function formatDiscussionAsMarkdown(d: GitHubDiscussion): string {\n const fm = buildFrontmatter({\n number: d.number,\n title: d.title,\n category: d.category,\n created: isoDate(d.createdAt),\n url: d.url,\n upvotes: d.upvoteCount,\n comments: d.comments,\n answered: !!d.answer,\n })\n\n const bodyLimit = d.upvoteCount >= 5 ? 1500 : 800\n const lines = [fm, '', `# ${d.title}`]\n\n if (d.body) {\n const body = d.body.length > bodyLimit\n ? `${d.body.slice(0, bodyLimit)}...`\n : d.body\n lines.push('', body)\n }\n\n if (d.answer) {\n const answerLimit = 1000\n const answer = d.answer.length > answerLimit\n ? `${d.answer.slice(0, answerLimit)}...`\n : d.answer\n lines.push('', '---', '', '## Accepted Answer', '', answer)\n }\n else if (d.topComments.length > 0) {\n // No accepted answer — include top comments as context\n lines.push('', '---', '', '## Top Comments')\n for (const c of d.topComments) {\n const commentBody = c.body.length > 600\n ? 
`${c.body.slice(0, 600)}...`\n : c.body\n lines.push('', `**@${c.author}:**`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all discussions for quick LLM scanning.\n * Groups by category so the LLM can quickly find Q&A vs general discussions.\n */\nexport function generateDiscussionIndex(discussions: GitHubDiscussion[]): string {\n const byCategory = new Map<string, GitHubDiscussion[]>()\n for (const d of discussions) {\n const cat = d.category || 'Uncategorized'\n const list = byCategory.get(cat) || []\n list.push(d)\n byCategory.set(cat, list)\n }\n\n const answered = discussions.filter(d => d.answer).length\n\n const fm = [\n '---',\n `total: ${discussions.length}`,\n `answered: ${answered}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Discussions Index', '']\n\n // Sort categories: high-value first\n const cats = [...byCategory.keys()].sort((a, b) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1\n return aHigh - bHigh || a.localeCompare(b)\n })\n\n for (const cat of cats) {\n const group = byCategory.get(cat)!\n sections.push(`## ${cat} (${group.length})`, '')\n for (const d of group) {\n const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : ''\n const answered = d.answer ? ' [answered]' : ''\n const date = isoDate(d.createdAt)\n sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Globs .d.ts type definition files from a package for search indexing.\n * Only types — source code is too verbose.\n */\nimport { existsSync, readFileSync } from 'node:fs'\nimport { globby } from 'globby'\nimport { join } from 'pathe'\n\nexport interface EntryFile {\n path: string\n content: string\n type: 'types' | 'source'\n}\n\nconst SKIP_DIRS = [\n 'node_modules',\n '_vendor',\n '__tests__',\n '__mocks__',\n '__fixtures__',\n 'test',\n 'tests',\n 'fixture',\n 'fixtures',\n 'locales',\n 'locale',\n 'i18n',\n '.git',\n]\n\nconst SKIP_PATTERNS = [\n '*.min.*',\n '*.prod.*',\n '*.global.*',\n '*.browser.*',\n '*.map',\n '*.map.js',\n 'CHANGELOG*',\n 'LICENSE*',\n 'README*',\n]\n\nconst MAX_FILE_SIZE = 500 * 1024 // 500KB per file\n\n/**\n * Glob .d.ts type definition files from a package directory, skipping junk.\n */\nexport async function resolveEntryFiles(packageDir: string): Promise<EntryFile[]> {\n if (!existsSync(join(packageDir, 'package.json')))\n return []\n\n const ignore = [\n ...SKIP_DIRS.map(d => `**/${d}/**`),\n ...SKIP_PATTERNS,\n ]\n\n const files = await globby(['**/*.d.{ts,mts,cts}'], {\n cwd: packageDir,\n ignore,\n absolute: false,\n })\n\n const entries: EntryFile[] = []\n\n for (const file of files) {\n const absPath = join(packageDir, file)\n let content: string\n try {\n content = readFileSync(absPath, 'utf-8')\n }\n catch {\n continue\n }\n\n if (content.length > MAX_FILE_SIZE)\n continue\n\n entries.push({ path: file, content, type: 'types' })\n }\n\n return entries\n}\n","/**\n * Git repo skill source — parse inputs + fetch pre-authored skills from repos\n *\n * Supports GitHub shorthand (owner/repo), full URLs, SSH, GitLab, and local paths.\n * Skills are pre-authored SKILL.md files — no doc resolution or LLM generation needed.\n */\n\nimport { existsSync, readdirSync, readFileSync } from 'node:fs'\nimport pLimit from 'p-limit'\nimport { resolve } from 'pathe'\nimport { yamlParseKV } 
from '../core/yaml'\nimport { $fetch, normalizeRepoUrl, parseGitHubUrl } from './utils'\n\nexport interface GitSkillSource {\n type: 'github' | 'gitlab' | 'git-ssh' | 'local'\n owner?: string\n repo?: string\n /** Direct path to a specific skill (from /tree/ref/path URLs) */\n skillPath?: string\n /** Branch/tag parsed from URL */\n ref?: string\n /** Absolute path for local sources */\n localPath?: string\n}\n\nexport interface RemoteSkill {\n /** From SKILL.md frontmatter `name` field, or directory name */\n name: string\n /** From SKILL.md frontmatter `description` field */\n description: string\n /** Path within repo (e.g., \"skills/web-design-guidelines\") */\n path: string\n /** Full SKILL.md content */\n content: string\n /** Supporting files (scripts/, references/, assets/) */\n files: Array<{ path: string, content: string }>\n}\n\n/**\n * Detect whether an input string is a git skill source.\n * Returns null for npm package names (including scoped @scope/pkg).\n */\nexport function parseGitSkillInput(input: string): GitSkillSource | null {\n const trimmed = input.trim()\n\n // Scoped npm packages → not git\n if (trimmed.startsWith('@'))\n return null\n\n // Local paths\n if (trimmed.startsWith('./') || trimmed.startsWith('../') || trimmed.startsWith('/') || trimmed.startsWith('~')) {\n const localPath = trimmed.startsWith('~')\n ? resolve(process.env.HOME || '', trimmed.slice(1))\n : resolve(trimmed)\n return { type: 'local', localPath }\n }\n\n // SSH format: git@github.com:owner/repo\n if (trimmed.startsWith('git@')) {\n const normalized = normalizeRepoUrl(trimmed)\n const gh = parseGitHubUrl(normalized)\n if (gh)\n return { type: 'github', owner: gh.owner, repo: gh.repo }\n return null\n }\n\n // Full URLs\n if (trimmed.startsWith('https://') || trimmed.startsWith('http://')) {\n return parseGitUrl(trimmed)\n }\n\n // GitHub shorthand: owner/repo (exactly one slash, no spaces, no commas)\n if (/^[\\w.-]+\\/[\\w.-]+$/.test(trimmed)) {\n return { type: 'github', owner: trimmed.split('/')[0], repo: trimmed.split('/')[1] }\n }\n\n // Everything else → npm\n return null\n}\n\nfunction parseGitUrl(url: string): GitSkillSource | null {\n try {\n const parsed = new URL(url)\n\n if (parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n\n // Handle /tree/ref/path URLs → extract specific skill path\n if (parts[2] === 'tree' && parts.length >= 4) {\n const ref = parts[3]\n const skillPath = parts.length > 4 ? 
parts.slice(4).join('/') : undefined\n return { type: 'github', owner, repo, ref, skillPath }\n }\n\n return { type: 'github', owner, repo }\n }\n\n if (parsed.hostname === 'gitlab.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n return { type: 'gitlab', owner, repo }\n }\n\n return null\n }\n catch {\n return null\n }\n}\n\n/**\n * Parse name and description from SKILL.md frontmatter.\n */\nexport function parseSkillFrontmatterName(content: string): { name?: string, description?: string } {\n const match = content.match(/^---\\n([\\s\\S]*?)\\n---/)\n if (!match)\n return {}\n\n const result: { name?: string, description?: string } = {}\n for (const line of match[1].split('\\n')) {\n const kv = yamlParseKV(line)\n if (!kv)\n continue\n if (kv[0] === 'name')\n result.name = kv[1]\n if (kv[0] === 'description')\n result.description = kv[1]\n }\n return result\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/** Supporting file dirs within a skill directory */\nconst SUPPORTING_DIRS = ['scripts', 'references', 'assets']\n\n/**\n * Fetch skills from a git source. Returns list of discovered skills + commit SHA.\n */\nexport async function fetchGitSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n if (source.type === 'local')\n return fetchLocalSkills(source)\n if (source.type === 'github')\n return fetchGitHubSkills(source, onProgress)\n if (source.type === 'gitlab')\n return fetchGitLabSkills(source, onProgress)\n return { skills: [] }\n}\n\n// ── Local ──\n\nfunction fetchLocalSkills(source: GitSkillSource): { skills: RemoteSkill[] } {\n const base = source.localPath!\n if (!existsSync(base))\n return { skills: [] }\n\n const skills: RemoteSkill[] = []\n\n // Check for skills/ subdirectory\n const skillsDir = resolve(base, 'skills')\n if (existsSync(skillsDir)) {\n for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {\n if (!entry.isDirectory())\n continue\n const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`)\n if (skill)\n skills.push(skill)\n }\n }\n\n // Check for root SKILL.md\n if (skills.length === 0) {\n const skill = readLocalSkill(base, '')\n if (skill)\n skills.push(skill)\n }\n\n return { skills }\n}\n\nfunction readLocalSkill(dir: string, repoPath: string): RemoteSkill | null {\n const skillMdPath = resolve(dir, 'SKILL.md')\n if (!existsSync(skillMdPath))\n return null\n\n const content = readFileSync(skillMdPath, 'utf-8')\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = dir.split('/').pop()!\n const name = frontmatter.name || dirName\n\n const files: Array<{ path: string, content: string }> = []\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPath = resolve(dir, subdir)\n if (!existsSync(subdirPath))\n continue\n for (const file of readdirSync(subdirPath, { withFileTypes: true })) {\n if (!file.isFile())\n continue\n files.push({\n path: `${subdir}/${file.name}`,\n content: readFileSync(resolve(subdirPath, file.name), 'utf-8'),\n })\n }\n }\n\n return {\n name,\n description: frontmatter.description || '',\n path: repoPath,\n content,\n files,\n }\n}\n\n// ── GitHub ──\n\nasync function fetchGitHubSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: 
RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n\n if (!data?.files?.length) {\n // Try 'master' fallback if default ref failed\n if (ref === 'main') {\n const fallback = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/master`,\n ).catch(() => null)\n if (fallback?.files?.length)\n return extractGitHubSkills(owner!, repo!, 'master', fallback, source.skillPath, onProgress)\n }\n return { skills: [] }\n }\n\n return extractGitHubSkills(owner!, repo!, ref, data, source.skillPath, onProgress)\n}\n\nasync function extractGitHubSkills(\n owner: string,\n repo: string,\n ref: string,\n data: UnghFilesResponse,\n skillPath?: string,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const allFiles = data.files.map(f => f.path)\n const commitSha = data.meta?.sha\n\n // Find SKILL.md files\n let skillMdPaths: string[]\n\n if (skillPath) {\n // Direct skill path: look for SKILL.md at that path\n const candidates = [\n `${skillPath}/SKILL.md`,\n // In case they linked directly to the SKILL.md\n skillPath.endsWith('/SKILL.md') ? skillPath : null,\n ].filter(Boolean) as string[]\n\n skillMdPaths = allFiles.filter(f => candidates.includes(f))\n }\n else {\n // Discover: skills/*/SKILL.md or root SKILL.md\n skillMdPaths = allFiles.filter(f =>\n f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md',\n )\n }\n\n if (skillMdPaths.length === 0)\n return { skills: [], commitSha }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitHub(owner, repo, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitHub(owner, repo, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills, commitSha }\n}\n\nasync function fetchRawGitHub(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n\n// ── GitLab ──\n\ninterface GitLabTreeEntry {\n id: string\n name: string\n type: string\n path: string\n mode: string\n}\n\nasync function fetchGitLabSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n const projectId = encodeURIComponent(`${owner}/${repo}`)\n\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const tree = await $fetch<GitLabTreeEntry[]>(\n `https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=${ref}&recursive=true&per_page=100`,\n ).catch(() => null)\n\n if (!tree?.length)\n return { skills: [] }\n\n const allFiles = tree.filter(e => e.type === 'blob').map(e => e.path)\n\n // Find SKILL.md files\n const skillMdPaths = source.skillPath\n ? allFiles.filter(f => f === `${source.skillPath}/SKILL.md`)\n : allFiles.filter(f => f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md')\n\n if (skillMdPaths.length === 0)\n return { skills: [] }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitLab(owner!, repo!, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo!\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitLab(owner!, repo!, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills }\n}\n\nasync function fetchRawGitLab(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://gitlab.com/${owner}/${repo}/-/raw/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n","/**\n * GitHub/ungh README resolution + versioned docs\n */\n\nimport type { LlmsLink } from './types'\nimport { spawnSync } from 'node:child_process'\nimport { existsSync as fsExistsSync, readFileSync as fsReadFileSync } from 'node:fs'\nimport { fileURLToPath } from 'node:url'\nimport { isGhAvailable } from './issues'\nimport { getDocOverride } from './package-registry'\nimport { $fetch, extractBranchHint, fetchText, parseGitHubUrl } from './utils'\n\n/** Minimum git-doc file count to prefer over llms.txt */\nexport const MIN_GIT_DOCS = 5\n\n/** True when git-docs exist but are too few to be useful (< MIN_GIT_DOCS) */\nexport const isShallowGitDocs = (n: number) => n > 0 && n < MIN_GIT_DOCS\n\nexport interface GitDocsResult {\n /** URL pattern for fetching docs (use with ref) */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of doc file paths relative to repo root */\n files: string[]\n /** Prefix to strip when normalizing paths to docs/ (e.g. 'apps/evalite-docs/src/content/') for nested monorepo docs */\n docsPrefix?: string\n /** Full repo file tree — only set when discoverDocFiles() heuristic was used (not standard docs/ prefix) */\n allFiles?: string[]\n /** True when ref is a branch (main/master) rather than a version-specific tag */\n fallback?: boolean\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/**\n * List files at a git ref using ungh (no rate limits)\n */\nasync function listFilesAtRef(owner: string, repo: string, ref: string): Promise<string[]> {\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n return data?.files?.map(f => f.path) ?? 
[]\n}\n\ninterface TagResult {\n ref: string\n files: string[]\n /** True when ref is a branch fallback (main/master) rather than a version tag */\n fallback?: boolean\n}\n\n/**\n * Find git tag for a version by checking if ungh can list files at that ref.\n * Tries v{version}, {version}, and optionally {packageName}@{version} (changeset convention).\n */\nasync function findGitTag(owner: string, repo: string, version: string, packageName?: string, branchHint?: string): Promise<TagResult | null> {\n const candidates = [`v${version}`, version]\n if (packageName)\n candidates.push(`${packageName}@${version}`)\n\n for (const tag of candidates) {\n const files = await listFilesAtRef(owner, repo, tag)\n if (files.length > 0)\n return { ref: tag, files }\n }\n\n // Fallback: find latest release tag matching {packageName}@* (version mismatch in monorepos)\n if (packageName) {\n const latestTag = await findLatestReleaseTag(owner, repo, packageName)\n if (latestTag) {\n const files = await listFilesAtRef(owner, repo, latestTag)\n if (files.length > 0)\n return { ref: latestTag, files }\n }\n }\n\n // Last resort: try default branch (prefer hint from repo URL fragment)\n const branches = branchHint\n ? [branchHint, ...['main', 'master'].filter(b => b !== branchHint)]\n : ['main', 'master']\n for (const branch of branches) {\n const files = await listFilesAtRef(owner, repo, branch)\n if (files.length > 0)\n return { ref: branch, files, fallback: true }\n }\n\n return null\n}\n\n/**\n * Find the latest release tag matching `{packageName}@*` via ungh releases API.\n * Handles monorepos where npm version doesn't match git tag version.\n */\nasync function findLatestReleaseTag(owner: string, repo: string, packageName: string): Promise<string | null> {\n const data = await $fetch<{ releases?: Array<{ tag: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n const prefix = `${packageName}@`\n return data?.releases?.find(r => r.tag.startsWith(prefix))?.tag ?? null\n}\n\n/**\n * Filter file paths by prefix and md/mdx extension\n */\nfunction filterDocFiles(files: string[], pathPrefix: string): string[] {\n return files.filter(f => f.startsWith(pathPrefix) && /\\.(?:md|mdx)$/.test(f))\n}\n\n/** Known noise paths to exclude from doc discovery */\nconst NOISE_PATTERNS = [\n /^\\.changeset\\//,\n /CHANGELOG\\.md$/i,\n /CONTRIBUTING\\.md$/i,\n /^\\.github\\//,\n]\n\n/** Directories to exclude from \"best directory\" heuristic */\nconst EXCLUDE_DIRS = new Set([\n 'test',\n 'tests',\n '__tests__',\n 'fixtures',\n 'fixture',\n 'examples',\n 'example',\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n 'e2e',\n 'spec',\n 'mocks',\n '__mocks__',\n])\n\n/** Directory names that suggest documentation */\nconst DOC_DIR_BONUS = new Set([\n 'docs',\n 'documentation',\n 'pages',\n 'content',\n 'website',\n 'guide',\n 'guides',\n 'wiki',\n 'manual',\n 'api',\n])\n\ninterface DiscoveredDocs {\n files: string[]\n /** Prefix before 'docs/' to strip when normalizing (e.g. 
'apps/evalite-docs/src/content/') */\n prefix: string\n}\n\n/**\n * Check if a path contains any excluded directory\n */\nfunction hasExcludedDir(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => EXCLUDE_DIRS.has(p.toLowerCase()))\n}\n\n/**\n * Get the depth of a path (number of directory levels)\n */\nfunction getPathDepth(path: string): number {\n return path.split('/').filter(Boolean).length\n}\n\n/**\n * Check if path contains a doc-related directory name\n */\nfunction hasDocDirBonus(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => DOC_DIR_BONUS.has(p.toLowerCase()))\n}\n\n/**\n * Score a directory for doc likelihood.\n * Higher = better. Formula: count * nameBonus / depth\n */\nfunction scoreDocDir(dir: string, fileCount: number): number {\n const depth = getPathDepth(dir) || 1\n const nameBonus = hasDocDirBonus(dir) ? 1.5 : 1\n return (fileCount * nameBonus) / depth\n}\n\n/**\n * Discover doc files in non-standard locations.\n * First tries to find clusters of md/mdx files in paths containing /docs/.\n * Falls back to finding the directory with the most markdown files (≥5).\n */\nfunction discoverDocFiles(allFiles: string[]): DiscoveredDocs | null {\n const mdFiles = allFiles\n .filter(f => /\\.(?:md|mdx)$/.test(f))\n .filter(f => !NOISE_PATTERNS.some(p => p.test(f)))\n .filter(f => f.includes('/'))\n\n // Strategy 1: Look for /docs/ clusters (existing behavior)\n const docsGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n const docsIdx = file.lastIndexOf('/docs/')\n if (docsIdx === -1)\n continue\n\n const prefix = file.slice(0, docsIdx + '/docs/'.length)\n const group = docsGroups.get(prefix) || []\n group.push(file)\n docsGroups.set(prefix, group)\n }\n\n if (docsGroups.size > 0) {\n const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0]!\n if (largest[1].length >= 3) {\n const fullPrefix = largest[0]\n const docsIdx = fullPrefix.lastIndexOf('docs/')\n const stripPrefix = docsIdx > 0 ? fullPrefix.slice(0, docsIdx) : ''\n return { files: largest[1], prefix: stripPrefix }\n }\n }\n\n // Strategy 2: Find best directory by file count (for non-standard structures)\n const dirGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n if (hasExcludedDir(file))\n continue\n\n // Group by immediate parent directory\n const lastSlash = file.lastIndexOf('/')\n if (lastSlash === -1)\n continue\n\n const dir = file.slice(0, lastSlash + 1)\n const group = dirGroups.get(dir) || []\n group.push(file)\n dirGroups.set(dir, group)\n }\n\n if (dirGroups.size === 0)\n return null\n\n // Score and sort directories\n const scored = [...dirGroups.entries()]\n .map(([dir, files]) => ({ dir, files, score: scoreDocDir(dir, files.length) }))\n .filter(d => d.files.length >= 5) // Minimum threshold\n .sort((a, b) => b.score - a.score)\n\n if (scored.length === 0)\n return null\n\n const best = scored[0]!\n\n // For non-docs paths, the prefix is everything up to (but not including) the final dir\n // e.g. 
'website/pages/' -> prefix is 'website/' so files normalize to 'pages/...'\n // But actually we want the full prefix so downstream can strip it\n return { files: best.files, prefix: best.dir }\n}\n\n/**\n * List markdown files in a folder at a specific git ref\n */\nasync function listDocsAtRef(owner: string, repo: string, ref: string, pathPrefix = 'docs/'): Promise<string[]> {\n const files = await listFilesAtRef(owner, repo, ref)\n return filterDocFiles(files, pathPrefix)\n}\n\n/**\n * Fetch versioned docs from GitHub repo's docs/ folder.\n * Pass packageName to check doc overrides (e.g. vue -> vuejs/docs).\n */\nexport async function fetchGitDocs(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitDocsResult | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override) {\n const ref = override.ref || 'main'\n const fallback = !override.ref\n const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`)\n if (files.length === 0)\n return null\n return {\n baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,\n ref,\n files,\n fallback,\n }\n }\n\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n let docs = filterDocFiles(tag.files, 'docs/')\n let docsPrefix: string | undefined\n let allFiles: string[] | undefined\n\n // Fallback: discover docs in nested paths (monorepos, content collections)\n if (docs.length === 0) {\n const discovered = discoverDocFiles(tag.files)\n if (discovered) {\n docs = discovered.files\n docsPrefix = discovered.prefix || undefined\n allFiles = tag.files\n }\n }\n\n if (docs.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files: docs,\n docsPrefix,\n allFiles,\n fallback: tag.fallback,\n }\n}\n\n/**\n * Strip file extension (.md, .mdx) and leading slash from a path\n */\nfunction normalizePath(p: string): string {\n return p.replace(/^\\//, '').replace(/\\.(?:md|mdx)$/, '')\n}\n\n/**\n * Validate that discovered git docs are relevant by cross-referencing llms.txt links\n * against the repo file tree. 
Uses extensionless suffix matching to handle monorepo nesting.\n *\n * Returns { isValid, matchRatio } where isValid = matchRatio >= 0.3\n */\nexport function validateGitDocsWithLlms(\n llmsLinks: LlmsLink[],\n repoFiles: string[],\n): { isValid: boolean, matchRatio: number } {\n if (llmsLinks.length === 0)\n return { isValid: true, matchRatio: 1 }\n\n // Sample up to 10 links\n const sample = llmsLinks.slice(0, 10)\n\n // Normalize llms link paths\n const normalizedLinks = sample.map((link) => {\n let path = link.url\n // Strip absolute URL to pathname\n if (path.startsWith('http')) {\n try {\n path = new URL(path).pathname\n }\n catch { /* keep as-is */ }\n }\n return normalizePath(path)\n })\n\n // Pre-process repo files: strip extensions to get extensionless paths\n const repoNormalized = new Set(repoFiles.map(normalizePath))\n\n let matches = 0\n for (const linkPath of normalizedLinks) {\n // Check if any repo file ends with this path (suffix matching for monorepo nesting)\n for (const repoPath of repoNormalized) {\n if (repoPath === linkPath || repoPath.endsWith(`/${linkPath}`)) {\n matches++\n break\n }\n }\n }\n\n const matchRatio = matches / sample.length\n return { isValid: matchRatio >= 0.3, matchRatio }\n}\n\n/**\n * Verify a GitHub repo is the source for an npm package by checking package.json name field.\n * Checks root first, then common monorepo paths (packages/{shortName}, packages/{name}).\n */\nasync function verifyNpmRepo(owner: string, repo: string, packageName: string): Promise<boolean> {\n const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`\n const shortName = packageName.replace(/^@.*\\//, '')\n const paths = [\n 'package.json',\n `packages/${shortName}/package.json`,\n `packages/${packageName.replace(/^@/, '').replace('/', '-')}/package.json`,\n ]\n for (const path of paths) {\n const text = await fetchText(`${base}/${path}`)\n if (!text)\n continue\n try {\n const pkg = JSON.parse(text) as { name?: string }\n if (pkg.name === packageName)\n return true\n }\n catch {}\n }\n return false\n}\n\nexport async function searchGitHubRepo(packageName: string): Promise<string | null> {\n // Try ungh heuristic first — check if repo name matches package name\n const shortName = packageName.replace(/^@.*\\//, '')\n for (const candidate of [packageName.replace(/^@/, '').replace('/', '/'), shortName]) {\n // Only try if it looks like owner/repo\n if (!candidate.includes('/')) {\n // Try common patterns: {name}/{name}\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${shortName}/${shortName}`\n continue\n }\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${candidate}`\n }\n\n // Try gh CLI — strip @ to avoid GitHub search syntax issues\n const searchTerm = packageName.replace(/^@/, '')\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['search', 'repos', searchTerm, '--json', 'fullName', '--limit', '5'], {\n encoding: 'utf-8',\n timeout: 15_000,\n })\n if (!json)\n throw new Error('no output')\n const repos = JSON.parse(json) as Array<{ fullName: string }>\n // Prefer exact suffix match\n const match = repos.find(r =>\n r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.fullName}`\n // Validate remaining 
results via package.json\n for (const candidate of repos) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.fullName}`\n }\n }\n catch {\n // fall through to REST API\n }\n }\n\n // Fallback: GitHub REST search API (no auth needed, but rate-limited)\n const query = encodeURIComponent(`${searchTerm} in:name`)\n const data = await $fetch<{ items?: Array<{ full_name: string }> }>(\n `https://api.github.com/search/repositories?q=${query}&per_page=5`,\n ).catch(() => null)\n if (!data?.items?.length)\n return null\n\n // Prefer exact suffix match\n const match = data.items.find(r =>\n r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.full_name}`\n\n // Validate remaining results via package.json\n for (const candidate of data.items) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.full_name}`\n }\n\n return null\n}\n\n/**\n * Fetch GitHub repo metadata to get website URL.\n * Pass packageName to check doc overrides first (avoids API call).\n */\nexport async function fetchGitHubRepoMeta(owner: string, repo: string, packageName?: string): Promise<{ homepage?: string } | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override?.homepage)\n return { homepage: override.homepage }\n\n // Prefer gh CLI to avoid rate limits\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '-q', '{homepage}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (!json)\n throw new Error('no output')\n const data = JSON.parse(json) as { homepage?: string }\n return data?.homepage ? { homepage: data.homepage } : null\n }\n catch {\n // fall through to fetch\n }\n }\n\n const data = await $fetch<{ homepage?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n return data?.homepage ? { homepage: data.homepage } : null\n}\n\n/**\n * Resolve README URL for a GitHub repo, returns ungh:// pseudo-URL or raw URL\n */\nexport async function fetchReadme(owner: string, repo: string, subdir?: string, ref?: string): Promise<string | null> {\n const branch = ref || 'main'\n\n // Try ungh first\n const unghUrl = subdir\n ? `https://ungh.cc/repos/${owner}/${repo}/files/${branch}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ''}`\n\n const unghRes = await $fetch.raw(unghUrl).catch(() => null)\n\n if (unghRes?.ok) {\n return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ''}${ref ? `@${ref}` : ''}`\n }\n\n // Fallback to raw.githubusercontent.com — use GET instead of HEAD\n // because raw.githubusercontent.com sometimes returns HTML on HEAD for valid URLs\n const basePath = subdir ? `${subdir}/` : ''\n const branches = ref ? 
[ref] : ['main', 'master']\n for (const b of branches) {\n for (const filename of ['README.md', 'Readme.md', 'readme.md']) {\n const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`\n const res = await $fetch.raw(readmeUrl).catch(() => null)\n if (res?.ok)\n return readmeUrl\n }\n }\n\n return null\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport interface GitSourceResult {\n /** URL pattern for fetching source */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of source file paths relative to repo root */\n files: string[]\n}\n\n/** Source file extensions to include */\nconst SOURCE_EXTENSIONS = new Set([\n '.ts',\n '.tsx',\n '.mts',\n '.cts',\n '.js',\n '.jsx',\n '.mjs',\n '.cjs',\n '.vue',\n '.svelte',\n '.astro',\n])\n\n/** Paths/patterns to exclude */\nconst EXCLUDE_PATTERNS = [\n /\\.test\\./,\n /\\.spec\\./,\n /\\.d\\.ts$/,\n /__tests__/,\n /__mocks__/,\n /\\.config\\./,\n /fixtures?\\//,\n]\n\n/**\n * Filter source files from a file list\n */\nfunction filterSourceFiles(files: string[]): string[] {\n return files.filter((path) => {\n if (!path.startsWith('src/'))\n return false\n\n const ext = path.slice(path.lastIndexOf('.'))\n if (!SOURCE_EXTENSIONS.has(ext))\n return false\n if (EXCLUDE_PATTERNS.some(p => p.test(path)))\n return false\n\n return true\n })\n}\n\n/**\n * Fetch source files from GitHub repo's src/ folder\n */\nexport async function fetchGitSource(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitSourceResult | null> {\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n const files = filterSourceFiles(tag.files)\n if (files.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files,\n }\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport async function fetchReadmeContent(url: string): Promise<string | null> {\n // Local file\n if (url.startsWith('file://')) {\n const filePath = fileURLToPath(url)\n if (!fsExistsSync(filePath))\n return null\n return fsReadFileSync(filePath, 'utf-8')\n }\n\n if (url.startsWith('ungh://')) {\n let path = url.replace('ungh://', '')\n let ref = 'main'\n\n // Parse ref from owner/repo/subdir@ref\n const atIdx = path.lastIndexOf('@')\n if (atIdx !== -1) {\n ref = path.slice(atIdx + 1)\n path = path.slice(0, atIdx)\n }\n\n const parts = path.split('/')\n const owner = parts[0]\n const repo = parts[1]\n const subdir = parts.slice(2).join('/')\n\n const unghUrl = subdir\n ? 
`https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`\n\n const text = await $fetch(unghUrl, { responseType: 'text' }).catch(() => null)\n if (!text)\n return null\n\n try {\n const json = JSON.parse(text) as { markdown?: string, file?: { contents?: string } }\n return json.markdown || json.file?.contents || null\n }\n catch {\n return text\n }\n }\n\n return fetchText(url)\n}\n","/**\n * llms.txt fetching and parsing\n */\n\nimport type { FetchedDoc, LlmsContent, LlmsLink } from './types'\nimport pLimit from 'p-limit'\nimport { fetchText, verifyUrl } from './utils'\n\n/**\n * Check for llms.txt at a docs URL, returns the llms.txt URL if found\n */\nexport async function fetchLlmsUrl(docsUrl: string): Promise<string | null> {\n const origin = new URL(docsUrl).origin\n const llmsUrl = `${origin}/llms.txt`\n if (await verifyUrl(llmsUrl))\n return llmsUrl\n return null\n}\n\n/**\n * Fetch and parse llms.txt content\n */\nexport async function fetchLlmsTxt(url: string): Promise<LlmsContent | null> {\n const content = await fetchText(url)\n if (!content || content.length < 50)\n return null\n\n return {\n raw: content,\n links: parseMarkdownLinks(content),\n }\n}\n\n/**\n * Parse markdown links from llms.txt to get .md file paths\n */\nexport function parseMarkdownLinks(content: string): LlmsLink[] {\n const links: LlmsLink[] = []\n const seen = new Set<string>()\n const linkRegex = /\\[([^\\]]+)\\]\\(([^)]+\\.md)\\)/g\n for (let match = linkRegex.exec(content); match !== null; match = linkRegex.exec(content)) {\n const url = match[2]!\n if (!seen.has(url)) {\n seen.add(url)\n links.push({ title: match[1]!, url })\n }\n }\n\n return links\n}\n\n/**\n * Download all .md files referenced in llms.txt\n */\n/** Reject non-https URLs and private/link-local IPs */\nfunction isSafeUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n if (parsed.protocol !== 'https:')\n return false\n const host = parsed.hostname\n // Reject private/link-local/loopback\n if (host === 'localhost' || host === '127.0.0.1' || host === '::1')\n return false\n if (host === '169.254.169.254') // cloud metadata\n return false\n if (/^(?:10\\.|172\\.(?:1[6-9]|2\\d|3[01])\\.|192\\.168\\.)/.test(host))\n return false\n if (host.startsWith('[')) // IPv6 link-local\n return false\n return true\n }\n catch { return false }\n}\n\nexport async function downloadLlmsDocs(\n llmsContent: LlmsContent,\n baseUrl: string,\n onProgress?: (url: string, index: number, total: number) => void,\n): Promise<FetchedDoc[]> {\n const limit = pLimit(5)\n let completed = 0\n\n const results = await Promise.all(\n llmsContent.links.map(link => limit(async () => {\n const url = link.url.startsWith('http')\n ? link.url\n : `${baseUrl.replace(/\\/$/, '')}${link.url.startsWith('/') ? 
'' : '/'}${link.url}`\n\n if (!isSafeUrl(url))\n return null\n\n onProgress?.(link.url, completed++, llmsContent.links.length)\n\n const content = await fetchText(url)\n if (content && content.length > 100)\n return { url: link.url, title: link.title, content } as FetchedDoc\n return null\n })),\n )\n\n return results.filter((d): d is FetchedDoc => d !== null)\n}\n\n/**\n * Normalize llms.txt links to relative paths for local access\n * Handles: absolute URLs, root-relative paths, and relative paths\n */\nexport function normalizeLlmsLinks(content: string, baseUrl?: string): string {\n let normalized = content\n\n // Handle absolute URLs: https://example.com/docs/foo.md → ./docs/foo.md\n if (baseUrl) {\n const base = baseUrl.replace(/\\/$/, '')\n const escaped = base.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n normalized = normalized.replace(\n new RegExp(`\\\\]\\\\(${escaped}(/[^)]+\\\\.md)\\\\)`, 'g'),\n '](./docs$1)',\n )\n }\n\n // Handle root-relative paths: /foo.md → ./docs/foo.md\n normalized = normalized.replace(/\\]\\(\\/([^)]+\\.md)\\)/g, '](./docs/$1)')\n\n return normalized\n}\n\n/**\n * Extract sections from llms-full.txt by URL patterns\n * Format: ---\\nurl: /path.md\\n---\\n<content>\\n\\n---\\nurl: ...\n */\nexport function extractSections(content: string, patterns: string[]): string | null {\n const sections: string[] = []\n const parts = content.split(/\\n---\\n/)\n\n for (const part of parts) {\n const urlMatch = part.match(/^url: *(\\S.*)$/m)\n if (!urlMatch)\n continue\n\n const url = urlMatch[1]!\n if (patterns.some(p => url.includes(p))) {\n const contentStart = part.indexOf('\\n', part.indexOf('url:'))\n if (contentStart > -1) {\n sections.push(part.slice(contentStart + 1))\n }\n }\n }\n\n if (sections.length === 0)\n return null\n return sections.join('\\n\\n---\\n\\n')\n}\n","/**\n * NPM registry lookup\n */\n\nimport type { LocalDependency, NpmPackageInfo, ResolveAttempt, ResolvedPackage, ResolveResult } from './types'\nimport { spawnSync } from 'node:child_process'\nimport { createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, unlinkSync } from 'node:fs'\nimport { Writable } from 'node:stream'\nimport { pathToFileURL } from 'node:url'\nimport { resolvePathSync } from 'mlly'\nimport { basename, dirname, join, resolve } from 'pathe'\nimport { getCacheDir } from '../cache/version'\nimport { fetchGitDocs, fetchGitHubRepoMeta, fetchReadme, searchGitHubRepo, validateGitDocsWithLlms } from './github'\nimport { fetchLlmsTxt, fetchLlmsUrl } from './llms'\nimport { $fetch, isGitHubRepoUrl, isUselessDocsUrl, normalizeRepoUrl, parseGitHubUrl } from './utils'\n\n/**\n * Search npm registry for packages matching a query.\n * Used as a fallback when direct package lookup fails.\n */\nexport async function searchNpmPackages(query: string, size = 5): Promise<Array<{ name: string, description?: string, version: string }>> {\n const data = await $fetch<{\n objects: Array<{ package: { name: string, description?: string, version: string } }>\n }>(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null)\n\n if (!data?.objects?.length)\n return []\n\n return data.objects.map(o => ({\n name: o.package.name,\n description: o.package.description,\n version: o.package.version,\n }))\n}\n\n/**\n * Fetch package info from npm registry\n */\nexport async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo | null> {\n // Try unpkg first (faster, CDN)\n const data = await 
$fetch<NpmPackageInfo>(`https://unpkg.com/${packageName}/package.json`).catch(() => null)\n if (data)\n return data\n\n // Fallback to npm registry\n return $fetch<NpmPackageInfo>(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null)\n}\n\nexport interface DistTagInfo {\n version: string\n releasedAt?: string\n}\n\nexport interface NpmRegistryMeta {\n releasedAt?: string\n distTags?: Record<string, DistTagInfo>\n}\n\n/**\n * Fetch release date and dist-tags from npm registry\n */\nexport async function fetchNpmRegistryMeta(packageName: string, version: string): Promise<NpmRegistryMeta> {\n const data = await $fetch<{\n 'time'?: Record<string, string>\n 'dist-tags'?: Record<string, string>\n }>(`https://registry.npmjs.org/${packageName}`).catch(() => null)\n\n if (!data)\n return {}\n\n // Enrich dist-tags with release dates\n const distTags: Record<string, DistTagInfo> | undefined = data['dist-tags']\n ? Object.fromEntries(\n Object.entries(data['dist-tags']).map(([tag, ver]) => [\n tag,\n { version: ver, releasedAt: data.time?.[ver] },\n ]),\n )\n : undefined\n\n return {\n releasedAt: data.time?.[version] || undefined,\n distTags,\n }\n}\n\nexport type ResolveStep = 'npm' | 'github-docs' | 'github-meta' | 'github-search' | 'readme' | 'llms.txt' | 'local'\n\nexport interface ResolveOptions {\n /** User's installed version - used to fetch versioned git docs */\n version?: string\n /** Current working directory - for local readme fallback */\n cwd?: string\n /** Progress callback - called before each resolution step */\n onProgress?: (step: ResolveStep) => void\n}\n\n/**\n * Shared GitHub resolution cascade: git docs → repo meta (homepage) → README.\n * Used for both \"repo URL found in package.json\" and \"repo URL found via search\" paths.\n */\nasync function resolveGitHub(\n gh: { owner: string, repo: string },\n targetVersion: string | undefined,\n pkg: { name: string },\n result: ResolvedPackage,\n attempts: ResolveAttempt[],\n onProgress?: (step: ResolveStep) => void,\n opts?: { rawRepoUrl?: string, subdir?: string },\n): Promise<string[] | undefined> {\n let allFiles: string[] | undefined\n\n // Try versioned git docs first (docs/**/*.md at git tag)\n if (targetVersion) {\n onProgress?.('github-docs')\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name, opts?.rawRepoUrl)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n allFiles = gitDocs.allFiles\n attempts.push({\n source: 'github-docs',\n url: gitDocs.baseUrl,\n status: 'success',\n message: gitDocs.fallback\n ? 
`Found ${gitDocs.files.length} docs at ${gitDocs.ref} (no tag for v${targetVersion})`\n : `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`,\n })\n }\n else {\n attempts.push({\n source: 'github-docs',\n url: `${result.repoUrl}/tree/v${targetVersion}/docs`,\n status: 'not-found',\n message: 'No docs/ folder found at version tag',\n })\n }\n }\n\n // If no docsUrl yet (npm had no homepage), try GitHub repo metadata\n if (!result.docsUrl) {\n onProgress?.('github-meta')\n const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name)\n if (repoMeta?.homepage && !isUselessDocsUrl(repoMeta.homepage)) {\n result.docsUrl = repoMeta.homepage\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'success',\n message: `Found homepage: ${repoMeta.homepage}`,\n })\n }\n else {\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'not-found',\n message: 'No homepage in repo metadata',\n })\n }\n }\n\n // README fallback via ungh\n onProgress?.('readme')\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, opts?.subdir, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n attempts.push({\n source: 'readme',\n url: readmeUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'readme',\n url: `${result.repoUrl}/README.md`,\n status: 'not-found',\n message: 'No README found',\n })\n }\n\n return allFiles\n}\n\n/**\n * Resolve documentation URL for a package (legacy - returns null on failure)\n */\nexport async function resolvePackageDocs(packageName: string, options: ResolveOptions = {}): Promise<ResolvedPackage | null> {\n const result = await resolvePackageDocsWithAttempts(packageName, options)\n return result.package\n}\n\n/**\n * Resolve documentation URL for a package with attempt tracking\n */\nexport async function resolvePackageDocsWithAttempts(packageName: string, options: ResolveOptions = {}): Promise<ResolveResult> {\n const attempts: ResolveAttempt[] = []\n const { onProgress } = options\n\n onProgress?.('npm')\n const pkg = await fetchNpmPackage(packageName)\n if (!pkg) {\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'not-found',\n message: 'Package not found on npm registry',\n })\n return { package: null, attempts }\n }\n\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'success',\n message: `Found ${pkg.name}@${pkg.version}`,\n })\n\n // Fetch release date and dist-tags for this version\n const registryMeta = pkg.version\n ? await fetchNpmRegistryMeta(packageName, pkg.version)\n : {}\n\n const result: ResolvedPackage = {\n name: pkg.name,\n version: pkg.version,\n releasedAt: registryMeta.releasedAt,\n description: pkg.description,\n dependencies: pkg.dependencies,\n distTags: registryMeta.distTags,\n }\n\n // Track allFiles from heuristic git doc discovery for llms.txt validation\n let gitDocsAllFiles: string[] | undefined\n\n // Extract repo URL (handle both object and shorthand string formats)\n let subdir: string | undefined\n let rawRepoUrl: string | undefined\n if (typeof pkg.repository === 'object' && pkg.repository?.url) {\n rawRepoUrl = pkg.repository.url\n const normalized = normalizeRepoUrl(rawRepoUrl)\n // Handle shorthand \"owner/repo\" in repository.url field (e.g. 
cac)\n if (!normalized.includes('://') && normalized.includes('/') && !normalized.includes(':'))\n result.repoUrl = `https://github.com/${normalized}`\n else\n result.repoUrl = normalized\n subdir = pkg.repository.directory\n }\n else if (typeof pkg.repository === 'string') {\n if (pkg.repository.includes('://')) {\n // Full URL string (e.g. \"https://github.com/org/repo/tree/main/packages/sub\")\n const gh = parseGitHubUrl(pkg.repository)\n if (gh)\n result.repoUrl = `https://github.com/${gh.owner}/${gh.repo}`\n }\n else {\n // Shorthand: \"owner/repo\" or \"github:owner/repo\"\n const repo = pkg.repository.replace(/^github:/, '')\n if (repo.includes('/') && !repo.includes(':'))\n result.repoUrl = `https://github.com/${repo}`\n }\n }\n\n // Use npm homepage early (skip GitHub repo URLs)\n if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage) && !isUselessDocsUrl(pkg.homepage)) {\n result.docsUrl = pkg.homepage\n }\n\n // GitHub repo handling - try versioned git docs first\n if (result.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(result.repoUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, { rawRepoUrl, subdir })\n }\n }\n else if (!result.repoUrl) {\n // No repo URL in package.json — try to find it via GitHub search\n onProgress?.('github-search')\n const searchedUrl = await searchGitHubRepo(pkg.name)\n if (searchedUrl) {\n result.repoUrl = searchedUrl\n attempts.push({\n source: 'github-search',\n url: searchedUrl,\n status: 'success',\n message: `Found via GitHub search: ${searchedUrl}`,\n })\n\n const gh = parseGitHubUrl(searchedUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress)\n }\n }\n else {\n attempts.push({\n source: 'github-search',\n status: 'not-found',\n message: 'No repository URL in package.json and GitHub search found no match',\n })\n }\n }\n\n // Check for llms.txt on docsUrl\n if (result.docsUrl) {\n onProgress?.('llms.txt')\n const llmsUrl = await fetchLlmsUrl(result.docsUrl)\n if (llmsUrl) {\n result.llmsUrl = llmsUrl\n attempts.push({\n source: 'llms.txt',\n url: llmsUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'llms.txt',\n url: `${new URL(result.docsUrl).origin}/llms.txt`,\n status: 'not-found',\n message: 'No llms.txt at docs URL',\n })\n }\n }\n\n // Validate heuristic git docs against llms.txt links\n if (result.gitDocsUrl && result.llmsUrl && gitDocsAllFiles) {\n const llmsContent = await fetchLlmsTxt(result.llmsUrl)\n if (llmsContent && llmsContent.links.length > 0) {\n const validation = validateGitDocsWithLlms(llmsContent.links, gitDocsAllFiles)\n if (!validation.isValid) {\n attempts.push({\n source: 'github-docs',\n url: result.gitDocsUrl,\n status: 'not-found',\n message: `Heuristic git docs don't match llms.txt links (${Math.round(validation.matchRatio * 100)}% match), preferring llms.txt`,\n })\n result.gitDocsUrl = undefined\n result.gitRef = undefined\n }\n }\n }\n\n // Fallback: check local node_modules readme when all else fails\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {\n onProgress?.('local')\n const pkgDir = join(options.cwd, 'node_modules', packageName)\n // Check common readme variations (case-insensitive)\n const readmeFile = existsSync(pkgDir) && readdirSync(pkgDir).find(f => /^readme\\.md$/i.test(f))\n if 
(readmeFile) {\n const readmePath = join(pkgDir, readmeFile)\n result.readmeUrl = pathToFileURL(readmePath).href\n attempts.push({\n source: 'readme',\n url: readmePath,\n status: 'success',\n message: 'Found local readme in node_modules',\n })\n }\n }\n\n // Must have at least one source\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl) {\n return { package: null, attempts }\n }\n\n return { package: result, attempts }\n}\n\n/**\n * Parse version specifier, handling protocols like link:, workspace:, npm:, file:\n */\nexport function parseVersionSpecifier(\n name: string,\n version: string,\n cwd: string,\n): LocalDependency | null {\n // link: - resolve local package.json\n if (version.startsWith('link:')) {\n const linkPath = resolve(cwd, version.slice(5))\n const linkedPkgPath = join(linkPath, 'package.json')\n if (existsSync(linkedPkgPath)) {\n const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, 'utf-8'))\n return {\n name: linkedPkg.name || name,\n version: linkedPkg.version || '0.0.0',\n }\n }\n return null // linked package doesn't exist\n }\n\n // npm: - extract aliased package name\n if (version.startsWith('npm:')) {\n const specifier = version.slice(4)\n const atIndex = specifier.startsWith('@')\n ? specifier.indexOf('@', 1)\n : specifier.indexOf('@')\n const realName = atIndex > 0 ? specifier.slice(0, atIndex) : specifier\n return { name: realName, version: resolveInstalledVersion(realName, cwd) || '*' }\n }\n\n // file: and git: - skip (local/custom sources)\n if (version.startsWith('file:') || version.startsWith('git:') || version.startsWith('git+')) {\n return null\n }\n\n // For everything else (semver, catalog:, workspace:, etc.)\n // resolve the actual installed version from node_modules\n const installed = resolveInstalledVersion(name, cwd)\n if (installed)\n return { name, version: installed }\n\n // Fallback: strip semver prefix if it looks like one\n if (/^[\\^~>=<\\d]/.test(version))\n return { name, version: version.replace(/^[\\^~>=<]/, '') }\n\n // catalog: and workspace: specifiers - include with wildcard version\n // so the dep isn't silently dropped from state.deps\n if (version.startsWith('catalog:') || version.startsWith('workspace:'))\n return { name, version: '*' }\n\n return null\n}\n\n/**\n * Resolve the actual installed version of a package by finding its package.json\n * via mlly's resolvePathSync. 
Works regardless of package manager or version protocol.\n */\nexport function resolveInstalledVersion(name: string, cwd: string): string | null {\n try {\n const resolved = resolvePathSync(`${name}/package.json`, { url: cwd })\n const pkg = JSON.parse(readFileSync(resolved, 'utf-8'))\n return pkg.version || null\n }\n catch {\n // Packages with `exports` that don't expose ./package.json\n // Resolve the entry point, then walk up to find package.json\n try {\n const entry = resolvePathSync(name, { url: cwd })\n let dir = dirname(entry)\n while (dir && basename(dir) !== 'node_modules') {\n const pkgPath = join(dir, 'package.json')\n if (existsSync(pkgPath)) {\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n return pkg.version || null\n }\n dir = dirname(dir)\n }\n }\n catch {}\n return null\n }\n}\n\n/**\n * Read package.json dependencies with versions\n */\nexport async function readLocalDependencies(cwd: string): Promise<LocalDependency[]> {\n const pkgPath = join(cwd, 'package.json')\n if (!existsSync(pkgPath)) {\n throw new Error('No package.json found in current directory')\n }\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n const deps: Record<string, string> = {\n ...pkg.dependencies,\n ...pkg.devDependencies,\n }\n\n const results: LocalDependency[] = []\n\n for (const [name, version] of Object.entries(deps)) {\n // Skip types and dev tools\n if (name.startsWith('@types/') || ['typescript', 'eslint', 'prettier', 'vitest', 'jest'].includes(name)) {\n continue\n }\n\n const parsed = parseVersionSpecifier(name, version, cwd)\n if (parsed) {\n results.push(parsed)\n }\n }\n\n return results\n}\n\nexport interface LocalPackageInfo {\n name: string\n version: string\n description?: string\n repoUrl?: string\n localPath: string\n}\n\n/**\n * Read package info from a local path (for link: deps)\n */\nexport function readLocalPackageInfo(localPath: string): LocalPackageInfo | null {\n const pkgPath = join(localPath, 'package.json')\n if (!existsSync(pkgPath))\n return null\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n\n let repoUrl: string | undefined\n if (pkg.repository?.url) {\n repoUrl = normalizeRepoUrl(pkg.repository.url)\n }\n else if (typeof pkg.repository === 'string') {\n repoUrl = normalizeRepoUrl(pkg.repository)\n }\n\n return {\n name: pkg.name,\n version: pkg.version || '0.0.0',\n description: pkg.description,\n repoUrl,\n localPath,\n }\n}\n\n/**\n * Resolve docs for a local package (link: dependency)\n */\nexport async function resolveLocalPackageDocs(localPath: string): Promise<ResolvedPackage | null> {\n const info = readLocalPackageInfo(localPath)\n if (!info)\n return null\n\n const result: ResolvedPackage = {\n name: info.name,\n version: info.version,\n description: info.description,\n repoUrl: info.repoUrl,\n }\n\n // Try GitHub if repo URL available\n if (info.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(info.repoUrl)\n if (gh) {\n // Try versioned git docs\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n }\n\n // README fallback via ungh\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, undefined, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n }\n }\n }\n\n // Fallback: read local readme (case-insensitive)\n if (!result.readmeUrl && !result.gitDocsUrl) {\n const readmeFile = readdirSync(localPath).find(f => 
/^readme\\.md$/i.test(f))\n if (readmeFile) {\n result.readmeUrl = pathToFileURL(join(localPath, readmeFile)).href\n }\n }\n\n if (!result.readmeUrl && !result.gitDocsUrl) {\n return null\n }\n\n return result\n}\n\n/**\n * Download and extract npm package tarball to cache directory.\n * Used when the package isn't available in node_modules.\n *\n * Extracts to: ~/.skilld/references/<pkg>@<version>/pkg/\n * Returns the extracted directory path, or null on failure.\n */\nexport async function fetchPkgDist(name: string, version: string): Promise<string | null> {\n const cacheDir = getCacheDir(name, version)\n const pkgDir = join(cacheDir, 'pkg')\n\n // Already extracted\n if (existsSync(join(pkgDir, 'package.json')))\n return pkgDir\n\n // Fetch version metadata to get tarball URL\n const data = await $fetch<{ dist?: { tarball?: string } }>(\n `https://registry.npmjs.org/${name}/${version}`,\n ).catch(() => null)\n if (!data)\n return null\n const tarballUrl = data.dist?.tarball\n if (!tarballUrl)\n return null\n\n // Download tarball to temp file\n const tarballRes = await fetch(tarballUrl, {\n headers: { 'User-Agent': 'skilld/1.0' },\n }).catch(() => null)\n\n if (!tarballRes?.ok || !tarballRes.body)\n return null\n\n mkdirSync(pkgDir, { recursive: true })\n\n const tmpTarball = join(cacheDir, '_pkg.tgz')\n const fileStream = createWriteStream(tmpTarball)\n\n // Stream response body to file\n const reader = tarballRes.body.getReader()\n await new Promise<void>((res, reject) => {\n const writable = new Writable({\n write(chunk, _encoding, callback) {\n fileStream.write(chunk, callback)\n },\n })\n writable.on('finish', () => {\n fileStream.end()\n res()\n })\n writable.on('error', reject)\n\n function pump() {\n reader.read().then(({ done, value }) => {\n if (done) {\n writable.end()\n return\n }\n writable.write(value, () => pump())\n }).catch(reject)\n }\n pump()\n })\n\n // Extract tarball — npm tarballs have a \"package/\" prefix\n const { status } = spawnSync('tar', ['xzf', tmpTarball, '--strip-components=1', '-C', pkgDir], { stdio: 'ignore' })\n if (status !== 0) {\n rmSync(pkgDir, { recursive: true, force: true })\n rmSync(tmpTarball, { force: true })\n return null\n }\n\n unlinkSync(tmpTarball)\n return pkgDir\n}\n\n/**\n * Fetch just the latest version string from npm (lightweight)\n */\nexport async function fetchLatestVersion(packageName: string): Promise<string | null> {\n const data = await $fetch<{ version?: string }>(\n `https://unpkg.com/${packageName}/package.json`,\n ).catch(() => null)\n return data?.version || null\n}\n\n/**\n * Get installed skill version from SKILL.md\n */\nexport function getInstalledSkillVersion(skillDir: string): string | null {\n const skillPath = join(skillDir, 'SKILL.md')\n if (!existsSync(skillPath))\n return null\n\n const content = readFileSync(skillPath, 'utf-8')\n const match = content.match(/^version:\\s*\"?([^\"\\n]+)\"?/m)\n return match?.[1] || 
null\n}\n"],"mappings":";;;;;;;;;;;;AAIA,MAAa,YAAY,IAAI,IAAI;CAC/B;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAa,WAAW,QAAgB,IAAI,MAAM,IAAI,CAAC;AAGvD,SAAgB,iBAAiB,QAAuE;CACtG,MAAM,QAAQ,CAAC,MAAM;AACrB,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,CACzC,KAAI,MAAM,KAAA,EACR,OAAM,KAAK,GAAG,EAAE,IAAI,OAAO,MAAM,YAAY,UAAU,KAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,KAAK,IAAI;AAEzG,OAAM,KAAK,MAAM;AACjB,QAAO,MAAM,KAAK,KAAK;;ACSzB,IAAI;AAKJ,SAAgB,gBAAyB;AACvC,KAAI,iBAAiB,KAAA,EACnB,QAAO;CACT,MAAM,EAAE,WAAW,UAAU,MAAM,CAAC,QAAQ,SAAS,EAAE,EAAE,OAAO,UAAU,CAAC;AAC3E,QAAQ,eAAe,WAAW;;AAIpC,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,iBAAiB,IAAI,IAAI;CAC7B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,aAAa,IAAI,IAAI;CACzB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,kBAAkB,IAAI,IAAI;CAC9B;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,cAAc,IAAI,IAAI;CAC1B;CACA;CACA;CACA;CACD,CAAC;AAKF,SAAgB,cAAc,QAA6B;CACzD,MAAM,QAAQ,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC;AAC9C,KAAI,MAAM,MAAK,MAAK,WAAW,IAAI,EAAE,CAAC,CACpC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,IAAI,EAAE,CAAC,CACzC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,YAAY,IAAI,EAAE,CAAC,CACrC,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,eAAe,IAAI,EAAE,CAAC,CACxC,QAAO;AACT,QAAO;;AAMT,SAAS,aAAa,OAAmE;AAEvF,KADc,MAAM,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC,CAC1C,MAAK,MAAK,aAAa,IAAI,EAAE,CAAC,CACtC,QAAO;AAET,KAAI,MAAM,MAAM,WAAW,KAAK,IAAI,MAAM,MAAM,WAAW,SAAS,IAAI,MAAM,MAAM,WAAW,aAAa,CAC1G,QAAO;AACT,QAAO;;AAMT,SAAS,UAAU,WAA2B;AAC5C,KAAI,aAAa,GACf,QAAO;AACT,KAAI,aAAa,EACf,QAAO;AACT,QAAO;;AAMT,SAAS,mBACP,OACA,MACA,OACA,OACA,YACe;CACf,MAAM,aAAa,KAAK,IAAI,QAAQ,GAAG,IAAI;CAC3C,IAAI,WAAW;AACf,KAAI,UAAU,SACZ,KAAI,YAAY;EAEd,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,aAAa,QAAQ,KAAK,aAAa,CAAC;OAGnD,YAAW,YAAY,YAAY;UAG9B,YAAY;EAEnB,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,cAAc,QAAQ,KAAK,aAAa,CAAC;;CAKtD,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;EACzC;EACA,mBAJQ,QAAQ,MAAM,GAAG,KAAK,eAAe,QAAQ,WAIhC,sCAAsC;EAC3D;EACA;EACD,EAAE;EAAE,UAAU;EAAS,WAAW,KAAK,OAAO;EAAM,CAAC;AAEtD,KAAI,CAAC,OACH,QAAO,EAAE;AAEX,QAAO,OACJ,MAAM,CACN,MAAM,KAAK,CACX,OAAO,QAAQ,CACf,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAqD,CACjF,QAAO,UAAS,CAAC,UAAU,IAAI,MAAM,KAAK,IAAI,MAAM,aAAa,MAAM,CACvE,QAAO,UAAS,CAAC,aAAa,MAAM,CAAC,CACrC,KAAK,EAAE,MAAM,GAAG,UAAU,IAAI,GAAG,aAAa;EAC7C,GAAG;EACH,MAAM,cAAc,MAAM,OAAO;EACjC,aAAa,EAAA;EACd,EAAE,CAEF,MAAM,GAAG,OAAO,EAAE,SAAS,YAAY,IAAI,MAAM,EAAE,SAAS,YAAY,IAAI,GAAG,CAC/E,MAAM,GAAG,MAAM;;AAGpB,SAAS,aAAqB;CAC5B,MAAM,oBAAI,IAAI,MAAM;AACpB,GAAE,YAAY,EAAE,aAAa,GAAG,EAAE;AAClC,QAAO,QAAQ,EAAE,aAAa,CAAC;;AAOjC,SAAS,mBAAmB,OAAe,MAAc,QAAuB,OAAO,IAAU;CAE/F,MAAM,QAAQ,OACX,QAAO,MAAK,EAAE,WAAW,MAAM,EAAE,SAAS,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa,GAAG,CAC9F,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU,CACzC,MAAM,GAAG,KAAK;AAEjB,KAAI,MAAM,WAAW,EACnB;CAOF,MAAM,QAAQ,qFAJI,MAAM,KAAK,OAAO,MAClC,IAAI,EAAE,kBAAkB,MAAM,OAAO,uFACtC,CAAC,KAAK,IAAI,CAEkG;AAE7G,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA;GACA;GACA,SAAS;GACT;GACA,SAAS;GACT;GACA,QAAQ;GACT,EAAE;GAAE,UAAU;GAAS,WAAW,KAAK,OAAO;GAAM,CAAC;AAEtD,MAAI,CAAC,OACH;EAGF,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM;AAC1B,MAAI,CAAC,MACH;AAEF,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,UAAU;AACxC,OAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;AACF,SAAM,GAAI,cAAc,MACrB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,KAAK,OAAY;IAChB,MAAM,EAAE,QAAQ;IAChB,QAAQ,EAAE,OAAO;IACjB,WAAW,EAAE,WAAW,cAAc;IACvC,EAAE;;SAGH;;AAWR,eAAsB,kBACpB,OACA,MACA,QAAQ,IACR,YACwB;AACxB,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;CAEX,MAAM,YAAY,KAAK,KAAK,QAAQ,IAAK;C
ACzC,MAAM,cAAc,QAAQ;AAE5B,KAAI;EACF,MAAM,OAAO,mBAAmB,OAAO,MAAM,QAAQ,WAAW,WAAW;EAC3E,MAAM,SAAS,mBAAmB,OAAO,MAAM,UAAU,aAAa,WAAW;EACjF,MAAM,MAAM,CAAC,GAAG,MAAM,GAAG,OAAO;AAChC,qBAAmB,OAAO,MAAM,IAAI;AACpC,SAAO;SAEH;AACJ,SAAO,EAAE;;;AAOb,SAAgB,sBAAsB,OAA4B;CAChE,MAAM,QAAQ,UAAU,MAAM,UAAU;CACxC,MAAM,WAAkE;EACtE,QAAQ,MAAM;EACd,OAAO,MAAM;EACb,MAAM,MAAM;EACZ,OAAO,MAAM;EACb,SAAS,QAAQ,MAAM,UAAU;EACjC,KAAK,MAAM;EACX,WAAW,MAAM;EACjB,UAAU,MAAM;EACjB;AACD,KAAI,MAAM,OAAO,SAAS,EACxB,UAAS,SAAS,IAAI,MAAM,OAAO,KAAK,KAAK,CAAC;CAGhD,MAAM,QAAQ;EAFH,iBAAiB,SAAS;EAElB;EAAI,KAAK,MAAM;EAAQ;AAE1C,KAAI,MAAM,MAAM;EACd,MAAM,OAAO,MAAM,KAAK,SAAS,QAC7B,GAAG,MAAM,KAAK,MAAM,GAAG,MAAM,CAAC,OAC9B,MAAM;AACV,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,MAAM,YAAY,SAAS,GAAG;AAChC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,MAAM,aAAa;GACjC,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,cAAc,EAAE,KAAK,SAAS,MAChC,GAAG,EAAE,KAAK,MAAM,GAAG,IAAI,CAAC,OACxB,EAAE;AACN,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,UAAU,IAAI,IAAI,YAAY;;;AAIpE,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,mBAAmB,QAA+B;CAChE,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,OAAO,OAAO,IAAI,MAAM,KAAK,IAAI,EAAE;AACzC,OAAK,KAAK,MAAM;AAChB,SAAO,IAAI,MAAM,MAAM,KAAK;;CAG9B,MAAM,aAAwC;EAC5C,KAAK;EACL,UAAU;EACV,MAAM;EACN,SAAS;EACT,OAAO;EACR;CAED,MAAM,YAAyB;EAAC;EAAO;EAAY;EAAQ;EAAS;EAAU;CAU9E,MAAM,WAAqB;EARhB;GACT;GACA,UAAU,OAAO;GACjB,SAAS,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAChD,WAAW,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAClD;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAkB;EAAG;AAEpE,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,QAAQ,OAAO,IAAI,KAAK;AAC9B,MAAI,CAAC,OAAO,OACV;AACF,WAAS,KAAK,MAAM,WAAW,MAAM,IAAI,MAAM,OAAO,IAAI,GAAG;AAC7D,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,YAAY,MAAM,YAAY,IAAI,MAAM,MAAM,UAAU,KAAK;GACnE,MAAM,QAAQ,MAAM,UAAU,SAAS,KAAK;GAC5C,MAAM,OAAO,QAAQ,MAAM,UAAU;AACrC,YAAS,KAAK,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO,QAAQ,MAAM,QAAQ,YAAY,MAAM,IAAI,KAAK,GAAG;;AAEjH,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;ACrX5B,MAAa,SAAS,OAAO,OAAO;CAClC,OAAO;CACP,YAAY;CACZ,SAAS;CACT,SAAS,EAAE,cAAc,cAAA;CAC1B,CAAC;AAKF,eAAsB,UAAU,KAAqC;AACnE,QAAO,OAAO,KAAK,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;;AAMhE,eAAsB,UAAU,KAA+B;CAC7D,MAAM,MAAM,MAAM,OAAO,IAAI,KAAK,EAAE,QAAQ,QAAQ,CAAC,CAAC,YAAY,KAAK;AACvE,KAAI,CAAC,IACH,QAAO;AAET,QAAO,EADa,IAAI,QAAQ,IAAI,eAAe,IAAI,IACnC,SAAS,YAAY;;AAM3C,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,iBAAiB,KAAsB;AACrD,KAAI;EACF,MAAM,EAAE,aAAa,IAAI,IAAI,IAAI;AACjC,SAAO,cAAc,IAAI,SAAS;SAE9B;AAAE,SAAO;;;AAMjB,SAAgB,gBAAgB,KAAsB;AACpD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,SAAO,OAAO,aAAa,gBAAgB,OAAO,aAAa;SAE3D;AACJ,SAAO;;;AAOX,SAAgB,eAAe,KAAqD;CAClF,MAAM,QAAQ,IAAI,MAAM,qDAAqD;AAC7E,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EAAE,OAAO,MAAM;EAAK,MAAM,MAAM;EAAK;;AAM9C,SAAgB,iBAAiB,KAAqB;AACpD,QAAO,IACJ,QAAQ,UAAU,GAAG,CACrB,QAAQ,QAAQ,GAAG,CACnB,QAAQ,UAAU,GAAG,CACrB,QAAQ,aAAa,WAAW,CAChC,QAAQ,4BAA4B,qBAAqB,CAEzD,QAAQ,qBAAqB,sBAAsB;;AAMxD,SAAgB,kBAAkB,KAAiC;CACjE,MAAM,OAAO,IAAI,QAAQ,IAAI;AAC7B,KAAI,SAAS,GACX,QAAO,KAAA;CACT,MAAM,WAAW,IAAI,MAAM,OAAO,EAAE;AAEpC,KAAI,CAAC,YAAY,aAAa,SAC5B,QAAO,KAAA;AACT,QAAO;;ACnET,SAAgB,YAAY,SAAgC;CAC1D,MAAM,QAAQ,QAAQ,QAAQ,MAAM,GAAG;CACvC,MAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EACL,OAAO,CAAC,MAAM;EACd,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,KAAK;EACN;;AAUH,SAAS,eAAe,KAAa,aAAqC;AACxE,KAAI,aAAa;EAEf,MAAM,UAAU,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,QAAQ,CAAC;AAC3E,MAAI,QACF,QAAO,QAAQ;EACjB,MAAM,YAAY,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,UAAU,CAAC;AAC/E,MAAI,UACF,QAAO,UAAU;;AAGrB,QAAO,IAAI,QAAQ,MAAM,GAAG;;
AAG9B,SAAS,YAAY,KAAqB;AACxC,QAAO,IAAI,QAAQ,uBAAuB,OAAO;;AAMnD,SAAS,kBAAkB,KAAa,aAA8B;AAEpE,QAAO,IAAI,WAAW,GAAG,YAAY,GAAG,IAAI,IAAI,WAAW,GAAG,YAAY,IAAI,IAAI,IAAI,WAAW,GAAG,YAAY,GAAG;;AAGrH,SAAgB,cAAc,GAAW,GAAmB;AAC1D,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,QAAO,EAAE,QAAQ,EAAE;;AAMrB,SAAS,mBAAmB,OAAe,MAA+B;AACxE,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GACvC;GACA,SAAS,MAAM,GAAG,KAAK;GACvB;GACA;GACD,EAAE;GAAE,UAAU;GAAS,SAAS;GAAQ,OAAO;IAAC;IAAU;IAAQ;;GAAW,CAAC;AAC/E,MAAI,CAAC,KACH,QAAO,EAAE;AACX,SAAO,KAAK,MAAM,KAAK;SAEnB;AACJ,SAAO,EAAE;;;AAOb,eAAe,qBAAqB,OAAe,MAAwC;AAKzF,SAJa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,YACvC,EAAE,QAAQ,YAAY,QAAQ,KAAO,EAAE,CACxC,CAAC,YAAY,KAAK,GACN,YAAY,EAAE;;AAM7B,eAAe,iBAAiB,OAAe,MAAwC;AACrF,KAAI,eAAe,EAAE;EACnB,MAAM,WAAW,mBAAmB,OAAO,KAAK;AAChD,MAAI,SAAS,SAAS,EACpB,QAAO;;AAEX,QAAO,qBAAqB,OAAO,KAAK;;AAS1C,SAAgB,eAAe,UAA2B,aAAsB,kBAA4C;CAE1H,MAAM,kBAAkB,eAAe,SAAS,MAAK,MAAK,kBAAkB,EAAE,KAAK,YAAY,CAAC;CAChG,MAAM,cAAc,mBAAmB,YAAY,iBAAiB,GAAG;AAyBvE,QAvBiB,SAAS,QAAQ,MAAM;AACtC,MAAI,EAAE,WACJ,QAAO;EAET,MAAM,MAAM,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC5E,MAAI,CAAC,IACH,QAAO;EAET,MAAM,KAAK,YAAY,IAAI;AAC3B,MAAI,CAAC,GACH,QAAO;AAGT,MAAI,mBAAmB,eAAe,CAAC,kBAAkB,EAAE,KAAK,YAAY,CAC1E,QAAO;AAGT,MAAI,eAAe,cAAc,IAAI,YAAY,GAAG,EAClD,QAAO;AAET,SAAO;GACP,CAGC,MAAM,GAAG,MAAM;EACd,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;EAC7E,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC7E,MAAI,CAAC,QAAQ,CAAC,KACZ,QAAO;AACT,SAAO,cAAc,YAAY,KAAK,EAAG,YAAY,KAAK,CAAE;GAC5D,CACD,MAAM,GAAG,GAAG;;AAMjB,SAAS,cAAc,SAAwB,aAA8B;CAC3E,MAAM,OAAO,QAAQ,QAAQ,eAAe,QAAQ,UAAU;CAC9D,MAAM,UAAU,eAAe,QAAQ,KAAK,YAAY,IAAI,QAAQ;CAEpE,MAAM,KAAK;EACT;EACA,QAAQ,QAAQ;EAChB,YAAY;EACZ,cAAc;EACf;AACD,KAAI,QAAQ,QAAQ,QAAQ,SAAS,QAAQ,IAC3C,IAAG,KAAK,UAAU,QAAQ,KAAK,QAAQ,MAAM,OAAM,CAAC,GAAG;AACzD,IAAG,KAAK,MAAM;AAEd,QAAO,GAAG,GAAG,KAAK,KAAK,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,IAAI,MAAM,QAAQ;;AAc5E,SAAgB,qBAAqB,gBAAuD,aAA8B;CAExH,MAAM,OAA4B,MAAM,QAAQ,eAAe,GAC3D;EAAE,UAAU;EAAgB;EAAa,GACzC;CAEJ,MAAM,EAAE,UAAU,cAAc,iBAAiB;CACjD,MAAM,MAAM,KAAK;CAUjB,MAAM,QAAkB;EAPb;GACT;GACA,UAHY,SAAS,UAAU,cAAc,UAAU;GAIvD,WAAW,SAAS,IAAI,OAAO;GAC/B;GACD,CAE2B,KAAK,KAAK;EAAE;EAAI;EAAoB;EAAG;AAGnE,KAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,QAAM,KAAK,oBAAoB,GAAG;AAClC,OAAK,MAAM,KAAK,aACd,OAAM,KAAK,MAAM,EAAE,QAAQ,WAAW,EAAE,QAAQ,QAAQ,EAAE,MAAM,IAAI,EAAE,KAAK,GAAG;AAEhF,QAAM,KAAK,GAAG;;AAIhB,KAAI,SAAS,SAAS,GAAG;AACvB,MAAI,gBAAgB,aAAa,SAAS,EACxC,OAAM,KAAK,oBAAoB,GAAG;AACpC,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,OAAO,QAAQ,EAAE,eAAe,EAAE,UAAU;GAClD,MAAM,WAAW,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GAAG,EAAE,MAAM,IAAI,EAAE;GAE9E,MAAM,KAAK,YADK,eAAe,EAAE,KAAK,IAAI,IAAI,EAAE,IACjB;GAC/B,MAAM,QAAQ,IAAI,UAAU,KAAK,GAAG,UAAU,IAAI,iBAAiB,IAAI,UAAU,IAAI,iBAAiB;AACtG,SAAM,KAAK,MAAM,EAAE,IAAI,MAAM,SAAS,QAAQ,EAAE,QAAQ,EAAE,IAAI,IAAI,KAAK,GAAG,QAAQ;;AAEpF,QAAM,KAAK,GAAG;;AAIhB,KAAI,cAAc;AAChB,QAAM,KAAK,gBAAgB,GAAG;AAC9B,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,GAAG;;AAGhB,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,2BAA2B,UAAoC;CAC7E,MAAM,SAAS,SAAS,MAAM,GAAG,EAAE;AACnC,KAAI,OAAO,WAAW,EACpB,QAAO;AACT,QAAO,OAAO,OAAO,MAAM;EACzB,MAAM,QAAQ,EAAE,YAAY,IAAI,MAAM;AACtC,SAAO,KAAK,SAAS,OAAO,iBAAiB,KAAK,KAAK;GACvD;;AAMJ,eAAe,eAAe,OAAe,MAAc,KAAqC;AAC9F,MAAK,MAAM,YAAY;EAAC;EAAgB;EAAgB;EAAa,EAAE;EAErE,MAAM,UAAU,MAAM,OADV,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,YACvC;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAClH,MAAI,QACF,QAAO;;AAEX,QAAO;;AAUT,eAAsB,kBACpB,OACA,MACA,kBACA,QACA,aACsB;CAEtB,MAAM,WAAW,eADA,MAAM,iBAAiB,OAAO,KAAK,EACV,aAAa,iBAAiB;AAExE,KAAI,SAAS,S
AAS,GAAG;AAGvB,MAAI,2BAA2B,SAAS,EAAE;GAExC,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,SAAS,GAAI,IACqB;AACxD,OAAI,UACF,QAAO,CAAC;IAAE,MAAM;IAAyB,SAAS;IAAW,CAAC;;EAGlE,MAAM,OAAO,SAAS,KAAK,MAAM;AAI/B,UAAO;IACL,MAAM,YAJS,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GACzD,EAAE,MACF,IAAI,EAAE,MAEmB;IAC3B,SAAS,cAAc,GAAG,YAAA;IAC3B;IACD;EAIF,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,SAAS,GAAI,IACqB;AACxD,MAAI,aAAa,UAAU,SAAS,IAClC,MAAK,KAAK;GAAE,MAAM;GAAyB,SAAS;GAAW,CAAC;AAGlE,SAAO;;CAKT,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,UAAU,OACkC;AACxD,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,CAAC;EAAE,MAAM;EAAyB,SAAS;EAAW,CAAC;;ACzThE,SAAS,oBAAoB,KAA4B;CACvD,MAAM,QAAQ,IAAI,MAAM,2BAA2B;AACnD,KAAI,MACF,QAAO,GAAG,MAAM,GAAG,GAAG,MAAM;AAC9B,QAAO;;AAMT,SAAS,kBAAkB,SAAkC;AAW3D,QAAO,GAVI;EACT;EACA,YAAY,QAAQ;EACpB,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAM,CAAC;EAC9C,SAAS,QAAQ;EACjB,QAAQ,QAAQ;EAChB;EACA;EACD,CAEY,KAAK,KAAK,CAAC,QAAQ,QAAQ,MAAM,MAAM,QAAQ;;AAM9D,eAAe,cAAc,KAA8C;AACzE,KAAI;EACF,MAAM,OAAO,MAAM,OAAO,KAAK;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAC/G,MAAI,CAAC,KACH,QAAO;EAGT,MAAM,UAAU,oBAAoB,IAAI;AACxC,MAAI,CAAC,QACH,QAAO;EAGT,IAAI,QAAQ;EACZ,MAAM,aAAa,KAAK,MAAM,yBAAyB;AACvD,MAAI,WACF,SAAQ,WAAW,GAAI,MAAM;AAG/B,MAAI,CAAC,OAAO;GACV,MAAM,iBAAiB,KAAK,MAAM,0BAA0B;AAC5D,OAAI,eACF,SAAQ,eAAe,GAAI,MAAM;;EAIrC,IAAI,wBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC;EAC/C,MAAM,YAAY,KAAK,MAAM,gEAAgE;AAC7F,MAAI,UACF,QAAO,UAAU;EAGnB,MAAM,WAAW,eAAe,KAAK;AACrC,MAAI,CAAC,SACH,QAAO;AAET,SAAO;GACL;GACA,OAAO,SAAS,WAAW;GAC3B;GACA;GACA;GACD;SAEG;AACJ,SAAO;;;AASX,SAAS,qBAAqB,SAAwB,kBAAyC;CAC7F,MAAM,cAAc,YAAY,iBAAiB;AACjD,KAAI,CAAC,YACH,QAAO;AAET,QAAO,QAAQ,QAAQ,UAAU;EAC/B,MAAM,UAAU,YAAY,MAAM,QAAQ;AAC1C,MAAI,CAAC,QACH,QAAO;AAET,SAAO,cAAc,SAAS,YAAY,IAAI;GAC9C;;AAQJ,eAAsB,kBACpB,aACA,kBACsB;CACtB,MAAM,SAAS,cAAc,YAAY;AACzC,KAAI,CAAC,OACH,QAAO,EAAE;CAEX,MAAM,mBAAmB,qBAAqB,OAAO,UAAU,iBAAiB;AAChF,KAAI,iBAAiB,WAAW,EAC9B,QAAO,EAAE;CAEX,MAAM,WAA8B,EAAE;CAGtC,MAAM,YAAY;AAClB,MAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK,WAAW;EAC3D,MAAM,QAAQ,iBAAiB,MAAM,GAAG,IAAI,UAAU;EACtD,MAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,KAAI,UAAS,cAAc,MAAM,IAAI,CAAC,CAAC;AAC/E,OAAK,MAAM,UAAU,QACnB,KAAI,OACF,UAAS,KAAK,OAAO;;AAI3B,KAAI,SAAS,WAAW,EACtB,QAAO,EAAE;AAGX,UAAS,MAAM,GAAG,MAAM;EACtB,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;EAC7C,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;AAC7C,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,KAAK,QAAQ,KAAK,OAAO,EAAE,KAAK;GAC3D,MAAM,QAAQ,KAAK,MAAM,MAAM,KAAK,MAAM;AAC1C,OAAI,SAAS,EACX,QAAO;;AAEX,SAAO;GACP;AAGF,QAAO,SAAS,KAAI,OAAM;EACxB,MAAM,iBAAiB,EAAE,QAAQ;EACjC,SAAS,kBAAkB,EAAA;EAC5B,EAAE;;ACjKL,MAAM,wBAAwB,IAAI,IAAI;CACpC;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,uBAAuB,IAAI,IAAI;CACnC;CACA;CACA;CACD,CAAC;AAwBF,eAAsB,uBACpB,OACA,MACA,QAAQ,IACR,YAC6B;AAC7B,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;AAKX,KAAI,YAAY;EACd,MAAM,SAAS,IAAI,KAAK,WAAW;AACnC,SAAO,SAAS,OAAO,UAAU,GAAG,EAAE;AACtC,MAAI,yBAAS,IAAI,MAAM,CACrB,QAAO,EAAE;;AAGb,KAAI;EAKF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GAAC;GAAO;GAAW;GAAM,SAFtD,wGADK,KAAK,IAAI,QAAQ,GAAG,GAAG,CACuF;GAE3C;GAAM,SAAS;GAAS;GAAM,QAAQ;GAAO,EAAE;GACnI,UAAU;GACV,WAAW,KAAK,OAAO;GACxB,CAAC;AACF,MAAI,CAAC,OACH,QAAO,EAAE;EAGX,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM,YAAY,aAAa;AACnD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,QAAO,EAAE;AAgCX,SA9BoB,MACjB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW;GAClB,MAAM,OAAO,EAAE,UAAU,QAAQ,IAAI,aAAa;AAClD,UAAO,CAAC,qBAAqB,IAAI,IAAI;IACrC,CACD,KAAK,OAAY;GAChB,QAAQ,EAAE;GACV,OAAO,EAAE;GACT,MAAM,EAAE,QAAQ;GAChB,UAAU,EAAE,UAAU,QAAQ;GAC9B,WAAW,EAAE;GACb,KAAK,EAAE;GACP,aAAa,EAAE,eAAe;GAC9B,UAAU,EAAE,UAAU,cAAc;GACpC,QAAQ,EAAE,QAAQ,QAAQ,K
AAA;GAC1B,cAAc,EAAE,UAAU,SAAS,EAAE,EAClC,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,KAAK,OAAY;IAAE,MAAM,EAAE,QAAQ;IAAI,QAAQ,EAAE,OAAO;IAAO,EAAA;GACnE,EAAE,CAEF,MAAM,GAAqB,MAAwB;GAClD,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;GACxE,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;AACxE,OAAI,UAAU,MACZ,QAAO,QAAQ;AACjB,UAAQ,EAAE,cAAc,EAAE,YAAa,EAAE,cAAc,EAAE;IACzD,CACD,MAAM,GAAG,MAAM;SAId;AACJ,SAAO,EAAE;;;AAOb,SAAgB,2BAA2B,GAA6B;CACtE,MAAM,KAAK,iBAAiB;EAC1B,QAAQ,EAAE;EACV,OAAO,EAAE;EACT,UAAU,EAAE;EACZ,SAAS,QAAQ,EAAE,UAAU;EAC7B,KAAK,EAAE;EACP,SAAS,EAAE;EACX,UAAU,EAAE;EACZ,UAAU,CAAC,CAAC,EAAE;EACf,CAAC;CAEF,MAAM,YAAY,EAAE,eAAe,IAAI,OAAO;CAC9C,MAAM,QAAQ;EAAC;EAAI;EAAI,KAAK,EAAE;EAAQ;AAEtC,KAAI,EAAE,MAAM;EACV,MAAM,OAAO,EAAE,KAAK,SAAS,YACzB,GAAG,EAAE,KAAK,MAAM,GAAG,UAAU,CAAC,OAC9B,EAAE;AACN,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,EAAE,QAAQ;EACZ,MAAM,cAAc;EACpB,MAAM,SAAS,EAAE,OAAO,SAAS,cAC7B,GAAG,EAAE,OAAO,MAAM,GAAG,YAAY,CAAC,OAClC,EAAE;AACN,QAAM,KAAK,IAAI,OAAO,IAAI,sBAAsB,IAAI,OAAO;YAEpD,EAAE,YAAY,SAAS,GAAG;AAEjC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,EAAE,aAAa;GAC7B,MAAM,cAAc,EAAE,KAAK,SAAS,MAChC,GAAG,EAAE,KAAK,MAAM,GAAG,IAAI,CAAC,OACxB,EAAE;AACN,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,MAAM,IAAI,YAAY;;;AAIxD,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,wBAAwB,aAAyC;CAC/E,MAAM,6BAAa,IAAI,KAAiC;AACxD,MAAK,MAAM,KAAK,aAAa;EAC3B,MAAM,MAAM,EAAE,YAAY;EAC1B,MAAM,OAAO,WAAW,IAAI,IAAI,IAAI,EAAE;AACtC,OAAK,KAAK,EAAE;AACZ,aAAW,IAAI,KAAK,KAAK;;CAG3B,MAAM,WAAW,YAAY,QAAO,MAAK,EAAE,OAAO,CAAC;CASnD,MAAM,WAAqB;EAPhB;GACT;GACA,UAAU,YAAY;GACtB,aAAa;GACb;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAuB;EAAG;CAGzE,MAAM,OAAO,CAAC,GAAG,WAAW,MAAM,CAAC,CAAC,MAAM,GAAG,MAAM;AAGjD,UAFc,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACjD,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACvC,EAAE,cAAc,EAAE;GAC1C;AAEF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,WAAW,IAAI,IAAI;AACjC,WAAS,KAAK,MAAM,IAAI,IAAI,MAAM,OAAO,IAAI,GAAG;AAChD,OAAK,MAAM,KAAK,OAAO;GACrB,MAAM,UAAU,EAAE,cAAc,IAAI,MAAM,EAAE,YAAY,KAAK;GAC7D,MAAM,WAAW,EAAE,SAAS,gBAAgB;GAC5C,MAAM,OAAO,QAAQ,EAAE,UAAU;AACjC,YAAS,KAAK,OAAO,EAAE,OAAO,iBAAiB,EAAE,OAAO,QAAQ,EAAE,QAAQ,UAAU,SAAS,IAAI,KAAK,GAAG;;AAE3G,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;ACjM5B,MAAM,YAAY;CAChB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB,MAAM;AAK5B,eAAsB,kBAAkB,YAA0C;AAChF,KAAI,CAAC,WAAW,KAAK,YAAY,eAAe,CAAC,CAC/C,QAAO,EAAE;CAOX,MAAM,QAAQ,MAAM,OAAO,CAAC,sBAAsB,EAAE;EAClD,KAAK;EACL,QAPa,CACb,GAAG,UAAU,KAAI,MAAK,MAAM,EAAE,KAAK,EACnC,GAAG,cACJ;EAKC,UAAU;EACX,CAAC;CAEF,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,YAAY,KAAK;EACtC,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,SAAS,QAAQ;UAEpC;AACJ;;AAGF,MAAI,QAAQ,SAAS,cACnB;AAEF,UAAQ,KAAK;GAAE,MAAM;GAAM;GAAS,MAAM;GAAS,CAAC;;AAGtD,QAAO;;ACtCT,SAAgB,mBAAmB,OAAsC;CACvE,MAAM,UAAU,MAAM,MAAM;AAG5B,KAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAGT,KAAI,QAAQ,WAAW,KAAK,IAAI,QAAQ,WAAW,MAAM,IAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CAI7G,QAAO;EAAE,MAAM;EAAS,WAHN,QAAQ,WAAW,IAAI,GACrC,QAAQ,QAAQ,IAAI,QAAQ,IAAI,QAAQ,MAAM,EAAE,CAAC,GACjD,QAAQ,QAAA;EACuB;AAIrC,KAAI,QAAQ,WAAW,OAAO,EAAE;EAE9B,MAAM,KAAK,eADQ,iBAAiB,QAAQ,CACP;AACrC,MAAI,GACF,QAAO;GAAE,MAAM;GAAU,OAAO,GAAG;GAAO,MAAM,GAAG;GAAM;AAC3D,SAAO;;AAIT,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,UAAU,CACjE,QAAO,YAAY,QAAQ;AAI7B,KAAI,qBAAqB,KAAK,QAAQ,CACpC,QAAO;EAAE,MAAM;EAAU,OAAO,QAAQ,MAAM,IAAI,CAAC;EAAI,MAAM,QAAQ,MAAM,IAAI,CAAC;EAAI;AAItF,QAAO;;AAGT,SAAS,YAAY,KAAoC;AACvD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAE3B,MAAI,OAAO,aAAa,gBAAgB,OAAO,aAAa,kBAAkB;GAC5E,MAAM,QAAQ,OAAO
,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AAGT,OAAI,MAAM,OAAO,UAAU,MAAM,UAAU,EAGzC,QAAO;IAAE,MAAM;IAAU;IAAO;IAAM,KAF1B,MAAM;IAEyB,WADzB,MAAM,SAAS,IAAI,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI,GAAG,KAAA;IACV;AAGxD,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,MAAI,OAAO,aAAa,cAAc;GACpC,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AACT,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,SAAO;SAEH;AACJ,SAAO;;;AAOX,SAAgB,0BAA0B,SAA0D;CAClG,MAAM,QAAQ,QAAQ,MAAM,wBAAwB;AACpD,KAAI,CAAC,MACH,QAAO,EAAE;CAEX,MAAM,SAAkD,EAAE;AAC1D,MAAK,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;EACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,MAAI,CAAC,GACH;AACF,MAAI,GAAG,OAAO,OACZ,QAAO,OAAO,GAAG;AACnB,MAAI,GAAG,OAAO,cACZ,QAAO,cAAc,GAAG;;AAE5B,QAAO;;AAST,MAAM,kBAAkB;CAAC;CAAW;CAAc;CAAS;AAK3D,eAAsB,eACpB,QACA,YACwD;AACxD,KAAI,OAAO,SAAS,QAClB,QAAO,iBAAiB,OAAO;AACjC,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,QAAO,EAAE,QAAQ,EAAE,EAAE;;AAKvB,SAAS,iBAAiB,QAAmD;CAC3E,MAAM,OAAO,OAAO;AACpB,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,SAAwB,EAAE;CAGhC,MAAM,YAAY,QAAQ,MAAM,SAAS;AACzC,KAAI,WAAW,UAAU,CACvB,MAAK,MAAM,SAAS,YAAY,WAAW,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,MAAI,CAAC,MAAM,aAAa,CACtB;EACF,MAAM,QAAQ,eAAe,QAAQ,WAAW,MAAM,KAAK,EAAE,UAAU,MAAM,OAAO;AACpF,MAAI,MACF,QAAO,KAAK,MAAM;;AAKxB,KAAI,OAAO,WAAW,GAAG;EACvB,MAAM,QAAQ,eAAe,MAAM,GAAG;AACtC,MAAI,MACF,QAAO,KAAK,MAAM;;AAGtB,QAAO,EAAE,QAAQ;;AAGnB,SAAS,eAAe,KAAa,UAAsC;CACzE,MAAM,cAAc,QAAQ,KAAK,WAAW;AAC5C,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;CAET,MAAM,UAAU,aAAa,aAAa,QAAQ;CAClD,MAAM,cAAc,0BAA0B,QAAQ;CACtD,MAAM,UAAU,IAAI,MAAM,IAAI,CAAC,KAAK;CACpC,MAAM,OAAO,YAAY,QAAQ;CAEjC,MAAM,QAAkD,EAAE;AAC1D,MAAK,MAAM,UAAU,iBAAiB;EACpC,MAAM,aAAa,QAAQ,KAAK,OAAO;AACvC,MAAI,CAAC,WAAW,WAAW,CACzB;AACF,OAAK,MAAM,QAAQ,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,OAAI,CAAC,KAAK,QAAQ,CAChB;AACF,SAAM,KAAK;IACT,MAAM,GAAG,OAAO,GAAG,KAAK;IACxB,SAAS,aAAa,QAAQ,YAAY,KAAK,KAAK,EAAE,QAAA;IACvD,CAAC;;;AAIN,QAAO;EACL;EACA,aAAa,YAAY,eAAe;EACxC,MAAM;EACN;EACA;EACD;;AAKH,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;AAC1B,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OAAO,QAAQ;AAExB,MAAI,QAAQ,QAAQ;GAClB,MAAM,WAAW,MAAM,OACrB,yBAAyB,MAAM,GAAG,KAAK,eACxC,CAAC,YAAY,KAAK;AACnB,OAAI,UAAU,OAAO,OACnB,QAAO,oBAAoB,OAAQ,MAAO,UAAU,UAAU,OAAO,WAAW,WAAW;;AAE/F,SAAO,EAAE,QAAQ,EAAE,EAAE;;AAGvB,QAAO,oBAAoB,OAAQ,MAAO,KAAK,MAAM,OAAO,WAAW,WAAW;;AAGpF,eAAe,oBACb,OACA,MACA,KACA,MACA,WACA,YACwD;CACxD,MAAM,WAAW,KAAK,MAAM,KAAI,MAAK,EAAE,KAAK;CAC5C,MAAM,YAAY,KAAK,MAAM;CAG7B,IAAI;AAEJ,KAAI,WAAW;EAEb,MAAM,aAAa,CACjB,GAAG,UAAU,YAEb,UAAU,SAAS,YAAY,GAAG,YAAY,KAC/C,CAAC,OAAO,QAAQ;AAEjB,iBAAe,SAAS,QAAO,MAAK,WAAW,SAAS,EAAE,CAAC;OAI3D,gBAAe,SAAS,QAAO,MAC7B,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAChD;AAGH,KAAI,aAAa,WAAW,EAC1B,QAAO;EAAE,QAAQ,EAAE;EAAE;EAAW;CAElC,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAO,MAAM,KAAK,OAAO;AAC9D,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;
GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAO,MAAM,KAAK,SAAS;AACpE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO;EAAE;EAAQ;EAAW;;AAG9B,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QAC7D,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;AAarB,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;CAC1B,MAAM,YAAY,mBAAmB,GAAG,MAAM,GAAG,OAAO;AAExD,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,sCAAsC,UAAU,uBAAuB,IAAI,8BAC5E,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OACT,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,WAAW,KAAK,QAAO,MAAK,EAAE,SAAS,OAAO,CAAC,KAAI,MAAK,EAAE,KAAK;CAGrE,MAAM,eAAe,OAAO,YACxB,SAAS,QAAO,MAAK,MAAM,GAAG,OAAO,UAAU,WAAW,GAC1D,SAAS,QAAO,MAAK,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAAW;AAEnF,KAAI,aAAa,WAAW,EAC1B,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAQ,MAAO,KAAK,OAAO;AAChE,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAQ,MAAO,KAAK,SAAS;AACtE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO,EAAE,QAAQ;;AAGnB,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,sBAAsB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,QACpD,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;ACjarB,MAAa,eAAe;AAG5B,MAAa,oBAAoB,MAAc,IAAI,KAAK,IAAI;AAyB5D,eAAe,eAAe,OAAe,MAAc,KAAgC;AAIzF,SAHa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK,GACN,OAAO,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE;;AAc5C,eAAe,WAAW,OAAe,MAAc,SAAiB,aAAsB,YAAgD;CAC5I,MAAM,aAAa,CAAC,IAAI,WAAW,QAAQ;AAC3C,KAAI,YACF,YAAW,KAAK,GAAG,YAAY,GAAG,UAAU;AAE9C,MAAK,MAAM,OAAO,YAAY;EAC5B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,IAAI;AACpD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAK;GAAO;;AAI9B,KAAI,aAAa;EACf,MAAM,YAAY,MAAM,qBAAqB,OAAO,MAAM,YAAY;AACtE,MAAI,WAAW;GACb,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,UAAU;AAC1D,OAAI,MAAM,SAAS,EACjB,QAAO;IAAE,KAAK;IAAW;IAAO;;;CAKtC,MAAM,WAAW,aACb,CAAC,YAAY,GAAG,CAAC,QAAQ,SAAS,CAAC,QAAO,MAAK,MAAM,WAAW,CAAC,GACjE,CAAC,QAAQ,SAAS;AACtB,MAAK,MAAM,UAAU,UAAU;EAC7B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,OAAO;AACvD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAQ;GAAO,UAAU;GAAM;;AAGjD,QAAO;;AAOT,eAAe,qBAAqB,OAAe,MAAc,aAA6C;CAC5G,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CACnB,MAAM,SAAS,GAAG,YAAY;AAC9B,QAAO,MAAM,UAAU,MAAK,MAAK,EAAE,IAAI,WAAW,OAAO,CAAC,EAAE,OAAO;;AAMrE,SAAS,eAAe,OAAiB,YAA8B;AACrE,QAAO,MAAM,QAAO,MAAK,EAAE,WAAW,WAAW,IAAI,gBAAgB,KAAK,EAAE,CAAC;;AAI/E,MAAM,iBAAiB;CACrB;CACA;CACA;CACA;CACD;AAGD,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAWF,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,aAAa,IAAI,EAAE,aAAa,CAAC,CAAC;;AAM3D,SAAS,aAAa,MAAsB;AAC1C,QAAO,KAAK,MAAM,IAAI,CAAC,OAAO,
QAAQ,CAAC;;AAMzC,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,cAAc,IAAI,EAAE,aAAa,CAAC,CAAC;;AAO5D,SAAS,YAAY,KAAa,WAA2B;CAC3D,MAAM,QAAQ,aAAa,IAAI,IAAI;AAEnC,QAAQ,aADU,eAAe,IAAI,GAAG,MAAM,KACb;;AAQnC,SAAS,iBAAiB,UAA2C;CACnE,MAAM,UAAU,SACb,QAAO,MAAK,gBAAgB,KAAK,EAAE,CAAC,CACpC,QAAO,MAAK,CAAC,eAAe,MAAK,MAAK,EAAE,KAAK,EAAE,CAAC,CAAC,CACjD,QAAO,MAAK,EAAE,SAAS,IAAI,CAAC;CAG/B,MAAM,6BAAa,IAAI,KAAuB;AAE9C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,UAAU,KAAK,YAAY,SAAS;AAC1C,MAAI,YAAY,GACd;EAEF,MAAM,SAAS,KAAK,MAAM,GAAG,UAAU,EAAgB;EACvD,MAAM,QAAQ,WAAW,IAAI,OAAO,IAAI,EAAE;AAC1C,QAAM,KAAK,KAAK;AAChB,aAAW,IAAI,QAAQ,MAAM;;AAG/B,KAAI,WAAW,OAAO,GAAG;EACvB,MAAM,UAAU,CAAC,GAAG,WAAW,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;AACpF,MAAI,QAAQ,GAAG,UAAU,GAAG;GAC1B,MAAM,aAAa,QAAQ;GAC3B,MAAM,UAAU,WAAW,YAAY,QAAQ;GAC/C,MAAM,cAAc,UAAU,IAAI,WAAW,MAAM,GAAG,QAAQ,GAAG;AACjE,UAAO;IAAE,OAAO,QAAQ;IAAI,QAAQ;IAAa;;;CAKrD,MAAM,4BAAY,IAAI,KAAuB;AAE7C,MAAK,MAAM,QAAQ,SAAS;AAC1B,MAAI,eAAe,KAAK,CACtB;EAGF,MAAM,YAAY,KAAK,YAAY,IAAI;AACvC,MAAI,cAAc,GAChB;EAEF,MAAM,MAAM,KAAK,MAAM,GAAG,YAAY,EAAE;EACxC,MAAM,QAAQ,UAAU,IAAI,IAAI,IAAI,EAAE;AACtC,QAAM,KAAK,KAAK;AAChB,YAAU,IAAI,KAAK,MAAM;;AAG3B,KAAI,UAAU,SAAS,EACrB,QAAO;CAGT,MAAM,SAAS,CAAC,GAAG,UAAU,SAAS,CAAC,CACpC,KAAK,CAAC,KAAK,YAAY;EAAE;EAAK;EAAO,OAAO,YAAY,KAAK,MAAM,OAAA;EAAS,EAAE,CAC9E,QAAO,MAAK,EAAE,MAAM,UAAU,EAAE,CAChC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAEpC,KAAI,OAAO,WAAW,EACpB,QAAO;CAET,MAAM,OAAO,OAAO;AAKpB,QAAO;EAAE,OAAO,KAAK;EAAO,QAAQ,KAAK;EAAK;;AAMhD,eAAe,cAAc,OAAe,MAAc,KAAa,aAAa,SAA4B;AAE9G,QAAO,eADO,MAAM,eAAe,OAAO,MAAM,IAAI,EACvB,WAAW;;AAO1C,eAAsB,aAAa,OAAe,MAAc,SAAiB,aAAsB,SAAiD;CACtJ,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU;EACZ,MAAM,MAAM,SAAS,OAAO;EAC5B,MAAM,WAAW,CAAC,SAAS;EAC3B,MAAM,QAAQ,MAAM,cAAc,SAAS,OAAO,SAAS,MAAM,KAAK,GAAG,SAAS,KAAK,GAAG;AAC1F,MAAI,MAAM,WAAW,EACnB,QAAO;AACT,SAAO;GACL,SAAS,qCAAqC,SAAS,MAAM,GAAG,SAAS,KAAK,GAAG;GACjF;GACA;GACA;GACD;;CAIH,MAAM,MAAM,MAAM,WAAW,OAAO,MAAM,SAAS,aADhC,UAAU,kBAAkB,QAAQ,GAAG,KAAA,EACiB;AAC3E,KAAI,CAAC,IACH,QAAO;CAET,IAAI,OAAO,eAAe,IAAI,OAAO,QAAQ;CAC7C,IAAI;CACJ,IAAI;AAGJ,KAAI,KAAK,WAAW,GAAG;EACrB,MAAM,aAAa,iBAAiB,IAAI,MAAM;AAC9C,MAAI,YAAY;AACd,UAAO,WAAW;AAClB,gBAAa,WAAW,UAAU,KAAA;AAClC,cAAW,IAAI;;;AAInB,KAAI,KAAK,WAAW,EAClB,QAAO;AAET,QAAO;EACL,SAAS,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI;EACnE,KAAK,IAAI;EACT,OAAO;EACP;EACA;EACA,UAAU,IAAI;EACf;;AAMH,SAAS,cAAc,GAAmB;AACxC,QAAO,EAAE,QAAQ,OAAO,GAAG,CAAC,QAAQ,iBAAiB,GAAG;;AAS1D,SAAgB,wBACd,WACA,WAC0C;AAC1C,KAAI,UAAU,WAAW,EACvB,QAAO;EAAE,SAAS;EAAM,YAAY;EAAG;CAGzC,MAAM,SAAS,UAAU,MAAM,GAAG,GAAG;CAGrC,MAAM,kBAAkB,OAAO,KAAK,SAAS;EAC3C,IAAI,OAAO,KAAK;AAEhB,MAAI,KAAK,WAAW,OAAO,CACzB,KAAI;AACF,UAAO,IAAI,IAAI,KAAK,CAAC;UAEjB;AAER,SAAO,cAAc,KAAK;GAC1B;CAGF,MAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,cAAc,CAAC;CAE5D,IAAI,UAAU;AACd,MAAK,MAAM,YAAY,gBAErB,MAAK,MAAM,YAAY,eACrB,KAAI,aAAa,YAAY,SAAS,SAAS,IAAI,WAAW,EAAE;AAC9D;AACA;;CAKN,MAAM,aAAa,UAAU,OAAO;AACpC,QAAO;EAAE,SAAS,cAAc;EAAK;EAAY;;AAOnD,eAAe,cAAc,OAAe,MAAc,aAAuC;CAC/F,MAAM,OAAO,qCAAqC,MAAM,GAAG,KAAK;CAEhE,MAAM,QAAQ;EACZ;EACA,YAHgB,YAAY,QAAQ,UAAU,GAAG,CAG3B;EACtB,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,CAAC;EAC7D;AACD,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,MAAM,UAAU,GAAG,KAAK,GAAG,OAAO;AAC/C,MAAI,CAAC,KACH;AACF,MAAI;AAEF,OADY,KAAK,MAAM,KAAK,CACpB,SAAS,YACf,QAAO;UAEL;;AAER,QAAO;;AAGT,eAAsB,iBAAiB,aAA6C;CAElF,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;AACnD,MAAK,MAAM,aAAa,CAAC,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,EAAE,UAAU,EAAE;AAEpF,MAAI,CAAC,UAAU,SAAS,IAAI,EAAE;AAG5B,QADgB,MAAM,OAAO,IAAI,yBAAyB,UAAU,GAAG,YAAY,CAAC,YAAY,KAAK,GACxF,GACX,QAAO,s
BAAsB,UAAU,GAAG;AAC5C;;AAGF,OADgB,MAAM,OAAO,IAAI,yBAAyB,YAAY,CAAC,YAAY,KAAK,GAC3E,GACX,QAAO,sBAAsB;;CAIjC,MAAM,aAAa,YAAY,QAAQ,MAAM,GAAG;AAChD,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAU;GAAS;GAAY;GAAU;GAAY;GAAW;GAAI,EAAE;GAC9G,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,QAAQ,KAAK,MAAM,KAAK;EAE9B,MAAM,QAAQ,MAAM,MAAK,MACvB,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAC/D,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACpE;AACD,MAAI,MACF,QAAO,sBAAsB,MAAM;AAErC,OAAK,MAAM,aAAa,OAAO;GAC7B,MAAM,KAAK,eAAe,sBAAsB,UAAU,WAAW;AACrE,OAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;SAGvC;CAOR,MAAM,OAAO,MAAM,OACjB,gDAFY,mBAAmB,GAAG,WAAW,UAAU,CAED,aACvD,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,MAAM,OAAO,OAChB,QAAO;CAGT,MAAM,QAAQ,KAAK,MAAM,MAAK,MAC5B,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAChE,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACrE;AACD,KAAI,MACF,QAAO,sBAAsB,MAAM;AAGrC,MAAK,MAAM,aAAa,KAAK,OAAO;EAClC,MAAM,KAAK,eAAe,sBAAsB,UAAU,YAAY;AACtE,MAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;AAG3C,QAAO;;AAOT,eAAsB,oBAAoB,OAAe,MAAc,aAA6D;CAClI,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU,SACZ,QAAO,EAAE,UAAU,SAAS,UAAU;AAGxC,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAM;GAAa,EAAE;GAC9F,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,SAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;SAElD;CAKR,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,QAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;;AAMxD,eAAsB,YAAY,OAAe,MAAc,QAAiB,KAAsC;CAIpH,MAAM,UAAU,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAJ5B,OAAO,OAIqC,GAAG,OAAO,cACjE,yBAAyB,MAAM,GAAG,KAAK,SAAS,MAAM,QAAQ,QAAQ;AAI1E,MAFgB,MAAM,OAAO,IAAI,QAAQ,CAAC,YAAY,KAAK,GAE9C,GACX,QAAO,UAAU,MAAM,GAAG,OAAO,SAAS,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ;CAKlF,MAAM,WAAW,SAAS,GAAG,OAAO,KAAK;CACzC,MAAM,WAAW,MAAM,CAAC,IAAI,GAAG,CAAC,QAAQ,SAAS;AACjD,MAAK,MAAM,KAAK,SACd,MAAK,MAAM,YAAY;EAAC;EAAa;EAAa;EAAY,EAAE;EAC9D,MAAM,YAAY,qCAAqC,MAAM,GAAG,KAAK,GAAG,EAAE,GAAG,WAAW;AAExF,OADY,MAAM,OAAO,IAAI,UAAU,CAAC,YAAY,KAAK,GAChD,GACP,QAAO;;AAIb,QAAO;;AAkFT,eAAsB,mBAAmB,KAAqC;AAE5E,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,MAAM,WAAW,cAAc,IAAI;AACnC,MAAI,CAACA,WAAa,SAAS,CACzB,QAAO;AACT,SAAOC,aAAe,UAAU,QAAQ;;AAG1C,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,IAAI,OAAO,IAAI,QAAQ,WAAW,GAAG;EACrC,IAAI,MAAM;EAGV,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,MAAI,UAAU,IAAI;AAChB,SAAM,KAAK,MAAM,QAAQ,EAAE;AAC3B,UAAO,KAAK,MAAM,GAAG,MAAM;;EAG7B,MAAM,QAAQ,KAAK,MAAM,IAAI;EAC7B,MAAM,QAAQ,MAAM;EACpB,MAAM,OAAO,MAAM;EACnB,MAAM,SAAS,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;EAMvC,MAAM,OAAO,MAAM,OAJH,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,OAAO,cAC9D,yBAAyB,MAAM,GAAG,KAAK,cAAc,OAEtB,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;AAC9E,MAAI,CAAC,KACH,QAAO;AAET,MAAI;GACF,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,UAAO,KAAK,YAAY,KAAK,MAAM,YAAY;UAE3C;AACJ,UAAO;;;AAIX,QAAO,UAAU,IAAI;;AC9oBvB,eAAsB,aAAa,SAAyC;CAE1E,MAAM,UAAU,GADD,IAAI,IAAI,QAAQ,CAAC,OACN;AAC1B,KAAI,MAAM,UAAU,QAAQ,CAC1B,QAAO;AACT,QAAO;;AAMT,eAAsB,aAAa,KAA0C;CAC3E,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,KAAI,CAAC,WAAW,QAAQ,SAAS,GAC/B,QAAO;AAET,QAAO;EACL,KAAK;EACL,OAAO,mBAAmB,QAAA;EAC3B;;AAMH,SAAgB,mBAAmB,SAA6B;CAC9D,MAAM,QAAoB,EAAE;CAC5B,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,YAAY;AAClB,MAAK,IAAI,QAAQ,UAAU,KAAK,QAAQ,EAAE,UAAU,MAAM,QAAQ,UAAU,KAAK,QAAQ,EAAE;EACzF,MAAM,MAAM,MAAM;AAClB,MAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,QAAK,IAAI,IAAI;AACb,SAAM,KAAK;IAAE,OAAO,MAAM;IAAK;IAAK,CAAC;;;AAIzC,QAAO;;AAOT,SAAS,UAAU,KAAsB;AACvC,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,MAAI,OAAO,aAAa,SACt
B,QAAO;EACT,MAAM,OAAO,OAAO;AAEpB,MAAI,SAAS,eAAe,SAAS,eAAe,SAAS,MAC3D,QAAO;AACT,MAAI,SAAS,kBACX,QAAO;AACT,MAAI,mDAAmD,KAAK,KAAK,CAC/D,QAAO;AACT,MAAI,KAAK,WAAW,IAAI,CACtB,QAAO;AACT,SAAO;SAEH;AAAE,SAAO;;;AAGjB,eAAsB,iBACpB,aACA,SACA,YACuB;CACvB,MAAM,QAAQ,OAAO,EAAE;CACvB,IAAI,YAAY;AAoBhB,SAlBgB,MAAM,QAAQ,IAC5B,YAAY,MAAM,KAAI,SAAQ,MAAM,YAAY;EAC9C,MAAM,MAAM,KAAK,IAAI,WAAW,OAAO,GACnC,KAAK,MACL,GAAG,QAAQ,QAAQ,OAAO,GAAG,GAAG,KAAK,IAAI,WAAW,IAAI,GAAG,KAAK,MAAM,KAAK;AAE/E,MAAI,CAAC,UAAU,IAAI,CACjB,QAAO;AAET,eAAa,KAAK,KAAK,aAAa,YAAY,MAAM,OAAO;EAE7D,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,MAAI,WAAW,QAAQ,SAAS,IAC9B,QAAO;GAAE,KAAK,KAAK;GAAK,OAAO,KAAK;GAAO;GAAS;AACtD,SAAO;GACP,CAAC,CACJ,EAEc,QAAQ,MAAuB,MAAM,KAAK;;AAO3D,SAAgB,mBAAmB,SAAiB,SAA0B;CAC5E,IAAI,aAAa;AAGjB,KAAI,SAAS;EAEX,MAAM,UADO,QAAQ,QAAQ,OAAO,GAAG,CAClB,QAAQ,uBAAuB,OAAO;AAC3D,eAAa,WAAW,QACtB,IAAI,OAAO,SAAS,QAAQ,mBAAmB,IAAI,EACnD,cACD;;AAIH,cAAa,WAAW,QAAQ,wBAAwB,eAAe;AAEvE,QAAO;;AAOT,SAAgB,gBAAgB,SAAiB,UAAmC;CAClF,MAAM,WAAqB,EAAE;CAC7B,MAAM,QAAQ,QAAQ,MAAM,UAAU;AAEtC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,WAAW,KAAK,MAAM,kBAAkB;AAC9C,MAAI,CAAC,SACH;EAEF,MAAM,MAAM,SAAS;AACrB,MAAI,SAAS,MAAK,MAAK,IAAI,SAAS,EAAE,CAAC,EAAE;GACvC,MAAM,eAAe,KAAK,QAAQ,MAAM,KAAK,QAAQ,OAAO,CAAC;AAC7D,OAAI,eAAe,GACjB,UAAS,KAAK,KAAK,MAAM,eAAe,EAAE,CAAC;;;AAKjD,KAAI,SAAS,WAAW,EACtB,QAAO;AACT,QAAO,SAAS,KAAK,cAAc;;ACnIrC,eAAsB,kBAAkB,OAAe,OAAO,GAA4E;CACxI,MAAM,OAAO,MAAM,OAEhB,+CAA+C,mBAAmB,MAAM,CAAC,QAAQ,OAAO,CAAC,YAAY,KAAK;AAE7G,KAAI,CAAC,MAAM,SAAS,OAClB,QAAO,EAAE;AAEX,QAAO,KAAK,QAAQ,KAAI,OAAM;EAC5B,MAAM,EAAE,QAAQ;EAChB,aAAa,EAAE,QAAQ;EACvB,SAAS,EAAE,QAAQ;EACpB,EAAE;;AAML,eAAsB,gBAAgB,aAAqD;CAEzF,MAAM,OAAO,MAAM,OAAuB,qBAAqB,YAAY,eAAe,CAAC,YAAY,KAAK;AAC5G,KAAI,KACF,QAAO;AAGT,QAAO,OAAuB,8BAA8B,YAAY,SAAS,CAAC,YAAY,KAAK;;AAgBrG,eAAsB,qBAAqB,aAAqB,SAA2C;CACzG,MAAM,OAAO,MAAM,OAGhB,8BAA8B,cAAc,CAAC,YAAY,KAAK;AAEjE,KAAI,CAAC,KACH,QAAO,EAAE;CAGX,MAAM,WAAoD,KAAK,eAC3D,OAAO,YACL,OAAO,QAAQ,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK,SAAS,CACpD,KACA;EAAE,SAAS;EAAK,YAAY,KAAK,OAAO;EAAM,CAC/C,CAAC,CACH,GACD,KAAA;AAEJ,QAAO;EACL,YAAY,KAAK,OAAO,YAAY,KAAA;EACpC;EACD;;AAkBH,eAAe,cACb,IACA,eACA,KACA,QACA,UACA,YACA,MAC+B;CAC/B,IAAI;AAGJ,KAAI,eAAe;AACjB,eAAa,cAAc;EAC3B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,eAAe,IAAI,MAAM,MAAM,WAAW;AAChG,MAAI,SAAS;AACX,UAAO,aAAa,QAAQ;AAC5B,UAAO,SAAS,QAAQ;AACxB,UAAO,kBAAkB,QAAQ;AACjC,cAAW,QAAQ;AACnB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,QAAQ;IACb,QAAQ;IACR,SAAS,QAAQ,WACb,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ,IAAI,gBAAgB,cAAc,KACnF,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ;IACtD,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,OAAO,QAAQ,SAAS,cAAc;GAC9C,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,CAAC,OAAO,SAAS;AACnB,eAAa,cAAc;EAC3B,MAAM,WAAW,MAAM,oBAAoB,GAAG,OAAO,GAAG,MAAM,IAAI,KAAK;AACvE,MAAI,UAAU,YAAY,CAAC,iBAAiB,SAAS,SAAS,EAAE;AAC9D,UAAO,UAAU,SAAS;AAC1B,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,OAAO;IACZ,QAAQ;IACR,SAAS,mBAAmB,SAAS;IACtC,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,OAAO;GACZ,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,cAAa,SAAS;CACtB,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,MAAM,QAAQ,OAAO,OAAO;AACnF,KAAI,WAAW;AACb,SAAO,YAAY;AACnB,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK;GACL,QAAQ;GACT,CAAC;OAGF,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,GAAG,OAAO,QAAQ;EACvB,QAAQ;EACR,SAAS;EACV,CAAC;AAGJ,QAAO;;AAMT,eAAsB,mBAAmB,aAAqB,UAA0B,EAAE,EAAmC;AAE3H,SADe,MAAM,+BAA+B,aAAa,QAAQ,EAC3D;;AAMhB,eAAsB,+BAA+B,aAAqB,UAA0B,EAAE,EAA0B;CAC9H,MAAM,WAA6B,EAAE;CACrC,MAAM,EAAE,eAAe;AAEvB,cAAa,MAAM;CACnB,MAAM,MAAM,MAAM,gBAAgB,YAAY;AAC9C,KAAI,CAAC,KAAK;AACR,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK,8BAA8B,YAAY;GAC/C,QAAQ;GACR,SAAS;GACV,CAAC;AACF,SAAO;GAAE,SAAS;GAAM;GAAU;;AAGpC,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,8BAA8B,
YAAY;EAC/C,QAAQ;EACR,SAAS,SAAS,IAAI,KAAK,GAAG,IAAI;EACnC,CAAC;CAGF,MAAM,eAAe,IAAI,UACrB,MAAM,qBAAqB,aAAa,IAAI,QAAQ,GACpD,EAAE;CAEN,MAAM,SAA0B;EAC9B,MAAM,IAAI;EACV,SAAS,IAAI;EACb,YAAY,aAAa;EACzB,aAAa,IAAI;EACjB,cAAc,IAAI;EAClB,UAAU,aAAa;EACxB;CAGD,IAAI;CAGJ,IAAI;CACJ,IAAI;AACJ,KAAI,OAAO,IAAI,eAAe,YAAY,IAAI,YAAY,KAAK;AAC7D,eAAa,IAAI,WAAW;EAC5B,MAAM,aAAa,iBAAiB,WAAW;AAE/C,MAAI,CAAC,WAAW,SAAS,MAAM,IAAI,WAAW,SAAS,IAAI,IAAI,CAAC,WAAW,SAAS,IAAI,CACtF,QAAO,UAAU,sBAAsB;MAEvC,QAAO,UAAU;AACnB,WAAS,IAAI,WAAW;YAEjB,OAAO,IAAI,eAAe,SACjC,KAAI,IAAI,WAAW,SAAS,MAAM,EAAE;EAElC,MAAM,KAAK,eAAe,IAAI,WAAW;AACzC,MAAI,GACF,QAAO,UAAU,sBAAsB,GAAG,MAAM,GAAG,GAAG;QAErD;EAEH,MAAM,OAAO,IAAI,WAAW,QAAQ,YAAY,GAAG;AACnD,MAAI,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,SAAS,IAAI,CAC3C,QAAO,UAAU,sBAAsB;;AAK7C,KAAI,IAAI,YAAY,CAAC,gBAAgB,IAAI,SAAS,IAAI,CAAC,iBAAiB,IAAI,SAAS,CACnF,QAAO,UAAU,IAAI;AAIvB,KAAI,OAAO,SAAS,SAAS,aAAa,EAAE;EAC1C,MAAM,KAAK,eAAe,OAAO,QAAQ;AACzC,MAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,YAAY;GAAE;GAAY;GAAQ,CAAC;YAG9G,CAAC,OAAO,SAAS;AAExB,eAAa,gBAAgB;EAC7B,MAAM,cAAc,MAAM,iBAAiB,IAAI,KAAK;AACpD,MAAI,aAAa;AACf,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS,4BAA4B;IACtC,CAAC;GAEF,MAAM,KAAK,eAAe,YAAY;AACtC,OAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,WAAW;QAI7F,UAAS,KAAK;GACZ,QAAQ;GACR,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,SAAS;AAClB,eAAa,WAAW;EACxB,MAAM,UAAU,MAAM,aAAa,OAAO,QAAQ;AAClD,MAAI,SAAS;AACX,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACT,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,IAAI,IAAI,OAAO,QAAQ,CAAC,OAAO;GACvC,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,cAAc,OAAO,WAAW,iBAAiB;EAC1D,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ;AACtD,MAAI,eAAe,YAAY,MAAM,SAAS,GAAG;GAC/C,MAAM,aAAa,wBAAwB,YAAY,OAAO,gBAAgB;AAC9E,OAAI,CAAC,WAAW,SAAS;AACvB,aAAS,KAAK;KACZ,QAAQ;KACR,KAAK,OAAO;KACZ,QAAQ;KACR,SAAS,kDAAkD,KAAK,MAAM,WAAW,aAAa,IAAI,CAAC;KACpG,CAAC;AACF,WAAO,aAAa,KAAA;AACpB,WAAO,SAAS,KAAA;;;;AAMtB,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,cAAc,QAAQ,KAAK;AAChG,eAAa,QAAQ;EACrB,MAAM,SAAS,KAAK,QAAQ,KAAK,gBAAgB,YAAY;EAE7D,MAAM,aAAa,WAAW,OAAO,IAAI,YAAY,OAAO,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC/F,MAAI,YAAY;GACd,MAAM,aAAa,KAAK,QAAQ,WAAW;AAC3C,UAAO,YAAY,cAAc,WAAW,CAAC;AAC7C,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS;IACV,CAAC;;;AAKN,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,WACrE,QAAO;EAAE,SAAS;EAAM;EAAU;AAGpC,QAAO;EAAE,SAAS;EAAQ;EAAU;;AAMtC,SAAgB,sBACd,MACA,SACA,KACwB;AAExB,KAAI,QAAQ,WAAW,QAAQ,EAAE;EAE/B,MAAM,gBAAgB,KADL,QAAQ,KAAK,QAAQ,MAAM,EAAE,CAAC,EACV,eAAe;AACpD,MAAI,WAAW,cAAc,EAAE;GAC7B,MAAM,YAAY,KAAK,MAAM,aAAa,eAAe,QAAQ,CAAC;AAClE,UAAO;IACL,MAAM,UAAU,QAAQ;IACxB,SAAS,UAAU,WAAW;IAC/B;;AAEH,SAAO;;AAIT,KAAI,QAAQ,WAAW,OAAO,EAAE;EAC9B,MAAM,YAAY,QAAQ,MAAM,EAAE;EAClC,MAAM,UAAU,UAAU,WAAW,IAAI,GACrC,UAAU,QAAQ,KAAK,EAAE,GACzB,UAAU,QAAQ,IAAI;EAC1B,MAAM,WAAW,UAAU,IAAI,UAAU,MAAM,GAAG,QAAQ,GAAG;AAC7D,SAAO;GAAE,MAAM;GAAU,SAAS,wBAAwB,UAAU,IAAI,IAAI;GAAK;;AAInF,KAAI,QAAQ,WAAW,QAAQ,IAAI,QAAQ,WAAW,OAAO,IAAI,QAAQ,WAAW,OAAO,CACzF,QAAO;CAKT,MAAM,YAAY,wBAAwB,MAAM,IAAI;AACpD,KAAI,UACF,QAAO;EAAE;EAAM,SAAS;EAAW;AAGrC,KAAI,cAAc,KAAK,QAAQ,CAC7B,QAAO;EAAE;EAAM,SAAS,QAAQ,QAAQ,aAAa,GAAA;EAAK;AAI5D,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,aAAa,CACpE,QAAO;EAAE;EAAM,SAAS;EAAK;AAE/B,QAAO;;AAOT,SAAgB,wBAAwB,MAAc,KAA4B;AAChF,KAAI;EACF,MAAM,WAAW,gBAAgB,GAAG,KAAK,gBAAgB,EAAE,KAAK,KAAK,CAAC;AAEtE,SADY,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC,CAC5C,WAAW;SAElB;AAGJ,MAAI;GAEF,IAAI,MAAM,QADI,gBAAgB,MAAM,EAAE,KAAK,KAAK,CAAC,CACzB;AACxB,UAAO,OAAO,SAAS,IAAI,KAAK,gBAAgB;IAC9C,MAAM,UA
AU,KAAK,KAAK,eAAe;AACzC,QAAI,WAAW,QAAQ,CAErB,QADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;AAExB,UAAM,QAAQ,IAAI;;UAGhB;AACN,SAAO;;;AAOX,eAAsB,sBAAsB,KAAyC;CACnF,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,KAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CACtD,MAAM,OAA+B;EACnC,GAAG,IAAI;EACP,GAAG,IAAI;EACR;CAED,MAAM,UAA6B,EAAE;AAErC,MAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QAAQ,KAAK,EAAE;AAElD,MAAI,KAAK,WAAW,UAAU,IAAI;GAAC;GAAc;GAAU;GAAY;GAAU;GAAO,CAAC,SAAS,KAAK,CACrG;EAGF,MAAM,SAAS,sBAAsB,MAAM,SAAS,IAAI;AACxD,MAAI,OACF,SAAQ,KAAK,OAAO;;AAIxB,QAAO;;AAcT,SAAgB,qBAAqB,WAA4C;CAC/E,MAAM,UAAU,KAAK,WAAW,eAAe;AAC/C,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;CAET,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CAEtD,IAAI;AACJ,KAAI,IAAI,YAAY,IAClB,WAAU,iBAAiB,IAAI,WAAW,IAAI;UAEvC,OAAO,IAAI,eAAe,SACjC,WAAU,iBAAiB,IAAI,WAAW;AAG5C,QAAO;EACL,MAAM,IAAI;EACV,SAAS,IAAI,WAAW;EACxB,aAAa,IAAI;EACjB;EACA;EACD;;AAMH,eAAsB,wBAAwB,WAAoD;CAChG,MAAM,OAAO,qBAAqB,UAAU;AAC5C,KAAI,CAAC,KACH,QAAO;CAET,MAAM,SAA0B;EAC9B,MAAM,KAAK;EACX,SAAS,KAAK;EACd,aAAa,KAAK;EAClB,SAAS,KAAK;EACf;AAGD,KAAI,KAAK,SAAS,SAAS,aAAa,EAAE;EACxC,MAAM,KAAK,eAAe,KAAK,QAAQ;AACvC,MAAI,IAAI;GAEN,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,KAAK,SAAS,KAAK,KAAK;AAC9E,OAAI,SAAS;AACX,WAAO,aAAa,QAAQ;AAC5B,WAAO,SAAS,QAAQ;AACxB,WAAO,kBAAkB,QAAQ;;GAInC,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,KAAA,GAAW,OAAO,OAAO;AAChF,OAAI,UACF,QAAO,YAAY;;;AAMzB,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,YAAY;EAC3C,MAAM,aAAa,YAAY,UAAU,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC5E,MAAI,WACF,QAAO,YAAY,cAAc,KAAK,WAAW,WAAW,CAAC,CAAC;;AAIlE,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,WAC/B,QAAO;AAGT,QAAO;;AAUT,eAAsB,aAAa,MAAc,SAAyC;CACxF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,SAAS,KAAK,UAAU,MAAM;AAGpC,KAAI,WAAW,KAAK,QAAQ,eAAe,CAAC,CAC1C,QAAO;CAGT,MAAM,OAAO,MAAM,OACjB,8BAA8B,KAAK,GAAG,UACvC,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,KACH,QAAO;CACT,MAAM,aAAa,KAAK,MAAM;AAC9B,KAAI,CAAC,WACH,QAAO;CAGT,MAAM,aAAa,MAAM,MAAM,YAAY,EACzC,SAAS,EAAE,cAAc,cAAc,EACxC,CAAC,CAAC,YAAY,KAAK;AAEpB,KAAI,CAAC,YAAY,MAAM,CAAC,WAAW,KACjC,QAAO;AAET,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;CAEtC,MAAM,aAAa,KAAK,UAAU,WAAW;CAC7C,MAAM,aAAa,kBAAkB,WAAW;CAGhD,MAAM,SAAS,WAAW,KAAK,WAAW;AAC1C,OAAM,IAAI,SAAe,KAAK,WAAW;EACvC,MAAM,WAAW,IAAI,SAAS,EAC5B,MAAM,OAAO,WAAW,UAAU;AAChC,cAAW,MAAM,OAAO,SAAS;KAEpC,CAAC;AACF,WAAS,GAAG,gBAAgB;AAC1B,cAAW,KAAK;AAChB,QAAK;IACL;AACF,WAAS,GAAG,SAAS,OAAO;EAE5B,SAAS,OAAO;AACd,UAAO,MAAM,CAAC,MAAM,EAAE,MAAM,YAAY;AACtC,QAAI,MAAM;AACR,cAAS,KAAK;AACd;;AAEF,aAAS,MAAM,aAAa,MAAM,CAAC;KACnC,CAAC,MAAM,OAAO;;AAElB,QAAM;GACN;CAGF,MAAM,EAAE,WAAW,UAAU,OAAO;EAAC;EAAO;EAAY;EAAwB;EAAM;EAAO,EAAE,EAAE,OAAO,UAAU,CAAC;AACnH,KAAI,WAAW,GAAG;AAChB,SAAO,QAAQ;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAChD,SAAO,YAAY,EAAE,OAAO,MAAM,CAAC;AACnC,SAAO;;AAGT,YAAW,WAAW;AACtB,QAAO;;AAMT,eAAsB,mBAAmB,aAA6C;AAIpF,SAHa,MAAM,OACjB,qBAAqB,YAAY,eAClC,CAAC,YAAY,KAAK,GACN,WAAW;;AAM1B,SAAgB,yBAAyB,UAAiC;CACxE,MAAM,YAAY,KAAK,UAAU,WAAW;AAC5C,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;AAIT,QAFgB,aAAa,WAAW,QAAQ,CAC1B,MAAM,6BAA6B,GAC1C,MAAM"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "skilld",
   "type": "module",
-  "version": "0.6.1",
+  "version": "0.6.2",
   "description": "Generate AI agent skills from npm package documentation",
   "author": {
     "name": "Harlan Wilton",