skilld 0.10.1 → 0.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/_chunks/npm.mjs
CHANGED
@@ -755,9 +755,29 @@ function truncateBody(body, limit) {
   if (lastParagraph > lastSafeEnd * .6) return `${slice.slice(0, lastParagraph)}\n\n...`;
   return `${slice}...`;
 }
+const TITLE_NOISE_RE = /looking .*(developer|engineer|freelanc)|hiring|job post|guide me to (?:complete|finish|build)|help me (?:complete|finish|build)|seeking .* tutorial|recommend.* course/i;
+const MIN_DISCUSSION_SCORE = 3;
 function scoreComment(c) {
   return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions);
 }
+function scoreDiscussion(d) {
+  if (TITLE_NOISE_RE.test(d.title)) return -1;
+  let score = 0;
+  if (d.isMaintainer) score += 3;
+  if (hasCodeBlock([
+    d.body,
+    d.answer || "",
+    ...d.topComments.map((c) => c.body)
+  ].join("\n"))) score += 3;
+  score += Math.min(d.upvoteCount, 5);
+  if (d.answer) {
+    score += 2;
+    if (d.answer.length > 100) score += 1;
+  }
+  if (d.topComments.some((c) => c.isMaintainer)) score += 2;
+  if (d.topComments.some((c) => c.reactions > 0)) score += 1;
+  return score;
+}
 async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromDate) {
   if (!isGhAvailable()) return [];
   if (!fromDate && releasedAt) {
@@ -770,7 +790,7 @@ async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromD
     "api",
     "graphql",
     "-f",
-    `query=${`query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${Math.min(limit * 3, 80)}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 10) { totalCount nodes { body author { login } authorAssociation reactions { totalCount } } } answer { body author { login } authorAssociation } author { login } } } } }`}`,
+    `query=${`query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${Math.min(limit * 3, 80)}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 10) { totalCount nodes { body author { login } authorAssociation reactions { totalCount } } } answer { body author { login } authorAssociation } author { login } authorAssociation } } } }`}`,
     "-f",
     `owner=${owner}`,
     "-f",
@@ -819,15 +839,23 @@ async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromD
         url: d.url,
         upvoteCount: d.upvoteCount || 0,
         comments: d.comments?.totalCount || 0,
+        isMaintainer: [
+          "OWNER",
+          "MEMBER",
+          "COLLABORATOR"
+        ].includes(d.authorAssociation),
         answer,
         topComments: comments
       };
-    }).sort((a, b) => {
-      const aHigh = HIGH_VALUE_CATEGORIES.has(a.category.toLowerCase()) ? 1 : 0;
-      const bHigh = HIGH_VALUE_CATEGORIES.has(b.category.toLowerCase()) ? 1 : 0;
+    }).map((d) => ({
+      d,
+      score: scoreDiscussion(d)
+    })).filter(({ score }) => score >= MIN_DISCUSSION_SCORE).sort((a, b) => {
+      const aHigh = HIGH_VALUE_CATEGORIES.has(a.d.category.toLowerCase()) ? 1 : 0;
+      const bHigh = HIGH_VALUE_CATEGORIES.has(b.d.category.toLowerCase()) ? 1 : 0;
       if (aHigh !== bHigh) return bHigh - aHigh;
-      return (b.upvoteCount + b.comments) - (a.upvoteCount + a.comments);
-    }).slice(0, limit);
+      return b.score - a.score;
+    }).slice(0, limit).map(({ d }) => d);
   } catch {
     return [];
   }
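Taken together, the change scores whole discussions (not just individual comments), drops anything below MIN_DISCUSSION_SCORE, and sorts survivors by quality score instead of the old upvotes-plus-comments sum; the GraphQL query now also requests authorAssociation on the discussion author, which feeds the new isMaintainer flag. The TypeScript sketch below reimplements that pipeline from the diff for illustration only — the Discussion shape, the abridged noise regex, and the sample data are assumptions, not the package's exports.

// Sketch of the 0.10.2 selection pipeline, reconstructed from the diff above.
// Shapes and sample values are illustrative assumptions.
interface TopComment { body: string; reactions: number; isMaintainer?: boolean }
interface Discussion {
  title: string
  body: string
  answer?: string
  category: string
  upvoteCount: number
  isMaintainer?: boolean
  topComments: TopComment[]
}

const TITLE_NOISE_RE = /looking .*(developer|engineer|freelanc)|hiring|job post/i // abridged
const MIN_DISCUSSION_SCORE = 3
const HIGH_VALUE_CATEGORIES = new Set(['q&a', 'help', 'troubleshooting', 'support'])
const hasCodeBlock = (text: string) => /```[\s\S]*?```/.test(text) || /`[^`]+`/.test(text)

function scoreDiscussion(d: Discussion): number {
  if (TITLE_NOISE_RE.test(d.title)) return -1 // noise titles always fall below the cutoff
  let score = 0
  if (d.isMaintainer) score += 3
  if (hasCodeBlock([d.body, d.answer || '', ...d.topComments.map(c => c.body)].join('\n'))) score += 3
  score += Math.min(d.upvoteCount, 5) // upvotes count linearly, capped at 5
  if (d.answer) {
    score += 2
    if (d.answer.length > 100) score += 1 // bonus for a substantive accepted answer
  }
  if (d.topComments.some(c => c.isMaintainer)) score += 2
  if (d.topComments.some(c => c.reactions > 0)) score += 1
  return score
}

// Score → filter → category-first sort → unwrap, mirroring the new dist chain
function select(discussions: Discussion[], limit: number): Discussion[] {
  return discussions
    .map(d => ({ d, score: scoreDiscussion(d) }))
    .filter(({ score }) => score >= MIN_DISCUSSION_SCORE)
    .sort((a, b) => {
      const aHigh = HIGH_VALUE_CATEGORIES.has(a.d.category.toLowerCase()) ? 1 : 0
      const bHigh = HIGH_VALUE_CATEGORIES.has(b.d.category.toLowerCase()) ? 1 : 0
      if (aHigh !== bHigh) return bHigh - aHigh
      return b.score - a.score // 0.10.1 compared upvotes + comments here
    })
    .slice(0, limit)
    .map(({ d }) => d)
}

// Example (hypothetical data): the job post scores -1 and is dropped; the
// answered Q&A scores 3 (code) + 2 (upvotes) + 2 (answer) = 7 and survives.
const sample: Discussion[] = [
  { title: 'Hiring: Vue developer', body: '', category: 'General', upvoteCount: 9, topComments: [] },
  { title: 'How do I reset state?', body: 'Tried `reset()` with no luck.', answer: 'Use `$reset()` on the store.', category: 'Q&A', upvoteCount: 2, topComments: [] },
]
console.log(select(sample, 20).map(d => d.title)) // → ['How do I reset state?']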
package/dist/_chunks/npm.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"npm.mjs","names":["_semverGt","hasCodeBlock","truncateBody","COMMENT_NOISE_RE","fsExistsSync","fsReadFileSync"],"sources":["../../src/sources/github-common.ts","../../src/core/shared.ts","../../src/sources/issues.ts","../../src/sources/utils.ts","../../src/sources/releases.ts","../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/llms.ts","../../src/sources/github.ts","../../src/sources/npm.ts"],"sourcesContent":["/**\n * Shared constants and helpers for GitHub source modules (issues, discussions, releases)\n */\n\nexport const BOT_USERS = new Set([\n 'renovate[bot]',\n 'dependabot[bot]',\n 'renovate-bot',\n 'dependabot',\n 'github-actions[bot]',\n])\n\n/** Extract YYYY-MM-DD date from an ISO timestamp */\nexport const isoDate = (iso: string) => iso.split('T')[0]\n\n/** Build YAML frontmatter from a key-value object, auto-quoting strings with special chars */\nexport function buildFrontmatter(fields: Record<string, string | number | boolean | undefined>): string {\n const lines = ['---']\n for (const [k, v] of Object.entries(fields)) {\n if (v !== undefined)\n lines.push(`${k}: ${typeof v === 'string' && /[:\"[\\]]/.test(v) ? `\"${v.replace(/\"/g, '\\\\\"')}\"` : v}`)\n }\n lines.push('---')\n return lines.join('\\n')\n}\n","import { existsSync } from 'node:fs'\nimport { join } from 'pathe'\nimport { gt as _semverGt } from 'semver'\n\n/** Get-or-create for Maps. Polyfill for Map.getOrInsertComputed (not yet in Node.js). */\nexport function mapInsert<K, V>(map: Map<K, V>, key: K, create: () => V): V {\n let val = map.get(key)\n if (val === undefined) {\n val = create()\n map.set(key, val)\n }\n return val\n}\n\n/** Compare two semver strings: returns true if a > b. Handles prereleases. */\nexport function semverGt(a: string, b: string): boolean {\n return _semverGt(a, b, true)\n}\n\nexport const SHARED_SKILLS_DIR = '.skills'\n\n/** Returns the shared skills directory path if `.skills/` exists at project root, else null */\nexport function getSharedSkillsDir(cwd: string = process.cwd()): string | null {\n const dir = join(cwd, SHARED_SKILLS_DIR)\n return existsSync(dir) ? 
dir : null\n}\n","/**\n * GitHub issues fetching via gh CLI Search API\n * Freshness-weighted scoring, type quotas, comment quality filtering\n * Categorized by labels, noise filtered out, non-technical issues detected\n */\n\nimport { spawnSync } from 'node:child_process'\n\nimport { mapInsert } from '../core/shared.ts'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common.ts'\n\nexport type IssueType = 'bug' | 'question' | 'docs' | 'feature' | 'other'\n\nexport interface IssueComment {\n body: string\n author: string\n reactions: number\n isMaintainer?: boolean\n}\n\nexport interface GitHubIssue {\n number: number\n title: string\n state: string\n labels: string[]\n body: string\n createdAt: string\n url: string\n reactions: number\n comments: number\n type: IssueType\n topComments: IssueComment[]\n /** Freshness-weighted score: reactions * decay(age) */\n score: number\n /** For closed issues: version where fix landed, if detectable */\n resolvedIn?: string\n}\n\nlet _ghAvailable: boolean | undefined\n\n/**\n * Check if gh CLI is installed and authenticated (cached)\n */\nexport function isGhAvailable(): boolean {\n if (_ghAvailable !== undefined)\n return _ghAvailable\n const { status } = spawnSync('gh', ['auth', 'status'], { stdio: 'ignore' })\n return (_ghAvailable = status === 0)\n}\n\n/** Labels that indicate noise — filter these out entirely */\nconst NOISE_LABELS = new Set([\n 'duplicate',\n 'stale',\n 'invalid',\n 'wontfix',\n 'won\\'t fix',\n 'spam',\n 'off-topic',\n 'needs triage',\n 'triage',\n])\n\n/** Labels that indicate feature requests — deprioritize */\nconst FEATURE_LABELS = new Set([\n 'enhancement',\n 'feature',\n 'feature request',\n 'feature-request',\n 'proposal',\n 'rfc',\n 'idea',\n 'suggestion',\n])\n\nconst BUG_LABELS = new Set([\n 'bug',\n 'defect',\n 'regression',\n 'error',\n 'crash',\n 'fix',\n 'confirmed',\n 'verified',\n])\n\nconst QUESTION_LABELS = new Set([\n 'question',\n 'help wanted',\n 'support',\n 'usage',\n 'how-to',\n 'help',\n 'assistance',\n])\n\nconst DOCS_LABELS = new Set([\n 'documentation',\n 'docs',\n 'doc',\n 'typo',\n])\n\n/**\n * Check if a label contains any keyword from a set.\n * Handles emoji-prefixed labels like \":sparkles: feature request\" or \":lady_beetle: bug\".\n */\nfunction labelMatchesAny(label: string, keywords: Set<string>): boolean {\n for (const keyword of keywords) {\n if (label === keyword || label.includes(keyword))\n return true\n }\n return false\n}\n\n/**\n * Classify an issue by its labels into a type useful for skill generation\n */\nexport function classifyIssue(labels: string[]): IssueType {\n const lower = labels.map(l => l.toLowerCase())\n if (lower.some(l => labelMatchesAny(l, BUG_LABELS)))\n return 'bug'\n if (lower.some(l => labelMatchesAny(l, QUESTION_LABELS)))\n return 'question'\n if (lower.some(l => labelMatchesAny(l, DOCS_LABELS)))\n return 'docs'\n if (lower.some(l => labelMatchesAny(l, FEATURE_LABELS)))\n return 'feature'\n return 'other'\n}\n\n/**\n * Check if an issue should be filtered out entirely\n */\nfunction isNoiseIssue(issue: { labels: string[], title: string, body: string }): boolean {\n const lower = issue.labels.map(l => l.toLowerCase())\n if (lower.some(l => labelMatchesAny(l, NOISE_LABELS)))\n return true\n // Tracking/umbrella issues — low signal for skill generation\n if (issue.title.startsWith('☂️') || issue.title.startsWith('[META]') || issue.title.startsWith('[Tracking]'))\n return true\n return false\n}\n\n/** Check if body contains a code block */\nfunction 
hasCodeBlock(text: string): boolean {\n return /```[\\s\\S]*?```/.test(text) || /`[^`]+`/.test(text)\n}\n\n/**\n * Detect non-technical issues: fan mail, showcases, sentiment.\n * Short body + no code + high reactions = likely non-technical.\n * Note: roadmap/tracking issues are NOT filtered — they get score-boosted instead.\n */\nexport function isNonTechnical(issue: { body: string, title: string, reactions: number }): boolean {\n const body = (issue.body || '').trim()\n // Very short body with no code — probably sentiment/meta\n if (body.length < 200 && !hasCodeBlock(body) && issue.reactions > 50)\n return true\n // Sentiment patterns (love letters, fan mail)\n if (/\\b(?:love|thank|awesome|great work)\\b/i.test(issue.title) && !hasCodeBlock(body))\n return true\n return false\n}\n\n/**\n * Freshness-weighted score: reactions * decay(age_in_years)\n * Steep decay so recent issues dominate over old high-reaction ones.\n * At 0.6: 1yr=0.63x, 2yr=0.45x, 4yr=0.29x, 6yr=0.22x\n */\nexport function freshnessScore(reactions: number, createdAt: string): number {\n const ageMs = Date.now() - new Date(createdAt).getTime()\n const ageYears = ageMs / (365.25 * 24 * 60 * 60 * 1000)\n return reactions * (1 / (1 + ageYears * 0.6))\n}\n\n/**\n * Type quotas — guarantee a mix of issue types.\n * Bugs and questions get priority; feature requests are hard-capped.\n */\nfunction applyTypeQuotas(issues: GitHubIssue[], limit: number): GitHubIssue[] {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n mapInsert(byType, issue.type, () => []).push(issue)\n }\n\n // Sort each group by score\n for (const group of byType.values())\n group.sort((a, b) => b.score - a.score)\n\n // Allocate slots: bugs 40%, questions 30%, docs 15%, features 10%, other 5%\n const quotas: [IssueType, number][] = [\n ['bug', Math.ceil(limit * 0.40)],\n ['question', Math.ceil(limit * 0.30)],\n ['docs', Math.ceil(limit * 0.15)],\n ['feature', Math.ceil(limit * 0.10)],\n ['other', Math.ceil(limit * 0.05)],\n ]\n\n const selected: GitHubIssue[] = []\n const used = new Set<number>()\n let remaining = limit\n\n // First pass: fill each type up to its quota\n for (const [type, quota] of quotas) {\n const group = byType.get(type) || []\n const take = Math.min(quota, group.length, remaining)\n for (let i = 0; i < take; i++) {\n selected.push(group[i]!)\n used.add(group[i]!.number)\n remaining--\n }\n }\n\n // Second pass: fill remaining slots from best-scored unused issues (any type except feature)\n if (remaining > 0) {\n const unused = issues\n .filter(i => !used.has(i.number) && i.type !== 'feature')\n .sort((a, b) => b.score - a.score)\n for (const issue of unused) {\n if (remaining <= 0)\n break\n selected.push(issue)\n remaining--\n }\n }\n\n return selected.sort((a, b) => b.score - a.score)\n}\n\n/**\n * Body truncation limit based on reactions — high-reaction issues deserve more space\n */\nfunction bodyLimit(reactions: number): number {\n if (reactions >= 10)\n return 2000\n if (reactions >= 5)\n return 1500\n return 800\n}\n\n/**\n * Smart body truncation — preserves code blocks and error messages.\n * Instead of slicing at a char limit, finds a safe break point.\n */\nfunction truncateBody(body: string, limit: number): string {\n if (body.length <= limit)\n return body\n\n // Find code block boundaries so we don't cut mid-block\n const codeBlockRe = /```[\\s\\S]*?```/g\n let lastSafeEnd = limit\n let match: RegExpExecArray | null\n\n // eslint-disable-next-line no-cond-assign\n while ((match = 
codeBlockRe.exec(body)) !== null) {\n const blockStart = match.index\n const blockEnd = blockStart + match[0].length\n\n // If the limit falls inside a code block, move limit to after the block\n // (if not too far) or before the block\n if (blockStart < limit && blockEnd > limit) {\n if (blockEnd <= limit + 500) {\n // Block ends reasonably close — include it\n lastSafeEnd = blockEnd\n }\n else {\n // Block is too long — cut before it\n lastSafeEnd = blockStart\n }\n break\n }\n }\n\n // Try to break at a paragraph boundary\n const slice = body.slice(0, lastSafeEnd)\n const lastParagraph = slice.lastIndexOf('\\n\\n')\n if (lastParagraph > lastSafeEnd * 0.6)\n return `${slice.slice(0, lastParagraph)}\\n\\n...`\n\n return `${slice}...`\n}\n\n/**\n * Fetch issues for a state using GitHub Search API sorted by reactions\n */\nfunction fetchIssuesByState(\n owner: string,\n repo: string,\n state: 'open' | 'closed',\n count: number,\n releasedAt?: string,\n fromDate?: string,\n): GitHubIssue[] {\n const fetchCount = Math.min(count * 3, 100)\n let datePart = ''\n if (fromDate) {\n // Explicit lower bound: only issues from this date onward\n datePart = state === 'closed'\n ? `+closed:>=${fromDate}`\n : `+created:>=${fromDate}`\n }\n else if (state === 'closed') {\n if (releasedAt) {\n // For older versions, include issues closed up to 6 months after release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+closed:<=${isoDate(date.toISOString())}`\n }\n else {\n datePart = `+closed:>${oneYearAgo()}`\n }\n }\n else if (releasedAt) {\n // For older versions, only include issues created around or before release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+created:<=${isoDate(date.toISOString())}`\n }\n\n const q = `repo:${owner}/${repo}+is:issue+is:${state}${datePart}`\n\n const { stdout: result } = spawnSync('gh', [\n 'api',\n `search/issues?q=${q}&sort=reactions&order=desc&per_page=${fetchCount}`,\n '-q',\n '.items[] | {number, title, state, labels: [.labels[]?.name], body, createdAt: .created_at, url: .html_url, reactions: .reactions[\"+1\"], comments: .comments, user: .user.login, userType: .user.type, authorAssociation: .author_association}',\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return []\n\n return result\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(line => JSON.parse(line) as GitHubIssue & { user: string, userType: string, authorAssociation: string })\n .filter(issue => !BOT_USERS.has(issue.user) && issue.userType !== 'Bot')\n .filter(issue => !isNoiseIssue(issue))\n .filter(issue => !isNonTechnical(issue))\n .map(({ user: _, userType: __, authorAssociation, ...issue }) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(authorAssociation)\n const isRoadmap = /\\broadmap\\b/i.test(issue.title) || issue.labels.some(l => /roadmap/i.test(l))\n return {\n ...issue,\n type: classifyIssue(issue.labels),\n topComments: [] as IssueComment[],\n score: freshnessScore(issue.reactions, issue.createdAt) * (isMaintainer && isRoadmap ? 
5 : 1),\n }\n })\n .sort((a, b) => b.score - a.score)\n .slice(0, count)\n}\n\nfunction oneYearAgo(): string {\n const d = new Date()\n d.setFullYear(d.getFullYear() - 1)\n return isoDate(d.toISOString())!\n}\n\n/** Noise patterns in comments — filter these out */\nconst COMMENT_NOISE_RE = /^(?:\\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue)[\\s!?.]*$/i\n\n/**\n * Batch-fetch top comments for issues via GraphQL.\n * Enriches the top N highest-score issues with their best comments.\n * Prioritizes: comments with code blocks, from maintainers, with high reactions.\n * Filters out \"+1\", \"any updates?\", \"same here\" noise.\n */\nfunction enrichWithComments(owner: string, repo: string, issues: GitHubIssue[], topN = 15): void {\n // Only fetch comments for issues worth enriching\n const worth = issues\n .filter(i => i.comments > 0 && (i.type === 'bug' || i.type === 'question' || i.reactions >= 3))\n .sort((a, b) => b.score - a.score)\n .slice(0, topN)\n\n if (worth.length === 0)\n return\n\n // Build a single GraphQL query fetching comments for all selected issues\n // Fetch more comments (10) so we can filter noise and pick the best\n const fragments = worth.map((issue, i) =>\n `i${i}: issue(number: ${issue.number}) { comments(first: 10) { nodes { body author { login } authorAssociation reactions { totalCount } } } }`,\n ).join(' ')\n\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { ${fragments} } }`\n\n try {\n const { stdout: result } = spawnSync('gh', [\n 'api',\n 'graphql',\n '-f',\n `query=${query}`,\n '-f',\n `owner=${owner}`,\n '-f',\n `repo=${repo}`,\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return\n\n const data = JSON.parse(result)\n const repo_ = data?.data?.repository\n if (!repo_)\n return\n\n for (let i = 0; i < worth.length; i++) {\n const nodes = repo_[`i${i}`]?.comments?.nodes\n if (!Array.isArray(nodes))\n continue\n\n const issue = worth[i]!\n\n const comments: (IssueComment & { _score: number })[] = nodes\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .filter((c: any) => !COMMENT_NOISE_RE.test((c.body || '').trim()))\n .map((c: any) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(c.authorAssociation)\n const body = c.body || ''\n const reactions = c.reactions?.totalCount || 0\n // Score: maintainers get 3x, code blocks get 2x, reactions add linearly\n const _score = (isMaintainer ? 3 : 1) * (hasCodeBlock(body) ? 
2 : 1) * (1 + reactions)\n return { body, author: c.author.login, reactions, isMaintainer, _score }\n })\n .sort((a: any, b: any) => b._score - a._score)\n\n // Take top 3 quality comments\n issue.topComments = comments.slice(0, 3).map(({ _score: _, ...c }) => c)\n\n // For closed issues: try to detect fix version from maintainer comments\n if (issue.state === 'closed') {\n issue.resolvedIn = detectResolvedVersion(comments)\n }\n }\n }\n catch {\n // Non-critical — issues still useful without comments\n }\n}\n\n/**\n * Try to detect which version fixed a closed issue from maintainer comments.\n * Looks for version patterns in maintainer/collaborator comments.\n */\nfunction detectResolvedVersion(comments: IssueComment[]): string | undefined {\n const maintainerComments = comments.filter(c => c.isMaintainer)\n // Check from last to first (fix announcements tend to be later)\n for (const c of maintainerComments.reverse()) {\n // \"Fixed in v5.2\", \"landed in 4.1.0\", \"released in v3.0\", \"available in 2.1\"\n const match = c.body.match(/(?:fixed|landed|released|available|shipped|resolved|included)\\s+in\\s+v?(\\d+\\.\\d+(?:\\.\\d+)?)/i)\n if (match)\n return match[1]\n // \"v5.2.0\" or \"5.2.0\" at start of a short comment (release note style)\n if (c.body.length < 100) {\n const vMatch = c.body.match(/\\bv?(\\d+\\.\\d+\\.\\d+)\\b/)\n if (vMatch)\n return vMatch[1]\n }\n }\n return undefined\n}\n\n/**\n * Fetch issues from a GitHub repo with freshness-weighted scoring and type quotas.\n * Returns a balanced mix: bugs > questions > docs > other > features.\n * Filters noise, non-technical content, and enriches with quality comments.\n */\nexport async function fetchGitHubIssues(\n owner: string,\n repo: string,\n limit = 30,\n releasedAt?: string,\n fromDate?: string,\n): Promise<GitHubIssue[]> {\n if (!isGhAvailable())\n return []\n\n const openCount = Math.ceil(limit * 0.75)\n const closedCount = limit - openCount\n\n try {\n // Fetch more than needed so type quotas have a pool to draw from\n const open = fetchIssuesByState(owner, repo, 'open', Math.min(openCount * 2, 100), releasedAt, fromDate)\n const closed = fetchIssuesByState(owner, repo, 'closed', Math.min(closedCount * 2, 50), releasedAt, fromDate)\n const all = [...open, ...closed]\n const selected = applyTypeQuotas(all, limit)\n enrichWithComments(owner, repo, selected)\n return selected\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single issue as markdown with YAML frontmatter\n */\nexport function formatIssueAsMarkdown(issue: GitHubIssue): string {\n const limit = bodyLimit(issue.reactions)\n const fmFields: Record<string, string | number | boolean | undefined> = {\n number: issue.number,\n title: issue.title,\n type: issue.type,\n state: issue.state,\n created: isoDate(issue.createdAt),\n url: issue.url,\n reactions: issue.reactions,\n comments: issue.comments,\n }\n if (issue.resolvedIn)\n fmFields.resolvedIn = issue.resolvedIn\n if (issue.labels.length > 0)\n fmFields.labels = `[${issue.labels.join(', ')}]`\n const fm = buildFrontmatter(fmFields)\n\n const lines = [fm, '', `# ${issue.title}`]\n\n if (issue.body) {\n const body = truncateBody(issue.body, limit)\n lines.push('', body)\n }\n\n if (issue.topComments.length > 0) {\n lines.push('', '---', '', '## Top Comments')\n for (const c of issue.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const maintainer = c.isMaintainer ? 
' [maintainer]' : ''\n const commentBody = truncateBody(c.body, 600)\n lines.push('', `**@${c.author}**${maintainer}${reactions}:`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all issues for quick LLM scanning.\n * Groups by type so the LLM can quickly find bugs vs questions.\n */\nexport function generateIssueIndex(issues: GitHubIssue[]): string {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n mapInsert(byType, issue.type, () => []).push(issue)\n }\n\n const typeLabels: Record<IssueType, string> = {\n bug: 'Bugs & Regressions',\n question: 'Questions & Usage Help',\n docs: 'Documentation',\n feature: 'Feature Requests',\n other: 'Other',\n }\n\n const typeOrder: IssueType[] = ['bug', 'question', 'docs', 'other', 'feature']\n\n const fm = [\n '---',\n `total: ${issues.length}`,\n `open: ${issues.filter(i => i.state === 'open').length}`,\n `closed: ${issues.filter(i => i.state !== 'open').length}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Issues Index', '']\n\n for (const type of typeOrder) {\n const group = byType.get(type)\n if (!group?.length)\n continue\n sections.push(`## ${typeLabels[type]} (${group.length})`, '')\n for (const issue of group) {\n const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : ''\n const state = issue.state === 'open' ? '' : ' [closed]'\n const resolved = issue.resolvedIn ? ` [fixed in ${issue.resolvedIn}]` : ''\n const date = isoDate(issue.createdAt)\n sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state}${resolved} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Shared utilities for doc resolution\n */\n\nimport { ofetch } from 'ofetch'\n\nexport const $fetch = ofetch.create({\n retry: 3,\n retryDelay: 500,\n timeout: 15_000,\n headers: { 'User-Agent': 'skilld/1.0' },\n})\n\n/**\n * Fetch text content from URL\n */\nexport async function fetchText(url: string): Promise<string | null> {\n return $fetch(url, { responseType: 'text' }).catch(() => null)\n}\n\n/**\n * Verify URL exists and is not HTML (likely 404 page)\n */\nexport async function verifyUrl(url: string): Promise<boolean> {\n const res = await $fetch.raw(url, { method: 'HEAD' }).catch(() => null)\n if (!res)\n return false\n const contentType = res.headers.get('content-type') || ''\n return !contentType.includes('text/html')\n}\n\n/**\n * Check if URL points to a social media or package registry site (not real docs)\n */\nconst USELESS_HOSTS = new Set([\n 'twitter.com',\n 'x.com',\n 'facebook.com',\n 'linkedin.com',\n 'youtube.com',\n 'instagram.com',\n 'npmjs.com',\n 'www.npmjs.com',\n 'yarnpkg.com',\n])\n\nexport function isUselessDocsUrl(url: string): boolean {\n try {\n const { hostname } = new URL(url)\n return USELESS_HOSTS.has(hostname)\n }\n catch { return false }\n}\n\n/**\n * Check if URL is a GitHub repo URL (not a docs site)\n */\nexport function isGitHubRepoUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n return parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com'\n }\n catch {\n return false\n }\n}\n\n/**\n * Parse owner/repo from GitHub URL\n */\nexport function parseGitHubUrl(url: string): { owner: string, repo: string } | null {\n const match = url.match(/github\\.com\\/([^/]+)\\/([^/]+?)(?:\\.git)?(?:[/#]|$)/)\n if (!match)\n return null\n return { owner: match[1]!, repo: match[2]! 
}\n}\n\n/**\n * Normalize git repo URL to https\n */\nexport function normalizeRepoUrl(url: string): string {\n return url\n .replace(/^git\\+/, '')\n .replace(/#.*$/, '')\n .replace(/\\.git$/, '')\n .replace(/^git:\\/\\//, 'https://')\n .replace(/^ssh:\\/\\/git@github\\.com/, 'https://github.com')\n // SSH format: git@github.com:owner/repo\n .replace(/^git@github\\.com:/, 'https://github.com/')\n}\n\n/**\n * Parse package spec with optional dist-tag or version: \"vue@beta\" → { name: \"vue\", tag: \"beta\" }\n * Handles scoped packages: \"@vue/reactivity@beta\" → { name: \"@vue/reactivity\", tag: \"beta\" }\n */\nexport function parsePackageSpec(spec: string): { name: string, tag?: string } {\n // Scoped: @scope/pkg@tag — find the second @\n if (spec.startsWith('@')) {\n const slashIdx = spec.indexOf('/')\n if (slashIdx !== -1) {\n const atIdx = spec.indexOf('@', slashIdx + 1)\n if (atIdx !== -1)\n return { name: spec.slice(0, atIdx), tag: spec.slice(atIdx + 1) }\n }\n return { name: spec }\n }\n // Unscoped: pkg@tag\n const atIdx = spec.indexOf('@')\n if (atIdx !== -1)\n return { name: spec.slice(0, atIdx), tag: spec.slice(atIdx + 1) }\n return { name: spec }\n}\n\n/**\n * Extract branch hint from URL fragment (e.g. \"git+https://...#main\" → \"main\")\n */\nexport function extractBranchHint(url: string): string | undefined {\n const hash = url.indexOf('#')\n if (hash === -1)\n return undefined\n const fragment = url.slice(hash + 1)\n // Ignore non-branch fragments like \"readme\"\n if (!fragment || fragment === 'readme')\n return undefined\n return fragment\n}\n","/**\n * GitHub release notes fetching via gh CLI (preferred) with ungh.cc fallback\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { isoDate } from './github-common.ts'\nimport { isGhAvailable } from './issues.ts'\nimport { $fetch } from './utils.ts'\n\nexport interface GitHubRelease {\n id: number\n tag: string\n name: string\n prerelease: boolean\n createdAt: string\n publishedAt: string\n markdown: string\n}\n\ninterface UnghReleasesResponse {\n releases: GitHubRelease[]\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\nexport interface SemVer {\n major: number\n minor: number\n patch: number\n raw: string\n}\n\nexport function parseSemver(version: string): SemVer | null {\n const clean = version.replace(/^v/, '')\n const match = clean.match(/^(\\d+)(?:\\.(\\d+))?(?:\\.(\\d+))?/)\n if (!match)\n return null\n return {\n major: +match[1]!,\n minor: match[2] ? +match[2] : 0,\n patch: match[3] ? 
+match[3] : 0,\n raw: clean,\n }\n}\n\n/**\n * Extract version from a release tag, handling monorepo formats:\n * - `pkg@1.2.3` → `1.2.3`\n * - `pkg-v1.2.3` → `1.2.3`\n * - `v1.2.3` → `1.2.3`\n * - `1.2.3` → `1.2.3`\n */\nfunction extractVersion(tag: string, packageName?: string): string | null {\n if (packageName) {\n // Monorepo: pkg@version or pkg-vversion\n const atMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}@(.+)$`))\n if (atMatch)\n return atMatch[1]!\n const dashMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}-v?(.+)$`))\n if (dashMatch)\n return dashMatch[1]!\n }\n // Standard: v1.2.3 or 1.2.3\n return tag.replace(/^v/, '')\n}\n\nfunction escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n/**\n * Check if a release tag belongs to a specific package\n */\nfunction tagMatchesPackage(tag: string, packageName: string): boolean {\n // Exact match: pkg@version or pkg-vversion\n return tag.startsWith(`${packageName}@`) || tag.startsWith(`${packageName}-v`) || tag.startsWith(`${packageName}-`)\n}\n\n/**\n * Check if a version string contains a prerelease suffix (e.g. 6.0.0-beta, 1.2.3-rc.1)\n */\nexport function isPrerelease(version: string): boolean {\n return /^\\d+\\.\\d+\\.\\d+-.+/.test(version.replace(/^v/, ''))\n}\n\nexport function compareSemver(a: SemVer, b: SemVer): number {\n if (a.major !== b.major)\n return a.major - b.major\n if (a.minor !== b.minor)\n return a.minor - b.minor\n return a.patch - b.patch\n}\n\n/**\n * Fetch releases via gh CLI (fast, authenticated, paginated)\n */\nfunction fetchReleasesViaGh(owner: string, repo: string): GitHubRelease[] {\n try {\n const { stdout: ndjson } = spawnSync('gh', [\n 'api',\n `repos/${owner}/${repo}/releases`,\n '--paginate',\n '--jq',\n '.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}',\n ], { encoding: 'utf-8', timeout: 30_000, stdio: ['ignore', 'pipe', 'ignore'] })\n if (!ndjson)\n return []\n return ndjson.trim().split('\\n').filter(Boolean).map(line => JSON.parse(line))\n }\n catch {\n return []\n }\n}\n\n/**\n * Fetch all releases from a GitHub repo via ungh.cc (fallback)\n */\nasync function fetchReleasesViaUngh(owner: string, repo: string): Promise<GitHubRelease[]> {\n const data = await $fetch<UnghReleasesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n { signal: AbortSignal.timeout(15_000) },\n ).catch(() => null)\n return data?.releases ?? []\n}\n\n/**\n * Fetch all releases — gh CLI first, ungh.cc fallback\n */\nasync function fetchAllReleases(owner: string, repo: string): Promise<GitHubRelease[]> {\n if (isGhAvailable()) {\n const releases = fetchReleasesViaGh(owner, repo)\n if (releases.length > 0)\n return releases\n }\n return fetchReleasesViaUngh(owner, repo)\n}\n\n/**\n * Select last 20 stable releases for a package, sorted newest first.\n * For monorepos, filters to package-specific tags (pkg@version).\n * Falls back to generic tags (v1.2.3) only if no package-specific found.\n * If installedVersion is provided, filters out releases newer than it.\n */\nexport function selectReleases(releases: GitHubRelease[], packageName?: string, installedVersion?: string, fromDate?: string): GitHubRelease[] {\n // Check if this looks like a monorepo (has package-prefixed tags)\n const hasMonorepoTags = packageName && releases.some(r => tagMatchesPackage(r.tag, packageName))\n const installedSv = installedVersion ? 
parseSemver(installedVersion) : null\n const installedIsPrerelease = installedVersion ? isPrerelease(installedVersion) : false\n const fromTs = fromDate ? new Date(fromDate).getTime() : null\n\n const filtered = releases.filter((r) => {\n const ver = extractVersion(r.tag, hasMonorepoTags ? packageName : undefined)\n if (!ver)\n return false\n\n const sv = parseSemver(ver)\n if (!sv)\n return false\n\n // Monorepo: only include tags for this package\n if (hasMonorepoTags && packageName && !tagMatchesPackage(r.tag, packageName))\n return false\n\n // Date lower bound: skip releases published before fromDate\n if (fromTs) {\n const pubDate = r.publishedAt || r.createdAt\n if (pubDate && new Date(pubDate).getTime() < fromTs)\n return false\n }\n\n // Prerelease handling: include only when installed is also prerelease and same major.minor\n if (r.prerelease) {\n if (!installedIsPrerelease || !installedSv)\n return false\n return sv.major === installedSv.major && sv.minor === installedSv.minor\n }\n\n // Filter out stable releases newer than installed version\n if (installedSv && compareSemver(sv, installedSv) > 0)\n return false\n\n return true\n })\n\n const sorted = filtered\n .sort((a, b) => {\n const verA = extractVersion(a.tag, hasMonorepoTags ? packageName : undefined)\n const verB = extractVersion(b.tag, hasMonorepoTags ? packageName : undefined)\n if (!verA || !verB)\n return 0\n return compareSemver(parseSemver(verB)!, parseSemver(verA)!)\n })\n\n // No cap when fromDate is set — include all matching releases\n return fromDate ? sorted : sorted.slice(0, 20)\n}\n\n/**\n * Format a release as markdown with YAML frontmatter\n */\nfunction formatRelease(release: GitHubRelease, packageName?: string): string {\n const date = isoDate(release.publishedAt || release.createdAt)\n const version = extractVersion(release.tag, packageName) || release.tag\n\n const fm = [\n '---',\n `tag: ${release.tag}`,\n `version: ${version}`,\n `published: ${date}`,\n ]\n if (release.name && release.name !== release.tag)\n fm.push(`name: \"${release.name.replace(/\"/g, '\\\\\"')}\"`)\n fm.push('---')\n\n return `${fm.join('\\n')}\\n\\n# ${release.name || release.tag}\\n\\n${release.markdown}`\n}\n\nexport interface ReleaseIndexOptions {\n releases: GitHubRelease[]\n packageName?: string\n blogReleases?: Array<{ version: string, title: string, date: string }>\n hasChangelog?: boolean\n}\n\n/**\n * Generate a unified summary index of all releases for quick LLM scanning.\n * Includes GitHub releases, blog release posts, and CHANGELOG link.\n */\nexport function generateReleaseIndex(releasesOrOpts: GitHubRelease[] | ReleaseIndexOptions, packageName?: string): string {\n // Support both old signature and new options object\n const opts: ReleaseIndexOptions = Array.isArray(releasesOrOpts)\n ? { releases: releasesOrOpts, packageName }\n : releasesOrOpts\n\n const { releases, blogReleases, hasChangelog } = opts\n const pkg = opts.packageName\n\n const total = releases.length + (blogReleases?.length ?? 
0)\n const fm = [\n '---',\n `total: ${total}`,\n `latest: ${releases[0]?.tag || 'unknown'}`,\n '---',\n ]\n\n const lines: string[] = [fm.join('\\n'), '', '# Releases Index', '']\n\n // Blog release posts (major version announcements)\n if (blogReleases && blogReleases.length > 0) {\n lines.push('## Blog Releases', '')\n for (const b of blogReleases) {\n lines.push(`- [${b.version}](./blog-${b.version}.md): ${b.title} (${b.date})`)\n }\n lines.push('')\n }\n\n // GitHub release notes\n if (releases.length > 0) {\n if (blogReleases && blogReleases.length > 0)\n lines.push('## Release Notes', '')\n for (const r of releases) {\n const date = isoDate(r.publishedAt || r.createdAt)\n const filename = r.tag.includes('@') || r.tag.startsWith('v') ? r.tag : `v${r.tag}`\n const version = extractVersion(r.tag, pkg) || r.tag\n const sv = parseSemver(version)\n const label = sv?.patch === 0 && sv.minor === 0 ? ' **[MAJOR]**' : sv?.patch === 0 ? ' **[MINOR]**' : ''\n lines.push(`- [${r.tag}](./${filename}.md): ${r.name || r.tag} (${date})${label}`)\n }\n lines.push('')\n }\n\n // CHANGELOG link\n if (hasChangelog) {\n lines.push('## Changelog', '')\n lines.push('- [CHANGELOG.md](./CHANGELOG.md)')\n lines.push('')\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Check if a single release is a stub redirecting to CHANGELOG.md.\n * Short body (<500 chars) that mentions CHANGELOG indicates no real content.\n */\nexport function isStubRelease(release: GitHubRelease): boolean {\n const body = (release.markdown || '').trim()\n return body.length < 500 && /changelog\\.md/i.test(body)\n}\n\n/**\n * Detect if releases are just short stubs redirecting to CHANGELOG.md.\n * Samples up to 3 releases — if all are stubs, it's a redirect pattern.\n */\nexport function isChangelogRedirectPattern(releases: GitHubRelease[]): boolean {\n const sample = releases.slice(0, 3)\n if (sample.length === 0)\n return false\n return sample.every(isStubRelease)\n}\n\n/**\n * Fetch CHANGELOG.md from a GitHub repo at a specific ref as fallback.\n * For monorepos, also checks packages/{shortName}/CHANGELOG.md.\n */\nasync function fetchChangelog(owner: string, repo: string, ref: string, packageName?: string): Promise<string | null> {\n const paths: string[] = []\n\n // Monorepo: try package-specific paths first (e.g. packages/pinia/CHANGELOG.md)\n if (packageName) {\n const shortName = packageName.replace(/^@.*\\//, '')\n const scopeless = packageName.replace(/^@/, '').replace('/', '-')\n const candidates = [...new Set([shortName, scopeless])]\n for (const name of candidates) {\n paths.push(`packages/${name}/CHANGELOG.md`)\n }\n }\n\n // Root-level changelog\n paths.push('CHANGELOG.md', 'changelog.md', 'CHANGES.md')\n\n for (const path of paths) {\n const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`\n const content = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (content)\n return content\n }\n return null\n}\n\n/**\n * Fetch release notes for a package. Returns CachedDoc[] with releases/{tag}.md files.\n *\n * Strategy:\n * 1. Fetch GitHub releases, filter to package-specific tags for monorepos\n * 2. 
If no releases found, try CHANGELOG.md as fallback\n */\nexport async function fetchReleaseNotes(\n owner: string,\n repo: string,\n installedVersion: string,\n gitRef?: string,\n packageName?: string,\n fromDate?: string,\n changelogRef?: string,\n): Promise<CachedDoc[]> {\n const releases = await fetchAllReleases(owner, repo)\n const selected = selectReleases(releases, packageName, installedVersion, fromDate)\n\n if (selected.length > 0) {\n // Filter out individual stub releases that just say \"see CHANGELOG\"\n const substantive = selected.filter(r => !isStubRelease(r))\n\n const docs = substantive.map((r) => {\n const filename = r.tag.includes('@') || r.tag.startsWith('v')\n ? r.tag\n : `v${r.tag}`\n return {\n path: `releases/${filename}.md`,\n content: formatRelease(r, packageName),\n }\n })\n\n // Always fetch CHANGELOG.md alongside substantive releases\n const ref = changelogRef || gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref, packageName)\n if (changelog && changelog.length < 500_000) {\n docs.push({ path: 'releases/CHANGELOG.md', content: changelog })\n }\n\n return docs\n }\n\n // Fallback: CHANGELOG.md (indexed as single file)\n const ref = changelogRef || gitRef || 'main'\n const changelog = await fetchChangelog(owner, repo, ref, packageName)\n if (!changelog)\n return []\n\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n}\n","/**\n * Blog release notes fetching for packages with curated blog releases\n * Supports version filtering and extensible for multiple packages\n */\n\nimport type { BlogRelease } from './package-registry.ts'\nimport { htmlToMarkdown } from 'mdream'\nimport { getBlogPreset } from './package-registry.ts'\nimport { compareSemver, parseSemver } from './releases.ts'\nimport { $fetch } from './utils.ts'\n\nexport interface BlogReleasePost {\n version: string\n title: string\n date: string\n markdown: string\n url: string\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\n/**\n * Format a blog release as markdown with YAML frontmatter\n */\nfunction formatBlogRelease(release: BlogReleasePost): string {\n const fm = [\n '---',\n `version: ${release.version}`,\n `title: \"${release.title.replace(/\"/g, '\\\\\"')}\"`,\n `date: ${release.date}`,\n `url: ${release.url}`,\n `source: blog-release`,\n '---',\n ]\n\n return `${fm.join('\\n')}\\n\\n# ${release.title}\\n\\n${release.markdown}`\n}\n\n/**\n * Fetch and parse a single blog post using preset metadata for version/date\n */\nasync function fetchBlogPost(entry: BlogRelease): Promise<BlogReleasePost | null> {\n try {\n const html = await $fetch(entry.url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (!html)\n return null\n\n // Extract title from <h1> or <title>, fallback to preset title\n let title = ''\n const titleMatch = html.match(/<h1[^>]*>([^<]+)<\\/h1>/)\n if (titleMatch)\n title = titleMatch[1]!.trim()\n\n if (!title) {\n const metaTitleMatch = html.match(/<title>([^<]+)<\\/title>/)\n if (metaTitleMatch)\n title = metaTitleMatch[1]!.trim()\n }\n\n const markdown = htmlToMarkdown(html)\n if (!markdown)\n return null\n\n return {\n version: entry.version,\n title: title || entry.title || `Release ${entry.version}`,\n date: entry.date,\n markdown,\n url: entry.url,\n }\n }\n catch {\n return null\n }\n}\n\n/**\n * Filter blog releases by installed version\n * Only includes releases where version <= installedVersion\n * Returns all releases if version parsing fails (fail-safe)\n */\nfunction 
filterBlogsByVersion(entries: BlogRelease[], installedVersion: string): BlogRelease[] {\n const installedSv = parseSemver(installedVersion)\n if (!installedSv)\n return entries // Fail-safe: include all if version parsing fails\n\n return entries.filter((entry) => {\n const entrySv = parseSemver(entry.version)\n if (!entrySv)\n return false\n // Include only releases where version <= installed version\n return compareSemver(entrySv, installedSv) <= 0\n })\n}\n\n/**\n * Fetch blog release notes from package presets\n * Filters to only releases matching or older than the installed version\n * Returns CachedDoc[] with releases/blog-{version}.md files\n */\nexport async function fetchBlogReleases(\n packageName: string,\n installedVersion: string,\n): Promise<CachedDoc[]> {\n const preset = getBlogPreset(packageName)\n if (!preset)\n return []\n\n const filteredReleases = filterBlogsByVersion(preset.releases, installedVersion)\n if (filteredReleases.length === 0)\n return []\n\n const releases: BlogReleasePost[] = []\n\n // Fetch all blog posts in parallel with 3 concurrent requests\n const batchSize = 3\n for (let i = 0; i < filteredReleases.length; i += batchSize) {\n const batch = filteredReleases.slice(i, i + batchSize)\n const results = await Promise.all(batch.map(entry => fetchBlogPost(entry)))\n for (const result of results) {\n if (result)\n releases.push(result)\n }\n }\n\n if (releases.length === 0)\n return []\n\n // Sort by version descending (newest first)\n releases.sort((a, b) => {\n const aVer = a.version.split('.').map(Number)\n const bVer = b.version.split('.').map(Number)\n for (let i = 0; i < Math.max(aVer.length, bVer.length); i++) {\n const diff = (bVer[i] ?? 0) - (aVer[i] ?? 0)\n if (diff !== 0)\n return diff\n }\n return 0\n })\n\n // Format as cached docs — stored in releases/ alongside regular releases\n return releases.map(r => ({\n path: `releases/blog-${r.version}.md`,\n content: formatBlogRelease(r),\n }))\n}\n","/**\n * GitHub discussions fetching via gh CLI GraphQL\n * Prioritizes Q&A and Help categories, includes accepted answers\n * Comment quality filtering, smart truncation, noise removal\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { mapInsert } from '../core/shared.ts'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common.ts'\nimport { isGhAvailable } from './issues.ts'\n\n/** Categories most useful for skill generation (in priority order) */\nconst HIGH_VALUE_CATEGORIES = new Set([\n 'q&a',\n 'help',\n 'troubleshooting',\n 'support',\n])\n\nconst LOW_VALUE_CATEGORIES = new Set([\n 'show and tell',\n 'ideas',\n 'polls',\n])\n\nexport interface DiscussionComment {\n body: string\n author: string\n reactions: number\n isMaintainer?: boolean\n}\n\nexport interface GitHubDiscussion {\n number: number\n title: string\n body: string\n category: string\n createdAt: string\n url: string\n upvoteCount: number\n comments: number\n answer?: string\n topComments: DiscussionComment[]\n}\n\n/** Noise patterns in comments — filter these out */\nconst COMMENT_NOISE_RE = /^(?:\\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue|thanks|thank you)[\\s!?.]*$/i\n\n/** Check if body contains a code block */\nfunction hasCodeBlock(text: string): boolean {\n return /```[\\s\\S]*?```/.test(text) || /`[^`]+`/.test(text)\n}\n\n/**\n * Smart body truncation — preserves code blocks and error messages.\n * Instead of slicing at a char limit, finds a safe break point.\n */\nfunction 
truncateBody(body: string, limit: number): string {\n if (body.length <= limit)\n return body\n\n // Find code block boundaries so we don't cut mid-block\n const codeBlockRe = /```[\\s\\S]*?```/g\n let lastSafeEnd = limit\n let match: RegExpExecArray | null\n\n // eslint-disable-next-line no-cond-assign\n while ((match = codeBlockRe.exec(body)) !== null) {\n const blockStart = match.index\n const blockEnd = blockStart + match[0].length\n\n if (blockStart < limit && blockEnd > limit) {\n if (blockEnd <= limit + 500) {\n lastSafeEnd = blockEnd\n }\n else {\n lastSafeEnd = blockStart\n }\n break\n }\n }\n\n // Try to break at a paragraph boundary\n const slice = body.slice(0, lastSafeEnd)\n const lastParagraph = slice.lastIndexOf('\\n\\n')\n if (lastParagraph > lastSafeEnd * 0.6)\n return `${slice.slice(0, lastParagraph)}\\n\\n...`\n\n return `${slice}...`\n}\n\n/**\n * Score a comment for quality. Higher = more useful for skill generation.\n * Maintainers 3x, code blocks 2x, reactions linear.\n */\nfunction scoreComment(c: { body: string, reactions: number, isMaintainer?: boolean }): number {\n return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions)\n}\n\n/**\n * Fetch discussions from a GitHub repo using gh CLI GraphQL.\n * Prioritizes Q&A and Help categories. Includes accepted answer body for answered discussions.\n * Fetches extra comments and scores them for quality.\n */\nexport async function fetchGitHubDiscussions(\n owner: string,\n repo: string,\n limit = 20,\n releasedAt?: string,\n fromDate?: string,\n): Promise<GitHubDiscussion[]> {\n if (!isGhAvailable())\n return []\n\n // GraphQL discussions endpoint doesn't support date filtering,\n // so we fetch latest N and filter client-side. Skip entirely\n // if the cutoff is in the past — results would be empty anyway.\n // (Skip this check when fromDate is set — we'll filter client-side below)\n if (!fromDate && releasedAt) {\n const cutoff = new Date(releasedAt)\n cutoff.setMonth(cutoff.getMonth() + 6)\n if (cutoff < new Date())\n return []\n }\n\n try {\n // Fetch more to compensate for filtering\n const fetchCount = Math.min(limit * 3, 80)\n // Fetch 10 comments per discussion so we can filter noise and pick best\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${fetchCount}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 10) { totalCount nodes { body author { login } authorAssociation reactions { totalCount } } } answer { body author { login } authorAssociation } author { login } } } } }`\n\n const { stdout: result } = spawnSync('gh', ['api', 'graphql', '-f', `query=${query}`, '-f', `owner=${owner}`, '-f', `repo=${repo}`], {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024,\n })\n if (!result)\n return []\n\n const data = JSON.parse(result)\n const nodes = data?.data?.repository?.discussions?.nodes\n if (!Array.isArray(nodes))\n return []\n\n const fromTs = fromDate ? 
new Date(fromDate).getTime() : null\n const discussions = nodes\n .filter((d: any) => d.author && !BOT_USERS.has(d.author.login))\n .filter((d: any) => {\n const cat = (d.category?.name || '').toLowerCase()\n return !LOW_VALUE_CATEGORIES.has(cat)\n })\n .filter((d: any) => !fromTs || new Date(d.createdAt).getTime() >= fromTs)\n .map((d: any) => {\n // Process answer — tag maintainer status\n let answer: string | undefined\n if (d.answer?.body) {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(d.answer.authorAssociation)\n const author = d.answer.author?.login\n const tag = isMaintainer && author ? `**@${author}** [maintainer]:\\n\\n` : ''\n answer = `${tag}${d.answer.body}`\n }\n\n // Process comments — filter noise, score for quality, take best 3\n const comments: DiscussionComment[] = (d.comments?.nodes || [])\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .filter((c: any) => !COMMENT_NOISE_RE.test((c.body || '').trim()))\n .map((c: any) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(c.authorAssociation)\n return {\n body: c.body || '',\n author: c.author.login,\n reactions: c.reactions?.totalCount || 0,\n isMaintainer,\n }\n })\n .sort((a: DiscussionComment, b: DiscussionComment) => scoreComment(b) - scoreComment(a))\n .slice(0, 3)\n\n return {\n number: d.number,\n title: d.title,\n body: d.body || '',\n category: d.category?.name || '',\n createdAt: d.createdAt,\n url: d.url,\n upvoteCount: d.upvoteCount || 0,\n comments: d.comments?.totalCount || 0,\n answer,\n topComments: comments,\n }\n })\n // Prioritize high-value categories, then sort by engagement\n .sort((a: GitHubDiscussion, b: GitHubDiscussion) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.category.toLowerCase()) ? 1 : 0\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.category.toLowerCase()) ? 1 : 0\n if (aHigh !== bHigh)\n return bHigh - aHigh\n return (b.upvoteCount + b.comments) - (a.upvoteCount + a.comments)\n })\n .slice(0, limit)\n\n return discussions\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single discussion as markdown with YAML frontmatter\n */\nexport function formatDiscussionAsMarkdown(d: GitHubDiscussion): string {\n const fm = buildFrontmatter({\n number: d.number,\n title: d.title,\n category: d.category,\n created: isoDate(d.createdAt),\n url: d.url,\n upvotes: d.upvoteCount,\n comments: d.comments,\n answered: !!d.answer,\n })\n\n const bodyLimit = d.upvoteCount >= 5 ? 1500 : 800\n const lines = [fm, '', `# ${d.title}`]\n\n if (d.body) {\n lines.push('', truncateBody(d.body, bodyLimit))\n }\n\n if (d.answer) {\n lines.push('', '---', '', '## Accepted Answer', '', truncateBody(d.answer, 1000))\n }\n else if (d.topComments.length > 0) {\n // No accepted answer — include top comments as context\n lines.push('', '---', '', '## Top Comments')\n for (const c of d.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const maintainer = c.isMaintainer ? 
' [maintainer]' : ''\n lines.push('', `**@${c.author}**${maintainer}${reactions}:`, '', truncateBody(c.body, 600))\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all discussions for quick LLM scanning.\n * Groups by category so the LLM can quickly find Q&A vs general discussions.\n */\nexport function generateDiscussionIndex(discussions: GitHubDiscussion[]): string {\n const byCategory = new Map<string, GitHubDiscussion[]>()\n for (const d of discussions) {\n const cat = d.category || 'Uncategorized'\n mapInsert(byCategory, cat, () => []).push(d)\n }\n\n const answered = discussions.filter(d => d.answer).length\n\n const fm = [\n '---',\n `total: ${discussions.length}`,\n `answered: ${answered}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Discussions Index', '']\n\n // Sort categories: high-value first\n const cats = [...byCategory.keys()].sort((a, b) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1\n return aHigh - bHigh || a.localeCompare(b)\n })\n\n for (const cat of cats) {\n const group = byCategory.get(cat)!\n sections.push(`## ${cat} (${group.length})`, '')\n for (const d of group) {\n const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : ''\n const answered = d.answer ? ' [answered]' : ''\n const date = isoDate(d.createdAt)\n sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Globs .d.ts type definition files from a package for search indexing.\n * Only types — source code is too verbose.\n */\nimport { existsSync, readFileSync } from 'node:fs'\nimport { globby } from 'globby'\nimport { join } from 'pathe'\n\nexport interface EntryFile {\n path: string\n content: string\n type: 'types' | 'source'\n}\n\nconst SKIP_DIRS = [\n 'node_modules',\n '_vendor',\n '__tests__',\n '__mocks__',\n '__fixtures__',\n 'test',\n 'tests',\n 'fixture',\n 'fixtures',\n 'locales',\n 'locale',\n 'i18n',\n '.git',\n]\n\nconst SKIP_PATTERNS = [\n '*.min.*',\n '*.prod.*',\n '*.global.*',\n '*.browser.*',\n '*.map',\n '*.map.js',\n 'CHANGELOG*',\n 'LICENSE*',\n 'README*',\n]\n\nconst MAX_FILE_SIZE = 500 * 1024 // 500KB per file\n\n/**\n * Glob .d.ts type definition files from a package directory, skipping junk.\n */\nexport async function resolveEntryFiles(packageDir: string): Promise<EntryFile[]> {\n if (!existsSync(join(packageDir, 'package.json')))\n return []\n\n const ignore = [\n ...SKIP_DIRS.map(d => `**/${d}/**`),\n ...SKIP_PATTERNS,\n ]\n\n const files = await globby(['**/*.d.{ts,mts,cts}'], {\n cwd: packageDir,\n ignore,\n absolute: false,\n })\n\n const entries: EntryFile[] = []\n\n for (const file of files) {\n const absPath = join(packageDir, file)\n let content: string\n try {\n content = readFileSync(absPath, 'utf-8')\n }\n catch {\n continue\n }\n\n if (content.length > MAX_FILE_SIZE)\n continue\n\n entries.push({ path: file, content, type: 'types' })\n }\n\n return entries\n}\n","/**\n * Git repo skill source — parse inputs + fetch pre-authored skills from repos\n *\n * Supports GitHub shorthand (owner/repo), full URLs, SSH, GitLab, and local paths.\n * Skills are pre-authored SKILL.md files — no doc resolution or LLM generation needed.\n */\n\nimport { existsSync, readdirSync, readFileSync } from 'node:fs'\nimport pLimit from 'p-limit'\nimport { resolve } from 'pathe'\nimport { yamlParseKV } from 
'../core/yaml.ts'\nimport { $fetch, normalizeRepoUrl, parseGitHubUrl } from './utils.ts'\n\nexport interface GitSkillSource {\n type: 'github' | 'gitlab' | 'git-ssh' | 'local'\n owner?: string\n repo?: string\n /** Direct path to a specific skill (from /tree/ref/path URLs) */\n skillPath?: string\n /** Branch/tag parsed from URL */\n ref?: string\n /** Absolute path for local sources */\n localPath?: string\n}\n\nexport interface RemoteSkill {\n /** From SKILL.md frontmatter `name` field, or directory name */\n name: string\n /** From SKILL.md frontmatter `description` field */\n description: string\n /** Path within repo (e.g., \"skills/web-design-guidelines\") */\n path: string\n /** Full SKILL.md content */\n content: string\n /** Supporting files (scripts/, references/, assets/) */\n files: Array<{ path: string, content: string }>\n}\n\n/**\n * Detect whether an input string is a git skill source.\n * Returns null for npm package names (including scoped @scope/pkg).\n */\nexport function parseGitSkillInput(input: string): GitSkillSource | null {\n const trimmed = input.trim()\n\n // Scoped npm packages → not git\n if (trimmed.startsWith('@'))\n return null\n\n // Local paths\n if (trimmed.startsWith('./') || trimmed.startsWith('../') || trimmed.startsWith('/') || trimmed.startsWith('~')) {\n const localPath = trimmed.startsWith('~')\n ? resolve(process.env.HOME || '', trimmed.slice(1))\n : resolve(trimmed)\n return { type: 'local', localPath }\n }\n\n // SSH format: git@github.com:owner/repo\n if (trimmed.startsWith('git@')) {\n const normalized = normalizeRepoUrl(trimmed)\n const gh = parseGitHubUrl(normalized)\n if (gh)\n return { type: 'github', owner: gh.owner, repo: gh.repo }\n return null\n }\n\n // Full URLs\n if (trimmed.startsWith('https://') || trimmed.startsWith('http://')) {\n return parseGitUrl(trimmed)\n }\n\n // GitHub shorthand: owner/repo (exactly one slash, no spaces, no commas)\n if (/^[\\w.-]+\\/[\\w.-]+$/.test(trimmed)) {\n return { type: 'github', owner: trimmed.split('/')[0], repo: trimmed.split('/')[1] }\n }\n\n // Everything else → npm\n return null\n}\n\nfunction parseGitUrl(url: string): GitSkillSource | null {\n try {\n const parsed = new URL(url)\n\n if (parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n\n // Handle /tree/ref/path URLs → extract specific skill path\n if (parts[2] === 'tree' && parts.length >= 4) {\n const ref = parts[3]\n const skillPath = parts.length > 4 ? 
parts.slice(4).join('/') : undefined\n return { type: 'github', owner, repo, ref, skillPath }\n }\n\n return { type: 'github', owner, repo }\n }\n\n if (parsed.hostname === 'gitlab.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n return { type: 'gitlab', owner, repo }\n }\n\n return null\n }\n catch {\n return null\n }\n}\n\n/**\n * Parse name and description from SKILL.md frontmatter.\n */\nexport function parseSkillFrontmatterName(content: string): { name?: string, description?: string } {\n const match = content.match(/^---\\n([\\s\\S]*?)\\n---/)\n if (!match)\n return {}\n\n const result: { name?: string, description?: string } = {}\n for (const line of match[1].split('\\n')) {\n const kv = yamlParseKV(line)\n if (!kv)\n continue\n if (kv[0] === 'name')\n result.name = kv[1]\n if (kv[0] === 'description')\n result.description = kv[1]\n }\n return result\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/** Supporting file dirs within a skill directory */\nconst SUPPORTING_DIRS = ['scripts', 'references', 'assets']\n\n/**\n * Fetch skills from a git source. Returns list of discovered skills + commit SHA.\n */\nexport async function fetchGitSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n if (source.type === 'local')\n return fetchLocalSkills(source)\n if (source.type === 'github')\n return fetchGitHubSkills(source, onProgress)\n if (source.type === 'gitlab')\n return fetchGitLabSkills(source, onProgress)\n return { skills: [] }\n}\n\n// ── Local ──\n\nfunction fetchLocalSkills(source: GitSkillSource): { skills: RemoteSkill[] } {\n const base = source.localPath!\n if (!existsSync(base))\n return { skills: [] }\n\n const skills: RemoteSkill[] = []\n\n // Check for skills/ subdirectory\n const skillsDir = resolve(base, 'skills')\n if (existsSync(skillsDir)) {\n for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {\n if (!entry.isDirectory())\n continue\n const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`)\n if (skill)\n skills.push(skill)\n }\n }\n\n // Check for root SKILL.md\n if (skills.length === 0) {\n const skill = readLocalSkill(base, '')\n if (skill)\n skills.push(skill)\n }\n\n return { skills }\n}\n\nfunction readLocalSkill(dir: string, repoPath: string): RemoteSkill | null {\n const skillMdPath = resolve(dir, 'SKILL.md')\n if (!existsSync(skillMdPath))\n return null\n\n const content = readFileSync(skillMdPath, 'utf-8')\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = dir.split('/').pop()!\n const name = frontmatter.name || dirName\n\n const files: Array<{ path: string, content: string }> = []\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPath = resolve(dir, subdir)\n if (!existsSync(subdirPath))\n continue\n for (const file of readdirSync(subdirPath, { withFileTypes: true })) {\n if (!file.isFile())\n continue\n files.push({\n path: `${subdir}/${file.name}`,\n content: readFileSync(resolve(subdirPath, file.name), 'utf-8'),\n })\n }\n }\n\n return {\n name,\n description: frontmatter.description || '',\n path: repoPath,\n content,\n files,\n }\n}\n\n// ── GitHub ──\n\nasync function fetchGitHubSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: 
RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n\n if (!data?.files?.length) {\n // Try 'master' fallback if default ref failed\n if (ref === 'main') {\n const fallback = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/master`,\n ).catch(() => null)\n if (fallback?.files?.length)\n return extractGitHubSkills(owner!, repo!, 'master', fallback, source.skillPath, onProgress)\n }\n return { skills: [] }\n }\n\n return extractGitHubSkills(owner!, repo!, ref, data, source.skillPath, onProgress)\n}\n\nasync function extractGitHubSkills(\n owner: string,\n repo: string,\n ref: string,\n data: UnghFilesResponse,\n skillPath?: string,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const allFiles = data.files.map(f => f.path)\n const commitSha = data.meta?.sha\n\n // Find SKILL.md files\n let skillMdPaths: string[]\n\n if (skillPath) {\n // Direct skill path: look for SKILL.md at that path\n const candidates = [\n `${skillPath}/SKILL.md`,\n // In case they linked directly to the SKILL.md\n skillPath.endsWith('/SKILL.md') ? skillPath : null,\n ].filter(Boolean) as string[]\n\n skillMdPaths = allFiles.filter(f => candidates.includes(f))\n }\n else {\n // Discover: skills/*/SKILL.md or root SKILL.md\n skillMdPaths = allFiles.filter(f =>\n f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md',\n )\n }\n\n if (skillMdPaths.length === 0)\n return { skills: [], commitSha }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitHub(owner, repo, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
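/* e.g. skillDir 'skills/web-design' gives prefix 'skills/web-design/', so a repo file\n 'skills/web-design/scripts/lint.mjs' is stored as 'scripts/lint.mjs' (names illustrative) */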
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitHub(owner, repo, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills, commitSha }\n}\n\nasync function fetchRawGitHub(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n\n// ── GitLab ──\n\ninterface GitLabTreeEntry {\n id: string\n name: string\n type: string\n path: string\n mode: string\n}\n\nasync function fetchGitLabSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n const projectId = encodeURIComponent(`${owner}/${repo}`)\n\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const tree = await $fetch<GitLabTreeEntry[]>(\n `https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=${ref}&recursive=true&per_page=100`,\n ).catch(() => null)\n\n if (!tree?.length)\n return { skills: [] }\n\n const allFiles = tree.filter(e => e.type === 'blob').map(e => e.path)\n\n // Find SKILL.md files\n const skillMdPaths = source.skillPath\n ? allFiles.filter(f => f === `${source.skillPath}/SKILL.md`)\n : allFiles.filter(f => f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md')\n\n if (skillMdPaths.length === 0)\n return { skills: [] }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitLab(owner!, repo!, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo!\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? 
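/* same relativization as the GitHub variant above; note the tree listing is capped at\n per_page=100 with no pagination, so files in very large repos may be missed */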
`${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitLab(owner!, repo!, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills }\n}\n\nasync function fetchRawGitLab(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://gitlab.com/${owner}/${repo}/-/raw/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n","/**\n * llms.txt fetching and parsing\n */\n\nimport type { FetchedDoc, LlmsContent, LlmsLink } from './types.ts'\nimport pLimit from 'p-limit'\nimport { fetchText, verifyUrl } from './utils.ts'\n\n/**\n * Check for llms.txt at a docs URL, returns the llms.txt URL if found\n */\nexport async function fetchLlmsUrl(docsUrl: string): Promise<string | null> {\n const origin = new URL(docsUrl).origin\n const llmsUrl = `${origin}/llms.txt`\n if (await verifyUrl(llmsUrl))\n return llmsUrl\n return null\n}\n\n/**\n * Fetch and parse llms.txt content\n */\nexport async function fetchLlmsTxt(url: string): Promise<LlmsContent | null> {\n const content = await fetchText(url)\n if (!content || content.length < 50)\n return null\n\n return {\n raw: content,\n links: parseMarkdownLinks(content),\n }\n}\n\n/**\n * Parse markdown links from llms.txt to get .md file paths\n */\nexport function parseMarkdownLinks(content: string): LlmsLink[] {\n const links: LlmsLink[] = []\n const seen = new Set<string>()\n const linkRegex = /\\[([^\\]]+)\\]\\(([^)]+\\.md)\\)/g\n for (let match = linkRegex.exec(content); match !== null; match = linkRegex.exec(content)) {\n const url = match[2]!\n if (!seen.has(url)) {\n seen.add(url)\n links.push({ title: match[1]!, url })\n }\n }\n\n return links\n}\n\n/** Reject non-https URLs and private/link-local IPs */\nfunction isSafeUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n if (parsed.protocol !== 'https:')\n return false\n const host = parsed.hostname\n // Reject private/link-local/loopback\n if (host === 'localhost' || host === '127.0.0.1' || host === '::1')\n return false\n if (host === '169.254.169.254') // cloud metadata\n return false\n if (/^(?:10\.|172\.(?:1[6-9]|2\d|3[01])\.|192\.168\.)/.test(host))\n return false\n if (host.startsWith('[')) // bracketed IPv6 hosts (rejected conservatively)\n return false\n return true\n }\n catch { return false }\n}\n\n/**\n * Download all .md files referenced in llms.txt\n */\nexport async function downloadLlmsDocs(\n llmsContent: LlmsContent,\n baseUrl: string,\n onProgress?: (url: string, index: number, total: number) => void,\n): Promise<FetchedDoc[]> {\n const limit = pLimit(5)\n let completed = 0\n\n const results = await Promise.all(\n llmsContent.links.map(link => limit(async () => {\n const url = link.url.startsWith('http')\n ? link.url\n : `${baseUrl.replace(/\/$/, '')}${link.url.startsWith('/') ? 
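/* illustrative: baseUrl 'https://docs.example.com/' + link '/guide/intro.md'\n → 'https://docs.example.com/guide/intro.md' */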
'' : '/'}${link.url}`\n\n if (!isSafeUrl(url))\n return null\n\n onProgress?.(link.url, completed++, llmsContent.links.length)\n\n const content = await fetchText(url)\n if (content && content.length > 100)\n return { url: link.url, title: link.title, content } as FetchedDoc\n return null\n })),\n )\n\n return results.filter((d): d is FetchedDoc => d !== null)\n}\n\n/**\n * Normalize llms.txt links to relative paths for local access\n * Handles: absolute URLs, root-relative paths, and relative paths\n */\nexport function normalizeLlmsLinks(content: string, baseUrl?: string): string {\n let normalized = content\n\n // Handle absolute URLs: https://example.com/docs/foo.md → ./docs/foo.md\n if (baseUrl) {\n const base = baseUrl.replace(/\\/$/, '')\n const escaped = base.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n normalized = normalized.replace(\n new RegExp(`\\\\]\\\\(${escaped}(/[^)]+\\\\.md)\\\\)`, 'g'),\n '](./docs$1)',\n )\n }\n\n // Handle root-relative paths: /foo.md → ./docs/foo.md\n normalized = normalized.replace(/\\]\\(\\/([^)]+\\.md)\\)/g, '](./docs/$1)')\n\n return normalized\n}\n\n/**\n * Extract sections from llms-full.txt by URL patterns\n * Format: ---\\nurl: /path.md\\n---\\n<content>\\n\\n---\\nurl: ...\n */\nexport function extractSections(content: string, patterns: string[]): string | null {\n const sections: string[] = []\n const parts = content.split(/\\n---\\n/)\n\n for (const part of parts) {\n const urlMatch = part.match(/^url: *(\\S.*)$/m)\n if (!urlMatch)\n continue\n\n const url = urlMatch[1]!\n if (patterns.some(p => url.includes(p))) {\n const contentStart = part.indexOf('\\n', part.indexOf('url:'))\n if (contentStart > -1) {\n sections.push(part.slice(contentStart + 1))\n }\n }\n }\n\n if (sections.length === 0)\n return null\n return sections.join('\\n\\n---\\n\\n')\n}\n","/**\n * GitHub/ungh README resolution + versioned docs\n */\n\nimport type { LlmsLink, ResolvedPackage } from './types.ts'\nimport { spawnSync } from 'node:child_process'\nimport { existsSync as fsExistsSync, readFileSync as fsReadFileSync } from 'node:fs'\nimport { fileURLToPath } from 'node:url'\nimport { mapInsert } from '../core/shared.ts'\nimport { isGhAvailable } from './issues.ts'\nimport { fetchLlmsUrl } from './llms.ts'\nimport { getDocOverride } from './package-registry.ts'\nimport { $fetch, extractBranchHint, fetchText, parseGitHubUrl } from './utils.ts'\n\n/** Minimum git-doc file count to prefer over llms.txt */\nexport const MIN_GIT_DOCS = 5\n\n/** True when git-docs exist but are too few to be useful (< MIN_GIT_DOCS) */\nexport const isShallowGitDocs = (n: number) => n > 0 && n < MIN_GIT_DOCS\n\nexport interface GitDocsResult {\n /** URL pattern for fetching docs (use with ref) */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of doc file paths relative to repo root */\n files: string[]\n /** Prefix to strip when normalizing paths to docs/ (e.g. 
'apps/evalite-docs/src/content/') for nested monorepo docs */\n docsPrefix?: string\n /** Full repo file tree — only set when discoverDocFiles() heuristic was used (not standard docs/ prefix) */\n allFiles?: string[]\n /** True when ref is a branch (main/master) rather than a version-specific tag */\n fallback?: boolean\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/**\n * List files at a git ref using ungh (no rate limits)\n */\nasync function listFilesAtRef(owner: string, repo: string, ref: string): Promise<string[]> {\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n return data?.files?.map(f => f.path) ?? []\n}\n\ninterface TagResult {\n ref: string\n files: string[]\n /** True when ref is a branch fallback (main/master) rather than a version tag */\n fallback?: boolean\n}\n\n/**\n * Find git tag for a version by checking if ungh can list files at that ref.\n * Tries v{version}, {version}, and optionally {packageName}@{version} (changeset convention).\n */\nasync function findGitTag(owner: string, repo: string, version: string, packageName?: string, branchHint?: string): Promise<TagResult | null> {\n const candidates = [`v${version}`, version]\n if (packageName)\n candidates.push(`${packageName}@${version}`)\n\n for (const tag of candidates) {\n const files = await listFilesAtRef(owner, repo, tag)\n if (files.length > 0)\n return { ref: tag, files }\n }\n\n // Fallback: find latest release tag matching {packageName}@* (version mismatch in monorepos)\n if (packageName) {\n const latestTag = await findLatestReleaseTag(owner, repo, packageName)\n if (latestTag) {\n const files = await listFilesAtRef(owner, repo, latestTag)\n if (files.length > 0)\n return { ref: latestTag, files }\n }\n }\n\n // Last resort: try default branch (prefer hint from repo URL fragment)\n const branches = branchHint\n ? [branchHint, ...['main', 'master'].filter(b => b !== branchHint)]\n : ['main', 'master']\n for (const branch of branches) {\n const files = await listFilesAtRef(owner, repo, branch)\n if (files.length > 0)\n return { ref: branch, files, fallback: true }\n }\n\n return null\n}\n\n/**\n * Find the latest release tag matching `{packageName}@*` via ungh releases API.\n * Handles monorepos where npm version doesn't match git tag version.\n */\nasync function findLatestReleaseTag(owner: string, repo: string, packageName: string): Promise<string | null> {\n const data = await $fetch<{ releases?: Array<{ tag: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n const prefix = `${packageName}@`\n return data?.releases?.find(r => r.tag.startsWith(prefix))?.tag ?? 
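/* e.g. returns a tag like '@scope/pkg@1.4.2' (changeset convention; example illustrative) */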
null\n}\n\n/**\n * Filter file paths by prefix and md/mdx extension\n */\nfunction filterDocFiles(files: string[], pathPrefix: string): string[] {\n return files.filter(f => f.startsWith(pathPrefix) && /\\.(?:md|mdx)$/.test(f))\n}\n\n/** Known noise paths to exclude from doc discovery */\nconst NOISE_PATTERNS = [\n /^\\.changeset\\//,\n /CHANGELOG\\.md$/i,\n /CONTRIBUTING\\.md$/i,\n /^\\.github\\//,\n]\n\n/** Directories to exclude from \"best directory\" heuristic */\nconst EXCLUDE_DIRS = new Set([\n 'test',\n 'tests',\n '__tests__',\n 'fixtures',\n 'fixture',\n 'examples',\n 'example',\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n 'e2e',\n 'spec',\n 'mocks',\n '__mocks__',\n])\n\n/** Directory names that suggest documentation */\nconst DOC_DIR_BONUS = new Set([\n 'docs',\n 'documentation',\n 'pages',\n 'content',\n 'website',\n 'guide',\n 'guides',\n 'wiki',\n 'manual',\n 'api',\n])\n\ninterface DiscoveredDocs {\n files: string[]\n /** Prefix before 'docs/' to strip when normalizing (e.g. 'apps/evalite-docs/src/content/') */\n prefix: string\n}\n\n/**\n * Check if a path contains any excluded directory\n */\nfunction hasExcludedDir(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => EXCLUDE_DIRS.has(p.toLowerCase()))\n}\n\n/**\n * Get the depth of a path (number of directory levels)\n */\nfunction getPathDepth(path: string): number {\n return path.split('/').filter(Boolean).length\n}\n\n/**\n * Check if path contains a doc-related directory name\n */\nfunction hasDocDirBonus(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => DOC_DIR_BONUS.has(p.toLowerCase()))\n}\n\n/**\n * Score a directory for doc likelihood.\n * Higher = better. Formula: count * nameBonus / depth\n */\nfunction scoreDocDir(dir: string, fileCount: number): number {\n const depth = getPathDepth(dir) || 1\n const nameBonus = hasDocDirBonus(dir) ? 1.5 : 1\n return (fileCount * nameBonus) / depth\n}\n\n/**\n * Discover doc files in non-standard locations.\n * First tries to find clusters of md/mdx files in paths containing /docs/.\n * Falls back to finding the directory with the most markdown files (≥5).\n */\nfunction discoverDocFiles(allFiles: string[]): DiscoveredDocs | null {\n const mdFiles = allFiles\n .filter(f => /\\.(?:md|mdx)$/.test(f))\n .filter(f => !NOISE_PATTERNS.some(p => p.test(f)))\n .filter(f => f.includes('/'))\n\n // Strategy 1: Look for /docs/ clusters (existing behavior)\n const docsGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n const docsIdx = file.lastIndexOf('/docs/')\n if (docsIdx === -1)\n continue\n\n const prefix = file.slice(0, docsIdx + '/docs/'.length)\n mapInsert(docsGroups, prefix, () => []).push(file)\n }\n\n if (docsGroups.size > 0) {\n const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0]!\n if (largest[1].length >= 3) {\n const fullPrefix = largest[0]\n const docsIdx = fullPrefix.lastIndexOf('docs/')\n const stripPrefix = docsIdx > 0 ? 
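/* e.g. fullPrefix 'apps/evalite-docs/src/content/docs/' → stripPrefix 'apps/evalite-docs/src/content/' */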
fullPrefix.slice(0, docsIdx) : ''\n return { files: largest[1], prefix: stripPrefix }\n }\n }\n\n // Strategy 2: Find best directory by file count (for non-standard structures)\n const dirGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n if (hasExcludedDir(file))\n continue\n\n // Group by immediate parent directory\n const lastSlash = file.lastIndexOf('/')\n if (lastSlash === -1)\n continue\n\n const dir = file.slice(0, lastSlash + 1)\n mapInsert(dirGroups, dir, () => []).push(file)\n }\n\n if (dirGroups.size === 0)\n return null\n\n // Score and sort directories\n const scored = [...dirGroups.entries()]\n .map(([dir, files]) => ({ dir, files, score: scoreDocDir(dir, files.length) }))\n .filter(d => d.files.length >= 5) // Minimum threshold\n .sort((a, b) => b.score - a.score)\n\n if (scored.length === 0)\n return null\n\n const best = scored[0]!\n\n // Use the best directory's full path as the prefix so downstream can strip it\n // e.g. best.dir 'website/pages/' -> files normalize to paths relative to that dir\n return { files: best.files, prefix: best.dir }\n}\n\n/**\n * List markdown files in a folder at a specific git ref\n */\nasync function listDocsAtRef(owner: string, repo: string, ref: string, pathPrefix = 'docs/'): Promise<string[]> {\n const files = await listFilesAtRef(owner, repo, ref)\n return filterDocFiles(files, pathPrefix)\n}\n\n/**\n * Fetch versioned docs from GitHub repo's docs/ folder.\n * Pass packageName to check doc overrides (e.g. vue -> vuejs/docs).\n */\nexport async function fetchGitDocs(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitDocsResult | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override) {\n const ref = override.ref || 'main'\n const fallback = !override.ref\n const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`)\n if (files.length === 0)\n return null\n return {\n baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,\n ref,\n files,\n fallback,\n // Strip non-standard prefix so sync normalizes paths under docs/\n docsPrefix: `${override.path}/` !== 'docs/' ? `${override.path}/` : undefined,\n }\n }\n\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n let docs = filterDocFiles(tag.files, 'docs/')\n let docsPrefix: string | undefined\n let allFiles: string[] | undefined\n\n // Fallback: discover docs in nested paths (monorepos, content collections)\n if (docs.length === 0) {\n const discovered = discoverDocFiles(tag.files)\n if (discovered) {\n docs = discovered.files\n docsPrefix = discovered.prefix || undefined\n allFiles = tag.files\n }\n }\n\n if (docs.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files: docs,\n docsPrefix,\n allFiles,\n fallback: tag.fallback,\n }\n}\n\n/**\n * Strip file extension (.md, .mdx) and leading slash from a path\n */\nfunction normalizePath(p: string): string {\n return p.replace(/^\//, '').replace(/\.(?:md|mdx)$/, '')\n}\n\n/**\n * Validate that discovered git docs are relevant by cross-referencing llms.txt links\n * against the repo file tree. 
Uses extensionless suffix matching to handle monorepo nesting.\n *\n * Returns { isValid, matchRatio } where isValid = matchRatio >= 0.3\n */\nexport function validateGitDocsWithLlms(\n llmsLinks: LlmsLink[],\n repoFiles: string[],\n): { isValid: boolean, matchRatio: number } {\n if (llmsLinks.length === 0)\n return { isValid: true, matchRatio: 1 }\n\n // Sample up to 10 links\n const sample = llmsLinks.slice(0, 10)\n\n // Normalize llms link paths\n const normalizedLinks = sample.map((link) => {\n let path = link.url\n // Strip absolute URL to pathname\n if (path.startsWith('http')) {\n try {\n path = new URL(path).pathname\n }\n catch { /* keep as-is */ }\n }\n return normalizePath(path)\n })\n\n // Pre-process repo files: strip extensions to get extensionless paths\n const repoNormalized = new Set(repoFiles.map(normalizePath))\n\n let matches = 0\n for (const linkPath of normalizedLinks) {\n // Check if any repo file ends with this path (suffix matching for monorepo nesting)\n for (const repoPath of repoNormalized) {\n if (repoPath === linkPath || repoPath.endsWith(`/${linkPath}`)) {\n matches++\n break\n }\n }\n }\n\n const matchRatio = matches / sample.length\n return { isValid: matchRatio >= 0.3, matchRatio }\n}\n\n/**\n * Verify a GitHub repo is the source for an npm package by checking package.json name field.\n * Checks root first, then common monorepo paths (packages/{shortName}, packages/{name}).\n */\nasync function verifyNpmRepo(owner: string, repo: string, packageName: string): Promise<boolean> {\n const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`\n const shortName = packageName.replace(/^@.*\\//, '')\n const paths = [\n 'package.json',\n `packages/${shortName}/package.json`,\n `packages/${packageName.replace(/^@/, '').replace('/', '-')}/package.json`,\n ]\n for (const path of paths) {\n const text = await fetchText(`${base}/${path}`)\n if (!text)\n continue\n try {\n const pkg = JSON.parse(text) as { name?: string }\n if (pkg.name === packageName)\n return true\n }\n catch {}\n }\n return false\n}\n\nexport async function searchGitHubRepo(packageName: string): Promise<string | null> {\n // Try ungh heuristic first — check if repo name matches package name\n const shortName = packageName.replace(/^@.*\\//, '')\n for (const candidate of [packageName.replace(/^@/, '').replace('/', '/'), shortName]) {\n // Only try if it looks like owner/repo\n if (!candidate.includes('/')) {\n // Try common patterns: {name}/{name}\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${shortName}/${shortName}`\n continue\n }\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${candidate}`\n }\n\n // Try gh CLI — strip @ to avoid GitHub search syntax issues\n const searchTerm = packageName.replace(/^@/, '')\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['search', 'repos', searchTerm, '--json', 'fullName', '--limit', '5'], {\n encoding: 'utf-8',\n timeout: 15_000,\n })\n if (!json)\n throw new Error('no output')\n const repos = JSON.parse(json) as Array<{ fullName: string }>\n // Prefer exact suffix match\n const match = repos.find(r =>\n r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.fullName}`\n // Validate remaining 
results via package.json\n for (const candidate of repos) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.fullName}`\n }\n }\n catch {\n // fall through to REST API\n }\n }\n\n // Fallback: GitHub REST search API (no auth needed, but rate-limited)\n const query = encodeURIComponent(`${searchTerm} in:name`)\n const data = await $fetch<{ items?: Array<{ full_name: string }> }>(\n `https://api.github.com/search/repositories?q=${query}&per_page=5`,\n ).catch(() => null)\n if (!data?.items?.length)\n return null\n\n // Prefer exact suffix match\n const match = data.items.find(r =>\n r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.full_name}`\n\n // Validate remaining results via package.json\n for (const candidate of data.items) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.full_name}`\n }\n\n return null\n}\n\n/**\n * Fetch GitHub repo metadata to get website URL.\n * Pass packageName to check doc overrides first (avoids API call).\n */\nexport async function fetchGitHubRepoMeta(owner: string, repo: string, packageName?: string): Promise<{ homepage?: string } | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override?.homepage)\n return { homepage: override.homepage }\n\n // Prefer gh CLI to avoid rate limits\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '-q', '{homepage}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (!json)\n throw new Error('no output')\n const data = JSON.parse(json) as { homepage?: string }\n return data?.homepage ? { homepage: data.homepage } : null\n }\n catch {\n // fall through to fetch\n }\n }\n\n const data = await $fetch<{ homepage?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n return data?.homepage ? { homepage: data.homepage } : null\n}\n\n/**\n * Resolve README URL for a GitHub repo, returns ungh:// pseudo-URL or raw URL\n */\nexport async function fetchReadme(owner: string, repo: string, subdir?: string, ref?: string): Promise<string | null> {\n const branch = ref || 'main'\n\n // Try ungh first\n const unghUrl = subdir\n ? `https://ungh.cc/repos/${owner}/${repo}/files/${branch}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ''}`\n\n const unghRes = await $fetch.raw(unghUrl).catch(() => null)\n\n if (unghRes?.ok) {\n return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ''}${ref ? `@${ref}` : ''}`\n }\n\n // Fallback to raw.githubusercontent.com — use GET instead of HEAD\n // because raw.githubusercontent.com sometimes returns HTML on HEAD for valid URLs\n const basePath = subdir ? `${subdir}/` : ''\n const branches = ref ? 
[ref] : ['main', 'master']\n for (const b of branches) {\n for (const filename of ['README.md', 'Readme.md', 'readme.md']) {\n const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`\n const res = await $fetch.raw(readmeUrl).catch(() => null)\n if (res?.ok)\n return readmeUrl\n }\n }\n\n return null\n}\n\n/**\n * Versioned source files resolved from a GitHub repo's src/ folder\n */\nexport interface GitSourceResult {\n /** URL pattern for fetching source */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of source file paths relative to repo root */\n files: string[]\n}\n\n/** Source file extensions to include */\nconst SOURCE_EXTENSIONS = new Set([\n '.ts',\n '.tsx',\n '.mts',\n '.cts',\n '.js',\n '.jsx',\n '.mjs',\n '.cjs',\n '.vue',\n '.svelte',\n '.astro',\n])\n\n/** Paths/patterns to exclude */\nconst EXCLUDE_PATTERNS = [\n /\.test\./,\n /\.spec\./,\n /\.d\.ts$/,\n /__tests__/,\n /__mocks__/,\n /\.config\./,\n /fixtures?\//,\n]\n\n/**\n * Filter source files from a file list\n */\nfunction filterSourceFiles(files: string[]): string[] {\n return files.filter((path) => {\n if (!path.startsWith('src/'))\n return false\n\n const ext = path.slice(path.lastIndexOf('.'))\n if (!SOURCE_EXTENSIONS.has(ext))\n return false\n if (EXCLUDE_PATTERNS.some(p => p.test(path)))\n return false\n\n return true\n })\n}\n\n/**\n * Fetch source files from GitHub repo's src/ folder\n */\nexport async function fetchGitSource(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitSourceResult | null> {\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n const files = filterSourceFiles(tag.files)\n if (files.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files,\n }\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport async function fetchReadmeContent(url: string): Promise<string | null> {\n // Local file\n if (url.startsWith('file://')) {\n const filePath = fileURLToPath(url)\n if (!fsExistsSync(filePath))\n return null\n return fsReadFileSync(filePath, 'utf-8')\n }\n\n if (url.startsWith('ungh://')) {\n let path = url.replace('ungh://', '')\n let ref = 'main'\n\n // Parse ref from owner/repo/subdir@ref\n const atIdx = path.lastIndexOf('@')\n if (atIdx !== -1) {\n ref = path.slice(atIdx + 1)\n path = path.slice(0, atIdx)\n }\n\n const parts = path.split('/')\n const owner = parts[0]\n const repo = parts[1]\n const subdir = parts.slice(2).join('/')\n\n const unghUrl = subdir\n ? 
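/* illustrative: 'ungh://acme/toolkit/docs/api@v1.2.0' parses to owner 'acme',\n repo 'toolkit', subdir 'docs/api', ref 'v1.2.0' */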
`https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`\n\n const text = await $fetch(unghUrl, { responseType: 'text' }).catch(() => null)\n if (!text)\n return null\n\n try {\n const json = JSON.parse(text) as { markdown?: string, file?: { contents?: string } }\n return json.markdown || json.file?.contents || null\n }\n catch {\n return text\n }\n }\n\n return fetchText(url)\n}\n\n/**\n * Resolve a GitHub repo into a ResolvedPackage (no npm registry needed).\n * Fetches repo meta, latest release version, git docs, README, and llms.txt.\n */\nexport async function resolveGitHubRepo(\n owner: string,\n repo: string,\n onProgress?: (msg: string) => void,\n): Promise<ResolvedPackage | null> {\n onProgress?.('Fetching repo metadata')\n\n // Fetch repo metadata (homepage, description) via gh CLI or GitHub API\n const repoUrl = `https://github.com/${owner}/${repo}`\n let homepage: string | undefined\n let description: string | undefined\n\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '--jq', '{homepage: .homepage, description: .description}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (json) {\n const data = JSON.parse(json) as { homepage?: string, description?: string }\n homepage = data.homepage || undefined\n description = data.description || undefined\n }\n }\n catch { /* fall through */ }\n }\n\n if (!homepage && !description) {\n const data = await $fetch<{ homepage?: string, description?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n homepage = data?.homepage || undefined\n description = data?.description || undefined\n }\n\n // Fetch latest release tag for version\n onProgress?.('Fetching latest release')\n const releasesData = await $fetch<{ releases?: Array<{ tag: string, publishedAt?: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n\n let version = 'main'\n let releasedAt: string | undefined\n const latestRelease = releasesData?.releases?.[0]\n if (latestRelease) {\n // Extract version from tag (strip leading \"v\")\n version = latestRelease.tag.replace(/^v/, '')\n releasedAt = latestRelease.publishedAt\n }\n\n // Fetch git docs\n onProgress?.('Resolving docs')\n const gitDocs = await fetchGitDocs(owner, repo, version)\n const gitDocsUrl = gitDocs ? `${repoUrl}/tree/${gitDocs.ref}/docs` : undefined\n const gitRef = gitDocs?.ref\n\n // Fetch README\n onProgress?.('Fetching README')\n const readmeUrl = await fetchReadme(owner, repo)\n\n // Check for llms.txt at homepage\n let llmsUrl: string | undefined\n if (homepage) {\n onProgress?.('Checking llms.txt')\n llmsUrl = await fetchLlmsUrl(homepage).catch(() => null) ?? undefined\n }\n\n // Must have at least some docs\n if (!gitDocsUrl && !readmeUrl && !llmsUrl)\n return null\n\n return {\n name: repo,\n version: latestRelease ? version : undefined,\n releasedAt,\n description,\n repoUrl,\n docsUrl: homepage,\n gitDocsUrl,\n gitRef,\n gitDocsFallback: gitDocs?.fallback,\n readmeUrl: readmeUrl ?? 
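/* coerce null → undefined so the optional field is simply omitted */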
undefined,\n llmsUrl,\n }\n}\n","/**\n * NPM registry lookup\n */\n\nimport type { LocalDependency, NpmPackageInfo, ResolveAttempt, ResolvedPackage, ResolveResult } from './types.ts'\nimport { spawnSync } from 'node:child_process'\nimport { createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, unlinkSync } from 'node:fs'\nimport { Writable } from 'node:stream'\nimport { pathToFileURL } from 'node:url'\nimport { resolvePathSync } from 'mlly'\nimport { basename, dirname, join, resolve } from 'pathe'\nimport { getCacheDir } from '../cache/version.ts'\nimport { fetchGitDocs, fetchGitHubRepoMeta, fetchReadme, searchGitHubRepo, validateGitDocsWithLlms } from './github.ts'\nimport { fetchLlmsTxt, fetchLlmsUrl } from './llms.ts'\nimport { $fetch, isGitHubRepoUrl, isUselessDocsUrl, normalizeRepoUrl, parseGitHubUrl } from './utils.ts'\n\n/**\n * Search npm registry for packages matching a query.\n * Used as a fallback when direct package lookup fails.\n */\nexport async function searchNpmPackages(query: string, size = 5): Promise<Array<{ name: string, description?: string, version: string }>> {\n const data = await $fetch<{\n objects: Array<{ package: { name: string, description?: string, version: string } }>\n }>(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null)\n\n if (!data?.objects?.length)\n return []\n\n return data.objects.map(o => ({\n name: o.package.name,\n description: o.package.description,\n version: o.package.version,\n }))\n}\n\n/**\n * Fetch package info from npm registry\n */\nexport async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo | null> {\n // Try unpkg first (faster, CDN)\n const data = await $fetch<NpmPackageInfo>(`https://unpkg.com/${packageName}/package.json`).catch(() => null)\n if (data)\n return data\n\n // Fallback to npm registry\n return $fetch<NpmPackageInfo>(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null)\n}\n\nexport interface DistTagInfo {\n version: string\n releasedAt?: string\n}\n\nexport interface NpmRegistryMeta {\n releasedAt?: string\n distTags?: Record<string, DistTagInfo>\n}\n\n/**\n * Fetch release date and dist-tags from npm registry\n */\nexport async function fetchNpmRegistryMeta(packageName: string, version: string): Promise<NpmRegistryMeta> {\n const data = await $fetch<{\n 'time'?: Record<string, string>\n 'dist-tags'?: Record<string, string>\n }>(`https://registry.npmjs.org/${packageName}`).catch(() => null)\n\n if (!data)\n return {}\n\n // Enrich dist-tags with release dates\n const distTags: Record<string, DistTagInfo> | undefined = data['dist-tags']\n ? 
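/* e.g. dist-tags { latest: '2.1.0' } enriched to { latest: { version: '2.1.0', releasedAt: time['2.1.0'] } } */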
Object.fromEntries(\n Object.entries(data['dist-tags']).map(([tag, ver]) => [\n tag,\n { version: ver, releasedAt: data.time?.[ver] },\n ]),\n )\n : undefined\n\n return {\n releasedAt: data.time?.[version] || undefined,\n distTags,\n }\n}\n\nexport type ResolveStep = 'npm' | 'github-docs' | 'github-meta' | 'github-search' | 'readme' | 'llms.txt' | 'local'\n\nexport interface ResolveOptions {\n /** User's installed version - used to fetch versioned git docs */\n version?: string\n /** Current working directory - for local readme fallback */\n cwd?: string\n /** Progress callback - called before each resolution step */\n onProgress?: (step: ResolveStep) => void\n}\n\n/**\n * Shared GitHub resolution cascade: git docs → repo meta (homepage) → README.\n * Used for both \"repo URL found in package.json\" and \"repo URL found via search\" paths.\n */\nasync function resolveGitHub(\n gh: { owner: string, repo: string },\n targetVersion: string | undefined,\n pkg: { name: string },\n result: ResolvedPackage,\n attempts: ResolveAttempt[],\n onProgress?: (step: ResolveStep) => void,\n opts?: { rawRepoUrl?: string, subdir?: string },\n): Promise<string[] | undefined> {\n let allFiles: string[] | undefined\n\n // Try versioned git docs first (docs/**/*.md at git tag)\n if (targetVersion) {\n onProgress?.('github-docs')\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name, opts?.rawRepoUrl)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n allFiles = gitDocs.allFiles\n attempts.push({\n source: 'github-docs',\n url: gitDocs.baseUrl,\n status: 'success',\n message: gitDocs.fallback\n ? `Found ${gitDocs.files.length} docs at ${gitDocs.ref} (no tag for v${targetVersion})`\n : `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`,\n })\n }\n else {\n attempts.push({\n source: 'github-docs',\n url: `${result.repoUrl}/tree/v${targetVersion}/docs`,\n status: 'not-found',\n message: 'No docs/ folder found at version tag',\n })\n }\n }\n\n // If no docsUrl yet (npm had no homepage), try GitHub repo metadata\n if (!result.docsUrl) {\n onProgress?.('github-meta')\n const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name)\n if (repoMeta?.homepage && !isUselessDocsUrl(repoMeta.homepage)) {\n result.docsUrl = repoMeta.homepage\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'success',\n message: `Found homepage: ${repoMeta.homepage}`,\n })\n }\n else {\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'not-found',\n message: 'No homepage in repo metadata',\n })\n }\n }\n\n // README fallback via ungh\n onProgress?.('readme')\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, opts?.subdir, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n attempts.push({\n source: 'readme',\n url: readmeUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'readme',\n url: `${result.repoUrl}/README.md`,\n status: 'not-found',\n message: 'No README found',\n })\n }\n\n return allFiles\n}\n\n/**\n * Resolve documentation URL for a package (legacy - returns null on failure)\n */\nexport async function resolvePackageDocs(packageName: string, options: ResolveOptions = {}): Promise<ResolvedPackage | null> {\n const result = await resolvePackageDocsWithAttempts(packageName, options)\n return result.package\n}\n\n/**\n * Resolve documentation URL for a package with attempt tracking\n */\nexport async function 
resolvePackageDocsWithAttempts(packageName: string, options: ResolveOptions = {}): Promise<ResolveResult> {\n const attempts: ResolveAttempt[] = []\n const { onProgress } = options\n\n onProgress?.('npm')\n const pkg = await fetchNpmPackage(packageName)\n if (!pkg) {\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'not-found',\n message: 'Package not found on npm registry',\n })\n return { package: null, attempts }\n }\n\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'success',\n message: `Found ${pkg.name}@${pkg.version}`,\n })\n\n // Fetch release date and dist-tags for this version\n const registryMeta = pkg.version\n ? await fetchNpmRegistryMeta(packageName, pkg.version)\n : {}\n\n const result: ResolvedPackage = {\n name: pkg.name,\n version: pkg.version,\n releasedAt: registryMeta.releasedAt,\n description: pkg.description,\n dependencies: pkg.dependencies,\n distTags: registryMeta.distTags,\n }\n\n // Track allFiles from heuristic git doc discovery for llms.txt validation\n let gitDocsAllFiles: string[] | undefined\n\n // Extract repo URL (handle both object and shorthand string formats)\n let subdir: string | undefined\n let rawRepoUrl: string | undefined\n if (typeof pkg.repository === 'object' && pkg.repository?.url) {\n rawRepoUrl = pkg.repository.url\n const normalized = normalizeRepoUrl(rawRepoUrl)\n // Handle shorthand \"owner/repo\" in repository.url field (e.g. cac)\n if (!normalized.includes('://') && normalized.includes('/') && !normalized.includes(':'))\n result.repoUrl = `https://github.com/${normalized}`\n else\n result.repoUrl = normalized\n subdir = pkg.repository.directory\n }\n else if (typeof pkg.repository === 'string') {\n if (pkg.repository.includes('://')) {\n // Full URL string (e.g. 
\"https://github.com/org/repo/tree/main/packages/sub\")\n const gh = parseGitHubUrl(pkg.repository)\n if (gh)\n result.repoUrl = `https://github.com/${gh.owner}/${gh.repo}`\n }\n else {\n // Shorthand: \"owner/repo\" or \"github:owner/repo\"\n const repo = pkg.repository.replace(/^github:/, '')\n if (repo.includes('/') && !repo.includes(':'))\n result.repoUrl = `https://github.com/${repo}`\n }\n }\n\n // Use npm homepage early (skip GitHub repo URLs)\n if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage) && !isUselessDocsUrl(pkg.homepage)) {\n result.docsUrl = pkg.homepage\n }\n\n // GitHub repo handling - try versioned git docs first\n if (result.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(result.repoUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, { rawRepoUrl, subdir })\n }\n }\n else if (!result.repoUrl) {\n // No repo URL in package.json — try to find it via GitHub search\n onProgress?.('github-search')\n const searchedUrl = await searchGitHubRepo(pkg.name)\n if (searchedUrl) {\n result.repoUrl = searchedUrl\n attempts.push({\n source: 'github-search',\n url: searchedUrl,\n status: 'success',\n message: `Found via GitHub search: ${searchedUrl}`,\n })\n\n const gh = parseGitHubUrl(searchedUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress)\n }\n }\n else {\n attempts.push({\n source: 'github-search',\n status: 'not-found',\n message: 'No repository URL in package.json and GitHub search found no match',\n })\n }\n }\n\n // Check for llms.txt on docsUrl\n if (result.docsUrl) {\n onProgress?.('llms.txt')\n const llmsUrl = await fetchLlmsUrl(result.docsUrl)\n if (llmsUrl) {\n result.llmsUrl = llmsUrl\n attempts.push({\n source: 'llms.txt',\n url: llmsUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'llms.txt',\n url: `${new URL(result.docsUrl).origin}/llms.txt`,\n status: 'not-found',\n message: 'No llms.txt at docs URL',\n })\n }\n }\n\n // Validate heuristic git docs against llms.txt links\n if (result.gitDocsUrl && result.llmsUrl && gitDocsAllFiles) {\n const llmsContent = await fetchLlmsTxt(result.llmsUrl)\n if (llmsContent && llmsContent.links.length > 0) {\n const validation = validateGitDocsWithLlms(llmsContent.links, gitDocsAllFiles)\n if (!validation.isValid) {\n attempts.push({\n source: 'github-docs',\n url: result.gitDocsUrl,\n status: 'not-found',\n message: `Heuristic git docs don't match llms.txt links (${Math.round(validation.matchRatio * 100)}% match), preferring llms.txt`,\n })\n result.gitDocsUrl = undefined\n result.gitRef = undefined\n }\n }\n }\n\n // Fallback: check local node_modules readme when all else fails\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {\n onProgress?.('local')\n const pkgDir = join(options.cwd, 'node_modules', packageName)\n // Check common readme variations (case-insensitive)\n const readmeFile = existsSync(pkgDir) && readdirSync(pkgDir).find(f => /^readme\\.md$/i.test(f))\n if (readmeFile) {\n const readmePath = join(pkgDir, readmeFile)\n result.readmeUrl = pathToFileURL(readmePath).href\n attempts.push({\n source: 'readme',\n url: readmePath,\n status: 'success',\n message: 'Found local readme in node_modules',\n })\n }\n }\n\n // Must have at least one source\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && 
!result.gitDocsUrl) {\n return { package: null, attempts }\n }\n\n return { package: result, attempts }\n}\n\n/**\n * Parse version specifier, handling protocols like link:, workspace:, npm:, file:\n */\nexport function parseVersionSpecifier(\n name: string,\n version: string,\n cwd: string,\n): LocalDependency | null {\n // link: - resolve local package.json\n if (version.startsWith('link:')) {\n const linkPath = resolve(cwd, version.slice(5))\n const linkedPkgPath = join(linkPath, 'package.json')\n if (existsSync(linkedPkgPath)) {\n const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, 'utf-8'))\n return {\n name: linkedPkg.name || name,\n version: linkedPkg.version || '0.0.0',\n }\n }\n return null // linked package doesn't exist\n }\n\n // npm: - extract aliased package name\n if (version.startsWith('npm:')) {\n const specifier = version.slice(4)\n const atIndex = specifier.startsWith('@')\n ? specifier.indexOf('@', 1)\n : specifier.indexOf('@')\n const realName = atIndex > 0 ? specifier.slice(0, atIndex) : specifier\n return { name: realName, version: resolveInstalledVersion(realName, cwd) || '*' }\n }\n\n // file: and git: - skip (local/custom sources)\n if (version.startsWith('file:') || version.startsWith('git:') || version.startsWith('git+')) {\n return null\n }\n\n // For everything else (semver, catalog:, workspace:, etc.)\n // resolve the actual installed version from node_modules\n const installed = resolveInstalledVersion(name, cwd)\n if (installed)\n return { name, version: installed }\n\n // Fallback: strip semver prefix if it looks like one\n if (/^[\\^~>=<\\d]/.test(version))\n return { name, version: version.replace(/^[\\^~>=<]/, '') }\n\n // catalog: and workspace: specifiers - include with wildcard version\n // so the dep isn't silently dropped from state.deps\n if (version.startsWith('catalog:') || version.startsWith('workspace:'))\n return { name, version: '*' }\n\n return null\n}\n\n/**\n * Resolve the actual installed version of a package by finding its package.json\n * via mlly's resolvePathSync. 
Works regardless of package manager or version protocol.\n */\nexport function resolveInstalledVersion(name: string, cwd: string): string | null {\n try {\n const resolved = resolvePathSync(`${name}/package.json`, { url: cwd })\n const pkg = JSON.parse(readFileSync(resolved, 'utf-8'))\n return pkg.version || null\n }\n catch {\n // Packages with `exports` that don't expose ./package.json\n // Resolve the entry point, then walk up to find package.json\n try {\n const entry = resolvePathSync(name, { url: cwd })\n let dir = dirname(entry)\n while (dir && basename(dir) !== 'node_modules') {\n const pkgPath = join(dir, 'package.json')\n if (existsSync(pkgPath)) {\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n return pkg.version || null\n }\n dir = dirname(dir)\n }\n }\n catch {}\n return null\n }\n}\n\n/**\n * Read package.json dependencies with versions\n */\nexport async function readLocalDependencies(cwd: string): Promise<LocalDependency[]> {\n const pkgPath = join(cwd, 'package.json')\n if (!existsSync(pkgPath)) {\n throw new Error('No package.json found in current directory')\n }\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n const deps: Record<string, string> = {\n ...pkg.dependencies,\n ...pkg.devDependencies,\n }\n\n const results: LocalDependency[] = []\n\n for (const [name, version] of Object.entries(deps)) {\n const parsed = parseVersionSpecifier(name, version, cwd)\n if (parsed) {\n results.push(parsed)\n }\n }\n\n return results\n}\n\nexport interface LocalPackageInfo {\n name: string\n version: string\n description?: string\n repoUrl?: string\n localPath: string\n}\n\n/**\n * Read package info from a local path (for link: deps)\n */\nexport function readLocalPackageInfo(localPath: string): LocalPackageInfo | null {\n const pkgPath = join(localPath, 'package.json')\n if (!existsSync(pkgPath))\n return null\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n\n let repoUrl: string | undefined\n if (pkg.repository?.url) {\n repoUrl = normalizeRepoUrl(pkg.repository.url)\n }\n else if (typeof pkg.repository === 'string') {\n repoUrl = normalizeRepoUrl(pkg.repository)\n }\n\n return {\n name: pkg.name,\n version: pkg.version || '0.0.0',\n description: pkg.description,\n repoUrl,\n localPath,\n }\n}\n\n/**\n * Resolve docs for a local package (link: dependency)\n */\nexport async function resolveLocalPackageDocs(localPath: string): Promise<ResolvedPackage | null> {\n const info = readLocalPackageInfo(localPath)\n if (!info)\n return null\n\n const result: ResolvedPackage = {\n name: info.name,\n version: info.version,\n description: info.description,\n repoUrl: info.repoUrl,\n }\n\n // Try GitHub if repo URL available\n if (info.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(info.repoUrl)\n if (gh) {\n // Try versioned git docs\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n }\n\n // README fallback via ungh\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, undefined, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n }\n }\n }\n\n // Fallback: read local readme (case-insensitive)\n if (!result.readmeUrl && !result.gitDocsUrl) {\n const readmeFile = readdirSync(localPath).find(f => /^readme\\.md$/i.test(f))\n if (readmeFile) {\n result.readmeUrl = pathToFileURL(join(localPath, readmeFile)).href\n }\n }\n\n if (!result.readmeUrl && !result.gitDocsUrl) {\n 
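// nothing resolvable: no git docs and no README found for this local package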
return null\n }\n\n return result\n}\n\n/**\n * Download and extract npm package tarball to cache directory.\n * Used when the package isn't available in node_modules.\n *\n * Extracts to: ~/.skilld/references/<pkg>@<version>/pkg/\n * Returns the extracted directory path, or null on failure.\n */\nexport async function fetchPkgDist(name: string, version: string): Promise<string | null> {\n const cacheDir = getCacheDir(name, version)\n const pkgDir = join(cacheDir, 'pkg')\n\n // Already extracted\n if (existsSync(join(pkgDir, 'package.json')))\n return pkgDir\n\n // Fetch version metadata to get tarball URL\n const data = await $fetch<{ dist?: { tarball?: string } }>(\n `https://registry.npmjs.org/${name}/${version}`,\n ).catch(() => null)\n if (!data)\n return null\n const tarballUrl = data.dist?.tarball\n if (!tarballUrl)\n return null\n\n // Download tarball to temp file\n const tarballRes = await fetch(tarballUrl, {\n headers: { 'User-Agent': 'skilld/1.0' },\n }).catch(() => null)\n\n if (!tarballRes?.ok || !tarballRes.body)\n return null\n\n mkdirSync(pkgDir, { recursive: true })\n\n const tmpTarball = join(cacheDir, '_pkg.tgz')\n const fileStream = createWriteStream(tmpTarball)\n\n // Stream response body to file\n const reader = tarballRes.body.getReader()\n await new Promise<void>((res, reject) => {\n const writable = new Writable({\n write(chunk, _encoding, callback) {\n fileStream.write(chunk, callback)\n },\n })\n writable.on('finish', () => {\n fileStream.end()\n res()\n })\n writable.on('error', reject)\n\n function pump() {\n reader.read().then(({ done, value }) => {\n if (done) {\n writable.end()\n return\n }\n writable.write(value, () => pump())\n }).catch(reject)\n }\n pump()\n })\n\n // Extract tarball — npm tarballs have a \"package/\" prefix\n const { status } = spawnSync('tar', ['xzf', tmpTarball, '--strip-components=1', '-C', pkgDir], { stdio: 'ignore' })\n if (status !== 0) {\n rmSync(pkgDir, { recursive: true, force: true })\n rmSync(tmpTarball, { force: true })\n return null\n }\n\n unlinkSync(tmpTarball)\n return pkgDir\n}\n\n/**\n * Fetch just the latest version string from npm (lightweight)\n */\nexport async function fetchLatestVersion(packageName: string): Promise<string | null> {\n const data = await $fetch<{ version?: string }>(\n `https://unpkg.com/${packageName}/package.json`,\n ).catch(() => null)\n return data?.version || null\n}\n\n/**\n * Get installed skill version from SKILL.md\n */\nexport function getInstalledSkillVersion(skillDir: string): string | null {\n const skillPath = join(skillDir, 'SKILL.md')\n if (!existsSync(skillPath))\n return null\n\n const content = readFileSync(skillPath, 'utf-8')\n const match = content.match(/^version:\\s*\"?([^\"\\n]+)\"?/m)\n return match?.[1] || 
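/* matches frontmatter like: version: "1.2.3" (quotes optional; example illustrative) */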
null\n}\n"],"mappings":";;;;;;;;;;;;;AAIA,MAAa,YAAY,IAAI,IAAI;CAC/B;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAa,WAAW,QAAgB,IAAI,MAAM,IAAI,CAAC;AAGvD,SAAgB,iBAAiB,QAAuE;CACtG,MAAM,QAAQ,CAAC,MAAM;AACrB,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,CACzC,KAAI,MAAM,KAAA,EACR,OAAM,KAAK,GAAG,EAAE,IAAI,OAAO,MAAM,YAAY,UAAU,KAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,KAAK,IAAI;AAEzG,OAAM,KAAK,MAAM;AACjB,QAAO,MAAM,KAAK,KAAK;;AClBzB,SAAgB,UAAgB,KAAgB,KAAQ,QAAoB;CAC1E,IAAI,MAAM,IAAI,IAAI,IAAI;AACtB,KAAI,QAAQ,KAAA,GAAW;AACrB,QAAM,QAAQ;AACd,MAAI,IAAI,KAAK,IAAI;;AAEnB,QAAO;;AAIT,SAAgB,SAAS,GAAW,GAAoB;AACtD,QAAOA,GAAU,GAAG,GAAG,KAAK;;AAG9B,MAAa,oBAAoB;AAGjC,SAAgB,mBAAmB,MAAc,QAAQ,KAAK,EAAiB;CAC7E,MAAM,MAAM,KAAK,KAAK,kBAAkB;AACxC,QAAO,WAAW,IAAI,GAAG,MAAM;;ACcjC,IAAI;AAKJ,SAAgB,gBAAyB;AACvC,KAAI,iBAAiB,KAAA,EACnB,QAAO;CACT,MAAM,EAAE,WAAW,UAAU,MAAM,CAAC,QAAQ,SAAS,EAAE,EAAE,OAAO,UAAU,CAAC;AAC3E,QAAQ,eAAe,WAAW;;AAIpC,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,iBAAiB,IAAI,IAAI;CAC7B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,aAAa,IAAI,IAAI;CACzB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,kBAAkB,IAAI,IAAI;CAC9B;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,cAAc,IAAI,IAAI;CAC1B;CACA;CACA;CACA;CACD,CAAC;AAMF,SAAS,gBAAgB,OAAe,UAAgC;AACtE,MAAK,MAAM,WAAW,SACpB,KAAI,UAAU,WAAW,MAAM,SAAS,QAAQ,CAC9C,QAAO;AAEX,QAAO;;AAMT,SAAgB,cAAc,QAA6B;CACzD,MAAM,QAAQ,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC;AAC9C,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,WAAW,CAAC,CACjD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,gBAAgB,CAAC,CACtD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,YAAY,CAAC,CAClD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,eAAe,CAAC,CACrD,QAAO;AACT,QAAO;;AAMT,SAAS,aAAa,OAAmE;AAEvF,KADc,MAAM,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC,CAC1C,MAAK,MAAK,gBAAgB,GAAG,aAAa,CAAC,CACnD,QAAO;AAET,KAAI,MAAM,MAAM,WAAW,KAAK,IAAI,MAAM,MAAM,WAAW,SAAS,IAAI,MAAM,MAAM,WAAW,aAAa,CAC1G,QAAO;AACT,QAAO;;AAIT,SAASC,eAAa,MAAuB;AAC3C,QAAO,iBAAiB,KAAK,KAAK,IAAI,UAAU,KAAK,KAAK;;AAQ5D,SAAgB,eAAe,OAAoE;CACjG,MAAM,QAAQ,MAAM,QAAQ,IAAI,MAAM;AAEtC,KAAI,KAAK,SAAS,OAAO,CAACA,eAAa,KAAK,IAAI,MAAM,YAAY,GAChE,QAAO;AAET,KAAI,yCAAyC,KAAK,MAAM,MAAM,IAAI,CAACA,eAAa,KAAK,CACnF,QAAO;AACT,QAAO;;AAQT,SAAgB,eAAe,WAAmB,WAA2B;AAG3E,QAAO,aAAa,KAAK,KAFX,KAAK,KAAK,GAAG,IAAI,KAAK,UAAU,CAAC,SAAS,KAC9B,SAAS,KAAK,KAAK,KAAK,OACV;;AAO1C,SAAS,gBAAgB,QAAuB,OAA8B;CAC5E,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,OAClB,WAAU,QAAQ,MAAM,YAAY,EAAE,CAAC,CAAC,KAAK,MAAM;AAIrD,MAAK,MAAM,SAAS,OAAO,QAAQ,CACjC,OAAM,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAGzC,MAAM,SAAgC;EACpC,CAAC,OAAO,KAAK,KAAK,QAAQ,GAAK,CAAC;EAChC,CAAC,YAAY,KAAK,KAAK,QAAQ,GAAK,CAAC;EACrC,CAAC,QAAQ,KAAK,KAAK,QAAQ,IAAK,CAAC;EACjC,CAAC,WAAW,KAAK,KAAK,QAAQ,GAAK,CAAC;EACpC,CAAC,SAAS,KAAK,KAAK,QAAQ,IAAK,CAAA;EAClC;CAED,MAAM,WAA0B,EAAE;CAClC,MAAM,uBAAO,IAAI,KAAa;CAC9B,IAAI,YAAY;AAGhB,MAAK,MAAM,CAAC,MAAM,UAAU,QAAQ;EAClC,MAAM,QAAQ,OAAO,IAAI,KAAK,IAAI,EAAE;EACpC,MAAM,OAAO,KAAK,IAAI,OAAO,MAAM,QAAQ,UAAU;AACrD,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,KAAK;AAC7B,YAAS,KAAK,MAAM,GAAI;AACxB,QAAK,IAAI,MAAM,GAAI,OAAO;AAC1B;;;AAKJ,KAAI,YAAY,GAAG;EACjB,MAAM,SAAS,OACZ,QAAO,MAAK,CAAC,KAAK,IAAI,EAAE,OAAO,IAAI,EAAE,SAAS,UAAU,CACxD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AACpC,OAAK,MAAM,SAAS,QAAQ;AAC1B,OAAI,aAAa,EACf;AACF,YAAS,KAAK,MAAM;AACpB;;;AAIJ,QAAO,SAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;;AAMnD,SAAS,UAAU,WAA2B;AAC5C,KAAI,aAAa,GACf,QAAO;AACT,KAAI,aAAa,EACf,QAAO;AACT,QAAO;;AAOT,SAASC,eAAa,MAAc,OAAuB;AACzD,KAAI,KAAK,UAAU,MACjB,QAAO;CAGT,MAAM,cAAc;CACpB,IAAI,cAAc;CAClB,IAAI;AAGJ,SAAQ,QAAQ,YAAY,KAAK,KAAK,MAAM,MAAM;EAChD,MAAM,aAAa,MAAM;EACzB,MAAM,WAAW,aAAa
,MAAM,GAAG;AAIvC,MAAI,aAAa,SAAS,WAAW,OAAO;AAC1C,OAAI,YAAY,QAAQ,IAEtB,eAAc;OAId,eAAc;AAEhB;;;CAKJ,MAAM,QAAQ,KAAK,MAAM,GAAG,YAAY;CACxC,MAAM,gBAAgB,MAAM,YAAY,OAAO;AAC/C,KAAI,gBAAgB,cAAc,GAChC,QAAO,GAAG,MAAM,MAAM,GAAG,cAAc,CAAC;AAE1C,QAAO,GAAG,MAAM;;AAMlB,SAAS,mBACP,OACA,MACA,OACA,OACA,YACA,UACe;CACf,MAAM,aAAa,KAAK,IAAI,QAAQ,GAAG,IAAI;CAC3C,IAAI,WAAW;AACf,KAAI,SAEF,YAAW,UAAU,WACjB,aAAa,aACb,cAAc;UAEX,UAAU,SACjB,KAAI,YAAY;EAEd,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,aAAa,QAAQ,KAAK,aAAa,CAAC;OAGnD,YAAW,YAAY,YAAY;UAG9B,YAAY;EAEnB,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,cAAc,QAAQ,KAAK,aAAa,CAAC;;CAKtD,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;EACzC;EACA,mBAJQ,QAAQ,MAAM,GAAG,KAAK,eAAe,QAAQ,WAIhC,sCAAsC;EAC3D;EACA;EACD,EAAE;EAAE,UAAU;EAAS,WAAW,KAAK,OAAO;EAAM,CAAC;AAEtD,KAAI,CAAC,OACH,QAAO,EAAE;AAEX,QAAO,OACJ,MAAM,CACN,MAAM,KAAK,CACX,OAAO,QAAQ,CACf,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAgF,CAC5G,QAAO,UAAS,CAAC,UAAU,IAAI,MAAM,KAAK,IAAI,MAAM,aAAa,MAAM,CACvE,QAAO,UAAS,CAAC,aAAa,MAAM,CAAC,CACrC,QAAO,UAAS,CAAC,eAAe,MAAM,CAAC,CACvC,KAAK,EAAE,MAAM,GAAG,UAAU,IAAI,mBAAmB,GAAG,YAAY;EAC/D,MAAM,eAAe;GAAC;GAAS;GAAU;GAAe,CAAC,SAAS,kBAAkB;EACpF,MAAM,YAAY,eAAe,KAAK,MAAM,MAAM,IAAI,MAAM,OAAO,MAAK,MAAK,WAAW,KAAK,EAAE,CAAC;AAChG,SAAO;GACL,GAAG;GACH,MAAM,cAAc,MAAM,OAAO;GACjC,aAAa,EAAE;GACf,OAAO,eAAe,MAAM,WAAW,MAAM,UAAU,IAAI,gBAAgB,YAAY,IAAI;GAC5F;GACD,CACD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,MAAM;;AAGpB,SAAS,aAAqB;CAC5B,MAAM,oBAAI,IAAI,MAAM;AACpB,GAAE,YAAY,EAAE,aAAa,GAAG,EAAE;AAClC,QAAO,QAAQ,EAAE,aAAa,CAAC;;AAIjC,MAAMC,qBAAmB;AAQzB,SAAS,mBAAmB,OAAe,MAAc,QAAuB,OAAO,IAAU;CAE/F,MAAM,QAAQ,OACX,QAAO,MAAK,EAAE,WAAW,MAAM,EAAE,SAAS,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa,GAAG,CAC9F,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,KAAK;AAEjB,KAAI,MAAM,WAAW,EACnB;CAQF,MAAM,QAAQ,qFAJI,MAAM,KAAK,OAAO,MAClC,IAAI,EAAE,kBAAkB,MAAM,OAAO,0GACtC,CAAC,KAAK,IAAI,CAEkG;AAE7G,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA;GACA;GACA,SAAS;GACT;GACA,SAAS;GACT;GACA,QAAQ;GACT,EAAE;GAAE,UAAU;GAAS,WAAW,KAAK,OAAO;GAAM,CAAC;AAEtD,MAAI,CAAC,OACH;EAGF,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM;AAC1B,MAAI,CAAC,MACH;AAEF,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,UAAU;AACxC,OAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;GAEF,MAAM,QAAQ,MAAM;GAEpB,MAAM,WAAkD,MACrD,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW,CAACA,mBAAiB,MAAM,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC,CACjE,KAAK,MAAW;IACf,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,kBAAkB;IACtF,MAAM,OAAO,EAAE,QAAQ;IACvB,MAAM,YAAY,EAAE,WAAW,cAAc;IAE7C,MAAM,UAAU,eAAe,IAAI,MAAMF,eAAa,KAAK,GAAG,IAAI,MAAM,IAAI;AAC5E,WAAO;KAAE;KAAM,QAAQ,EAAE,OAAO;KAAO;KAAW;KAAc;KAAQ;KACxE,CACD,MAAM,GAAQ,MAAW,EAAE,SAAS,EAAE,OAAO;AAGhD,SAAM,cAAc,SAAS,MAAM,GAAG,EAAE,CAAC,KAAK,EAAE,QAAQ,GAAG,GAAG,QAAQ,EAAE;AAGxE,OAAI,MAAM,UAAU,SAClB,OAAM,aAAa,sBAAsB,SAAS;;SAIlD;;AASR,SAAS,sBAAsB,UAA8C;CAC3E,MAAM,qBAAqB,SAAS,QAAO,MAAK,EAAE,aAAa;AAE/D,MAAK,MAAM,KAAK,mBAAmB,SAAS,EAAE;EAE5C,MAAM,QAAQ,EAAE,KAAK,MAAM,+FAA+F;AAC1H,MAAI,MACF,QAAO,MAAM;AAEf,MAAI,EAAE,KAAK,SAAS,KAAK;GACvB,MAAM,SAAS,EAAE,KAAK,MAAM,wBAAwB;AACpD,OAAI,OACF,QAAO,OAAO;;;;AAWtB,eAAsB,kBACpB,OACA,MACA,QAAQ,IACR,YACA,UACwB;AACxB,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;CAEX,MAAM,YAAY,KAAK,KAAK,QAAQ,IAAK;CACzC,MAAM,cAAc,QAAQ;AAE5B,KAAI;EAEF,MAAM,OAAO,mBAAmB,OAAO,MAAM,QAAQ,KAAK,IAAI,YAAY,GAAG,IAAI,EAAE,YAAY,SAAS;EACxG,MAAM,SAAS,mBAAmB,OAAO,MAAM,UAAU,KAAK,IAAI,cAAc,GAAG,GAAG,EAAE,YAAY,SAAS;EAE7G,MAAM,WAAW,gBADL,CAAC,GAAG,MAAM,GAAG,OAAO,EACM,MAAM;AAC5C,qBAAmB,OAAO,MAAM,SAAS;AACzC,SAAO;SAEH;AACJ,SAAO,EAAE;;;AAOb,SAAgB,sBAAsB,OAA4B;C
AChE,MAAM,QAAQ,UAAU,MAAM,UAAU;CACxC,MAAM,WAAkE;EACtE,QAAQ,MAAM;EACd,OAAO,MAAM;EACb,MAAM,MAAM;EACZ,OAAO,MAAM;EACb,SAAS,QAAQ,MAAM,UAAU;EACjC,KAAK,MAAM;EACX,WAAW,MAAM;EACjB,UAAU,MAAM;EACjB;AACD,KAAI,MAAM,WACR,UAAS,aAAa,MAAM;AAC9B,KAAI,MAAM,OAAO,SAAS,EACxB,UAAS,SAAS,IAAI,MAAM,OAAO,KAAK,KAAK,CAAC;CAGhD,MAAM,QAAQ;EAFH,iBAAiB,SAAS;EAElB;EAAI,KAAK,MAAM;EAAQ;AAE1C,KAAI,MAAM,MAAM;EACd,MAAM,OAAOC,eAAa,MAAM,MAAM,MAAM;AAC5C,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,MAAM,YAAY,SAAS,GAAG;AAChC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,MAAM,aAAa;GACjC,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,aAAa,EAAE,eAAe,kBAAkB;GACtD,MAAM,cAAcA,eAAa,EAAE,MAAM,IAAI;AAC7C,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,aAAa,UAAU,IAAI,IAAI,YAAY;;;AAIjF,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,mBAAmB,QAA+B;CAChE,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,OAClB,WAAU,QAAQ,MAAM,YAAY,EAAE,CAAC,CAAC,KAAK,MAAM;CAGrD,MAAM,aAAwC;EAC5C,KAAK;EACL,UAAU;EACV,MAAM;EACN,SAAS;EACT,OAAO;EACR;CAED,MAAM,YAAyB;EAAC;EAAO;EAAY;EAAQ;EAAS;EAAU;CAU9E,MAAM,WAAqB;EARhB;GACT;GACA,UAAU,OAAO;GACjB,SAAS,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAChD,WAAW,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAClD;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAkB;EAAG;AAEpE,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,QAAQ,OAAO,IAAI,KAAK;AAC9B,MAAI,CAAC,OAAO,OACV;AACF,WAAS,KAAK,MAAM,WAAW,MAAM,IAAI,MAAM,OAAO,IAAI,GAAG;AAC7D,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,YAAY,MAAM,YAAY,IAAI,MAAM,MAAM,UAAU,KAAK;GACnE,MAAM,QAAQ,MAAM,UAAU,SAAS,KAAK;GAC5C,MAAM,WAAW,MAAM,aAAa,cAAc,MAAM,WAAW,KAAK;GACxE,MAAM,OAAO,QAAQ,MAAM,UAAU;AACrC,YAAS,KAAK,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO,QAAQ,MAAM,QAAQ,YAAY,QAAQ,SAAS,IAAI,KAAK,GAAG;;AAE5H,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;AC/jB5B,MAAa,SAAS,OAAO,OAAO;CAClC,OAAO;CACP,YAAY;CACZ,SAAS;CACT,SAAS,EAAE,cAAc,cAAA;CAC1B,CAAC;AAKF,eAAsB,UAAU,KAAqC;AACnE,QAAO,OAAO,KAAK,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;;AAMhE,eAAsB,UAAU,KAA+B;CAC7D,MAAM,MAAM,MAAM,OAAO,IAAI,KAAK,EAAE,QAAQ,QAAQ,CAAC,CAAC,YAAY,KAAK;AACvE,KAAI,CAAC,IACH,QAAO;AAET,QAAO,EADa,IAAI,QAAQ,IAAI,eAAe,IAAI,IACnC,SAAS,YAAY;;AAM3C,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,iBAAiB,KAAsB;AACrD,KAAI;EACF,MAAM,EAAE,aAAa,IAAI,IAAI,IAAI;AACjC,SAAO,cAAc,IAAI,SAAS;SAE9B;AAAE,SAAO;;;AAMjB,SAAgB,gBAAgB,KAAsB;AACpD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,SAAO,OAAO,aAAa,gBAAgB,OAAO,aAAa;SAE3D;AACJ,SAAO;;;AAOX,SAAgB,eAAe,KAAqD;CAClF,MAAM,QAAQ,IAAI,MAAM,qDAAqD;AAC7E,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EAAE,OAAO,MAAM;EAAK,MAAM,MAAM;EAAK;;AAM9C,SAAgB,iBAAiB,KAAqB;AACpD,QAAO,IACJ,QAAQ,UAAU,GAAG,CACrB,QAAQ,QAAQ,GAAG,CACnB,QAAQ,UAAU,GAAG,CACrB,QAAQ,aAAa,WAAW,CAChC,QAAQ,4BAA4B,qBAAqB,CAEzD,QAAQ,qBAAqB,sBAAsB;;AAOxD,SAAgB,iBAAiB,MAA8C;AAE7E,KAAI,KAAK,WAAW,IAAI,EAAE;EACxB,MAAM,WAAW,KAAK,QAAQ,IAAI;AAClC,MAAI,aAAa,IAAI;GACnB,MAAM,QAAQ,KAAK,QAAQ,KAAK,WAAW,EAAE;AAC7C,OAAI,UAAU,GACZ,QAAO;IAAE,MAAM,KAAK,MAAM,GAAG,MAAM;IAAE,KAAK,KAAK,MAAM,QAAQ,EAAA;IAAI;;AAErE,SAAO,EAAE,MAAM,MAAM;;CAGvB,MAAM,QAAQ,KAAK,QAAQ,IAAI;AAC/B,KAAI,UAAU,GACZ,QAAO;EAAE,MAAM,KAAK,MAAM,GAAG,MAAM;EAAE,KAAK,KAAK,MAAM,QAAQ,EAAA;EAAI;AACnE,QAAO,EAAE,MAAM,MAAM;;AAMvB,SAAgB,kBAAkB,KAAiC;CACjE,MAAM,OAAO,IAAI,QAAQ,IAAI;AAC7B,KAAI,SAAS,GACX,QAAO,KAAA;CACT,MAAM,WAAW,IAAI,MAAM,OAAO,EAAE;AAEpC,KAAI,CAAC,YAAY,aAAa,SAC5B,QAAO,KAAA;AACT,QAAO;;ACzFT,SAAgB,YAAY,SAAgC;CAC1D,MAAM,QAAQ,QAAQ,QAAQ,MAAM,GAAG;CACvC,MAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EACL,OAAO,CAAC,MAAM;EACd,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,KAAK;EACN;;AAUH,SAAS,eAAe,KAAa,aAAqC;AACxE,KAAI,aAAa;EAEf,MAAM,UAAU,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,QAAQ,CAAC;AAC3
E,MAAI,QACF,QAAO,QAAQ;EACjB,MAAM,YAAY,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,UAAU,CAAC;AAC/E,MAAI,UACF,QAAO,UAAU;;AAGrB,QAAO,IAAI,QAAQ,MAAM,GAAG;;AAG9B,SAAS,YAAY,KAAqB;AACxC,QAAO,IAAI,QAAQ,uBAAuB,OAAO;;AAMnD,SAAS,kBAAkB,KAAa,aAA8B;AAEpE,QAAO,IAAI,WAAW,GAAG,YAAY,GAAG,IAAI,IAAI,WAAW,GAAG,YAAY,IAAI,IAAI,IAAI,WAAW,GAAG,YAAY,GAAG;;AAMrH,SAAgB,aAAa,SAA0B;AACrD,QAAO,oBAAoB,KAAK,QAAQ,QAAQ,MAAM,GAAG,CAAC;;AAG5D,SAAgB,cAAc,GAAW,GAAmB;AAC1D,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,QAAO,EAAE,QAAQ,EAAE;;AAMrB,SAAS,mBAAmB,OAAe,MAA+B;AACxE,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA,SAAS,MAAM,GAAG,KAAK;GACvB;GACA;GACA;GACD,EAAE;GAAE,UAAU;GAAS,SAAS;GAAQ,OAAO;IAAC;IAAU;IAAQ;;GAAW,CAAC;AAC/E,MAAI,CAAC,OACH,QAAO,EAAE;AACX,SAAO,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAC;SAE1E;AACJ,SAAO,EAAE;;;AAOb,eAAe,qBAAqB,OAAe,MAAwC;AAKzF,SAJa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,YACvC,EAAE,QAAQ,YAAY,QAAQ,KAAO,EAAE,CACxC,CAAC,YAAY,KAAK,GACN,YAAY,EAAE;;AAM7B,eAAe,iBAAiB,OAAe,MAAwC;AACrF,KAAI,eAAe,EAAE;EACnB,MAAM,WAAW,mBAAmB,OAAO,KAAK;AAChD,MAAI,SAAS,SAAS,EACpB,QAAO;;AAEX,QAAO,qBAAqB,OAAO,KAAK;;AAS1C,SAAgB,eAAe,UAA2B,aAAsB,kBAA2B,UAAoC;CAE7I,MAAM,kBAAkB,eAAe,SAAS,MAAK,MAAK,kBAAkB,EAAE,KAAK,YAAY,CAAC;CAChG,MAAM,cAAc,mBAAmB,YAAY,iBAAiB,GAAG;CACvE,MAAM,wBAAwB,mBAAmB,aAAa,iBAAiB,GAAG;CAClF,MAAM,SAAS,WAAW,IAAI,KAAK,SAAS,CAAC,SAAS,GAAG;CAoCzD,MAAM,SAlCW,SAAS,QAAQ,MAAM;EACtC,MAAM,MAAM,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC5E,MAAI,CAAC,IACH,QAAO;EAET,MAAM,KAAK,YAAY,IAAI;AAC3B,MAAI,CAAC,GACH,QAAO;AAGT,MAAI,mBAAmB,eAAe,CAAC,kBAAkB,EAAE,KAAK,YAAY,CAC1E,QAAO;AAGT,MAAI,QAAQ;GACV,MAAM,UAAU,EAAE,eAAe,EAAE;AACnC,OAAI,WAAW,IAAI,KAAK,QAAQ,CAAC,SAAS,GAAG,OAC3C,QAAO;;AAIX,MAAI,EAAE,YAAY;AAChB,OAAI,CAAC,yBAAyB,CAAC,YAC7B,QAAO;AACT,UAAO,GAAG,UAAU,YAAY,SAAS,GAAG,UAAU,YAAY;;AAIpE,MAAI,eAAe,cAAc,IAAI,YAAY,GAAG,EAClD,QAAO;AAET,SAAO;GACP,CAGC,MAAM,GAAG,MAAM;EACd,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;EAC7E,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC7E,MAAI,CAAC,QAAQ,CAAC,KACZ,QAAO;AACT,SAAO,cAAc,YAAY,KAAK,EAAG,YAAY,KAAK,CAAE;GAC5D;AAGJ,QAAO,WAAW,SAAS,OAAO,MAAM,GAAG,GAAG;;AAMhD,SAAS,cAAc,SAAwB,aAA8B;CAC3E,MAAM,OAAO,QAAQ,QAAQ,eAAe,QAAQ,UAAU;CAC9D,MAAM,UAAU,eAAe,QAAQ,KAAK,YAAY,IAAI,QAAQ;CAEpE,MAAM,KAAK;EACT;EACA,QAAQ,QAAQ;EAChB,YAAY;EACZ,cAAc;EACf;AACD,KAAI,QAAQ,QAAQ,QAAQ,SAAS,QAAQ,IAC3C,IAAG,KAAK,UAAU,QAAQ,KAAK,QAAQ,MAAM,OAAM,CAAC,GAAG;AACzD,IAAG,KAAK,MAAM;AAEd,QAAO,GAAG,GAAG,KAAK,KAAK,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,IAAI,MAAM,QAAQ;;AAc5E,SAAgB,qBAAqB,gBAAuD,aAA8B;CAExH,MAAM,OAA4B,MAAM,QAAQ,eAAe,GAC3D;EAAE,UAAU;EAAgB;EAAa,GACzC;CAEJ,MAAM,EAAE,UAAU,cAAc,iBAAiB;CACjD,MAAM,MAAM,KAAK;CAUjB,MAAM,QAAkB;EAPb;GACT;GACA,UAHY,SAAS,UAAU,cAAc,UAAU;GAIvD,WAAW,SAAS,IAAI,OAAO;GAC/B;GACD,CAE2B,KAAK,KAAK;EAAE;EAAI;EAAoB;EAAG;AAGnE,KAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,QAAM,KAAK,oBAAoB,GAAG;AAClC,OAAK,MAAM,KAAK,aACd,OAAM,KAAK,MAAM,EAAE,QAAQ,WAAW,EAAE,QAAQ,QAAQ,EAAE,MAAM,IAAI,EAAE,KAAK,GAAG;AAEhF,QAAM,KAAK,GAAG;;AAIhB,KAAI,SAAS,SAAS,GAAG;AACvB,MAAI,gBAAgB,aAAa,SAAS,EACxC,OAAM,KAAK,oBAAoB,GAAG;AACpC,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,OAAO,QAAQ,EAAE,eAAe,EAAE,UAAU;GAClD,MAAM,WAAW,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GAAG,EAAE,MAAM,IAAI,EAAE;GAE9E,MAAM,KAAK,YADK,eAAe,EAAE,KAAK,IAAI,IAAI,EAAE,IACjB;GAC/B,MAAM,QAAQ,IAAI,UAAU,KAAK,GAAG,UAAU,IAAI,iBAAiB,IAAI,UAAU,IAAI,iBAAiB;AACtG,SAAM,KAAK,MAAM,EAAE,IAAI,MAAM,SAAS,QAAQ,EAAE,QAAQ,EAAE,IAAI,IAAI,KAAK,GAAG,QAAQ;;AAEpF,QAAM,KAAK,GAAG;;AAIhB,KAAI,cAAc;AAChB,QAAM,KAAK,gBAAgB,GAAG;AAC9B,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,GAAG;;AAG
hB,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,cAAc,SAAiC;CAC7D,MAAM,QAAQ,QAAQ,YAAY,IAAI,MAAM;AAC5C,QAAO,KAAK,SAAS,OAAO,iBAAiB,KAAK,KAAK;;AAkBzD,eAAe,eAAe,OAAe,MAAc,KAAa,aAA8C;CACpH,MAAM,QAAkB,EAAE;AAG1B,KAAI,aAAa;EACf,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;EACnD,MAAM,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI;EACjE,MAAM,aAAa,CAAC,GAAG,IAAI,IAAI,CAAC,WAAW,UAAU,CAAC,CAAC;AACvD,OAAK,MAAM,QAAQ,WACjB,OAAM,KAAK,YAAY,KAAK,eAAe;;AAK/C,OAAM,KAAK,gBAAgB,gBAAgB,aAAa;AAExD,MAAK,MAAM,QAAQ,OAAO;EAExB,MAAM,UAAU,MAAM,OADV,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QACvC;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAClH,MAAI,QACF,QAAO;;AAEX,QAAO;;AAUT,eAAsB,kBACpB,OACA,MACA,kBACA,QACA,aACA,UACA,cACsB;CAEtB,MAAM,WAAW,eADA,MAAM,iBAAiB,OAAO,KAAK,EACV,aAAa,kBAAkB,SAAS;AAElF,KAAI,SAAS,SAAS,GAAG;EAIvB,MAAM,OAFc,SAAS,QAAO,MAAK,CAAC,cAAc,EAAE,CAAC,CAElC,KAAK,MAAM;AAIlC,UAAO;IACL,MAAM,YAJS,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GACzD,EAAE,MACF,IAAI,EAAE,MAEmB;IAC3B,SAAS,cAAc,GAAG,YAAA;IAC3B;IACD;EAIF,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,gBAAgB,UAAU,SAAS,GAAI,KACM,YAAY;AACrE,MAAI,aAAa,UAAU,SAAS,IAClC,MAAK,KAAK;GAAE,MAAM;GAAyB,SAAS;GAAW,CAAC;AAGlE,SAAO;;CAKT,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,gBAAgB,UAAU,QACmB,YAAY;AACrE,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,CAAC;EAAE,MAAM;EAAyB,SAAS;EAAW,CAAC;;ACnWhE,SAAS,kBAAkB,SAAkC;AAW3D,QAAO,GAVI;EACT;EACA,YAAY,QAAQ;EACpB,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAM,CAAC;EAC9C,SAAS,QAAQ;EACjB,QAAQ,QAAQ;EAChB;EACA;EACD,CAEY,KAAK,KAAK,CAAC,QAAQ,QAAQ,MAAM,MAAM,QAAQ;;AAM9D,eAAe,cAAc,OAAqD;AAChF,KAAI;EACF,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AACrH,MAAI,CAAC,KACH,QAAO;EAGT,IAAI,QAAQ;EACZ,MAAM,aAAa,KAAK,MAAM,yBAAyB;AACvD,MAAI,WACF,SAAQ,WAAW,GAAI,MAAM;AAE/B,MAAI,CAAC,OAAO;GACV,MAAM,iBAAiB,KAAK,MAAM,0BAA0B;AAC5D,OAAI,eACF,SAAQ,eAAe,GAAI,MAAM;;EAGrC,MAAM,WAAW,eAAe,KAAK;AACrC,MAAI,CAAC,SACH,QAAO;AAET,SAAO;GACL,SAAS,MAAM;GACf,OAAO,SAAS,MAAM,SAAS,WAAW,MAAM;GAChD,MAAM,MAAM;GACZ;GACA,KAAK,MAAM;GACZ;SAEG;AACJ,SAAO;;;AASX,SAAS,qBAAqB,SAAwB,kBAAyC;CAC7F,MAAM,cAAc,YAAY,iBAAiB;AACjD,KAAI,CAAC,YACH,QAAO;AAET,QAAO,QAAQ,QAAQ,UAAU;EAC/B,MAAM,UAAU,YAAY,MAAM,QAAQ;AAC1C,MAAI,CAAC,QACH,QAAO;AAET,SAAO,cAAc,SAAS,YAAY,IAAI;GAC9C;;AAQJ,eAAsB,kBACpB,aACA,kBACsB;CACtB,MAAM,SAAS,cAAc,YAAY;AACzC,KAAI,CAAC,OACH,QAAO,EAAE;CAEX,MAAM,mBAAmB,qBAAqB,OAAO,UAAU,iBAAiB;AAChF,KAAI,iBAAiB,WAAW,EAC9B,QAAO,EAAE;CAEX,MAAM,WAA8B,EAAE;CAGtC,MAAM,YAAY;AAClB,MAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK,WAAW;EAC3D,MAAM,QAAQ,iBAAiB,MAAM,GAAG,IAAI,UAAU;EACtD,MAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,KAAI,UAAS,cAAc,MAAM,CAAC,CAAC;AAC3E,OAAK,MAAM,UAAU,QACnB,KAAI,OACF,UAAS,KAAK,OAAO;;AAI3B,KAAI,SAAS,WAAW,EACtB,QAAO,EAAE;AAGX,UAAS,MAAM,GAAG,MAAM;EACtB,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;EAC7C,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;AAC7C,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,KAAK,QAAQ,KAAK,OAAO,EAAE,KAAK;GAC3D,MAAM,QAAQ,KAAK,MAAM,MAAM,KAAK,MAAM;AAC1C,OAAI,SAAS,EACX,QAAO;;AAEX,SAAO;GACP;AAGF,QAAO,SAAS,KAAI,OAAM;EACxB,MAAM,iBAAiB,EAAE,QAAQ;EACjC,SAAS,kBAAkB,EAAA;EAC5B,EAAE;;ACvIL,MAAM,wBAAwB,IAAI,IAAI;CACpC;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,uBAAuB,IAAI,IAAI;CACnC;CACA;CACA;CACD,CAAC;AAuBF,MAAM,mBAAmB;AAGzB,SAAS,aAAa,MAAuB;AAC3C,QAAO,iBAAiB,KAAK,KAAK,IAAI,UAAU,KAAK,KAAK;;AAO5D,SAAS,aAAa,MAAc,OAAuB;AACzD,KAAI,KAAK,UAAU,MACjB,QAAO;CAGT,MAAM,cAAc;CACpB,IAAI,cAAc;CAClB,IAAI;AAGJ,SAAQ,QAAQ,YAAY,KAAK,KAAK,MAAM,MAAM;EAChD,MAAM,aAAa,MAAM;EACzB,MAAM,WAAW,aAAa,MAAM,GAAG;AAEvC,MAAI,aAAa,SAAS,WAAW,OAAO;AAC1C,OAAI,YAAY,QAAQ,IACtB,eAAc;OAGd,eAAc;AAEhB;;;CAKJ,MAAM,QAAQ,KAAK,MAAM,GAAG,YAAY;CACxC,MAAM,gBAAgB,MAAM,YAAY,OAAO;AAC/C,KAAI,gBAA
gB,cAAc,GAChC,QAAO,GAAG,MAAM,MAAM,GAAG,cAAc,CAAC;AAE1C,QAAO,GAAG,MAAM;;AAOlB,SAAS,aAAa,GAAwE;AAC5F,SAAQ,EAAE,eAAe,IAAI,MAAM,aAAa,EAAE,KAAK,GAAG,IAAI,MAAM,IAAI,EAAE;;AAQ5E,eAAsB,uBACpB,OACA,MACA,QAAQ,IACR,YACA,UAC6B;AAC7B,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;AAMX,KAAI,CAAC,YAAY,YAAY;EAC3B,MAAM,SAAS,IAAI,KAAK,WAAW;AACnC,SAAO,SAAS,OAAO,UAAU,GAAG,EAAE;AACtC,MAAI,yBAAS,IAAI,MAAM,CACrB,QAAO,EAAE;;AAGb,KAAI;EAMF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GAAC;GAAO;GAAW;GAAM,SAFtD,wGAFK,KAAK,IAAI,QAAQ,GAAG,GAAG,CAEuF;GAE3C;GAAM,SAAS;GAAS;GAAM,QAAQ;GAAO,EAAE;GACnI,UAAU;GACV,WAAW,KAAK,OAAO;GACxB,CAAC;AACF,MAAI,CAAC,OACH,QAAO,EAAE;EAGX,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM,YAAY,aAAa;AACnD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,QAAO,EAAE;EAEX,MAAM,SAAS,WAAW,IAAI,KAAK,SAAS,CAAC,SAAS,GAAG;AAyDzD,SAxDoB,MACjB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW;GAClB,MAAM,OAAO,EAAE,UAAU,QAAQ,IAAI,aAAa;AAClD,UAAO,CAAC,qBAAqB,IAAI,IAAI;IACrC,CACD,QAAQ,MAAW,CAAC,UAAU,IAAI,KAAK,EAAE,UAAU,CAAC,SAAS,IAAI,OAAO,CACxE,KAAK,MAAW;GAEf,IAAI;AACJ,OAAI,EAAE,QAAQ,MAAM;IAClB,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,OAAO,kBAAkB;IAC7F,MAAM,SAAS,EAAE,OAAO,QAAQ;AAEhC,aAAS,GADG,gBAAgB,SAAS,MAAM,OAAO,wBAAwB,KACxD,EAAE,OAAO;;GAI7B,MAAM,YAAiC,EAAE,UAAU,SAAS,EAAE,EAC3D,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW,CAAC,iBAAiB,MAAM,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC,CACjE,KAAK,MAAW;IACf,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,kBAAkB;AACtF,WAAO;KACL,MAAM,EAAE,QAAQ;KAChB,QAAQ,EAAE,OAAO;KACjB,WAAW,EAAE,WAAW,cAAc;KACtC;KACD;KACD,CACD,MAAM,GAAsB,MAAyB,aAAa,EAAE,GAAG,aAAa,EAAE,CAAC,CACvF,MAAM,GAAG,EAAE;AAEd,UAAO;IACL,QAAQ,EAAE;IACV,OAAO,EAAE;IACT,MAAM,EAAE,QAAQ;IAChB,UAAU,EAAE,UAAU,QAAQ;IAC9B,WAAW,EAAE;IACb,KAAK,EAAE;IACP,aAAa,EAAE,eAAe;IAC9B,UAAU,EAAE,UAAU,cAAc;IACpC;IACA,aAAa;IACd;IACD,CAED,MAAM,GAAqB,MAAwB;GAClD,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;GACxE,MAAM,QAAQ,sBAAsB,IAAI,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;AACxE,OAAI,UAAU,MACZ,QAAO,QAAQ;AACjB,UAAQ,EAAE,cAAc,EAAE,YAAa,EAAE,cAAc,EAAE;IACzD,CACD,MAAM,GAAG,MAAM;SAId;AACJ,SAAO,EAAE;;;AAOb,SAAgB,2BAA2B,GAA6B;CACtE,MAAM,KAAK,iBAAiB;EAC1B,QAAQ,EAAE;EACV,OAAO,EAAE;EACT,UAAU,EAAE;EACZ,SAAS,QAAQ,EAAE,UAAU;EAC7B,KAAK,EAAE;EACP,SAAS,EAAE;EACX,UAAU,EAAE;EACZ,UAAU,CAAC,CAAC,EAAE;EACf,CAAC;CAEF,MAAM,YAAY,EAAE,eAAe,IAAI,OAAO;CAC9C,MAAM,QAAQ;EAAC;EAAI;EAAI,KAAK,EAAE;EAAQ;AAEtC,KAAI,EAAE,KACJ,OAAM,KAAK,IAAI,aAAa,EAAE,MAAM,UAAU,CAAC;AAGjD,KAAI,EAAE,OACJ,OAAM,KAAK,IAAI,OAAO,IAAI,sBAAsB,IAAI,aAAa,EAAE,QAAQ,IAAK,CAAC;UAE1E,EAAE,YAAY,SAAS,GAAG;AAEjC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,EAAE,aAAa;GAC7B,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,aAAa,EAAE,eAAe,kBAAkB;AACtD,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,aAAa,UAAU,IAAI,IAAI,aAAa,EAAE,MAAM,IAAI,CAAC;;;AAI/F,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,wBAAwB,aAAyC;CAC/E,MAAM,6BAAa,IAAI,KAAiC;AACxD,MAAK,MAAM,KAAK,YAEd,WAAU,YADE,EAAE,YAAY,uBACO,EAAE,CAAC,CAAC,KAAK,EAAE;CAG9C,MAAM,WAAW,YAAY,QAAO,MAAK,EAAE,OAAO,CAAC;CASnD,MAAM,WAAqB;EAPhB;GACT;GACA,UAAU,YAAY;GACtB,aAAa;GACb;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAuB;EAAG;CAGzE,MAAM,OAAO,CAAC,GAAG,WAAW,MAAM,CAAC,CAAC,MAAM,GAAG,MAAM;AAGjD,UAFc,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACjD,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACvC,EAAE,cAAc,EAAE;GAC1C;AAEF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,WAAW,IAAI,IAAI;AACjC,WAAS,KAAK,MAAM,IAAI,IAAI,MAAM,OAAO,IAAI,GAAG;AAChD,OAAK,MAAM,KAAK,OAAO;GACrB,MAAM,UAAU,EAAE,cAAc,IAAI,MAAM,EAAE,YAAY,KAAK;GAC7D,MAAM,WAAW,EAAE,SAAS,gBAAgB;GAC5C,MAAM,OAAO,QAAQ,EAAE,UAAU;AACjC,YAAS,KAAK,OAAO,EAAE,OAAO,iBAAiB,EAAE,OAAO,QAAQ,EAAE,QAAQ,UAAU,SAAS,IAAI,KAA
K,GAAG;;AAE3G,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;AChR5B,MAAM,YAAY;CAChB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB,MAAM;AAK5B,eAAsB,kBAAkB,YAA0C;AAChF,KAAI,CAAC,WAAW,KAAK,YAAY,eAAe,CAAC,CAC/C,QAAO,EAAE;CAOX,MAAM,QAAQ,MAAM,OAAO,CAAC,sBAAsB,EAAE;EAClD,KAAK;EACL,QAPa,CACb,GAAG,UAAU,KAAI,MAAK,MAAM,EAAE,KAAK,EACnC,GAAG,cACJ;EAKC,UAAU;EACX,CAAC;CAEF,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,YAAY,KAAK;EACtC,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,SAAS,QAAQ;UAEpC;AACJ;;AAGF,MAAI,QAAQ,SAAS,cACnB;AAEF,UAAQ,KAAK;GAAE,MAAM;GAAM;GAAS,MAAM;GAAS,CAAC;;AAGtD,QAAO;;ACtCT,SAAgB,mBAAmB,OAAsC;CACvE,MAAM,UAAU,MAAM,MAAM;AAG5B,KAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAGT,KAAI,QAAQ,WAAW,KAAK,IAAI,QAAQ,WAAW,MAAM,IAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CAI7G,QAAO;EAAE,MAAM;EAAS,WAHN,QAAQ,WAAW,IAAI,GACrC,QAAQ,QAAQ,IAAI,QAAQ,IAAI,QAAQ,MAAM,EAAE,CAAC,GACjD,QAAQ,QAAA;EACuB;AAIrC,KAAI,QAAQ,WAAW,OAAO,EAAE;EAE9B,MAAM,KAAK,eADQ,iBAAiB,QAAQ,CACP;AACrC,MAAI,GACF,QAAO;GAAE,MAAM;GAAU,OAAO,GAAG;GAAO,MAAM,GAAG;GAAM;AAC3D,SAAO;;AAIT,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,UAAU,CACjE,QAAO,YAAY,QAAQ;AAI7B,KAAI,qBAAqB,KAAK,QAAQ,CACpC,QAAO;EAAE,MAAM;EAAU,OAAO,QAAQ,MAAM,IAAI,CAAC;EAAI,MAAM,QAAQ,MAAM,IAAI,CAAC;EAAI;AAItF,QAAO;;AAGT,SAAS,YAAY,KAAoC;AACvD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAE3B,MAAI,OAAO,aAAa,gBAAgB,OAAO,aAAa,kBAAkB;GAC5E,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AAGT,OAAI,MAAM,OAAO,UAAU,MAAM,UAAU,EAGzC,QAAO;IAAE,MAAM;IAAU;IAAO;IAAM,KAF1B,MAAM;IAEyB,WADzB,MAAM,SAAS,IAAI,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI,GAAG,KAAA;IACV;AAGxD,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,MAAI,OAAO,aAAa,cAAc;GACpC,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AACT,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,SAAO;SAEH;AACJ,SAAO;;;AAOX,SAAgB,0BAA0B,SAA0D;CAClG,MAAM,QAAQ,QAAQ,MAAM,wBAAwB;AACpD,KAAI,CAAC,MACH,QAAO,EAAE;CAEX,MAAM,SAAkD,EAAE;AAC1D,MAAK,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;EACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,MAAI,CAAC,GACH;AACF,MAAI,GAAG,OAAO,OACZ,QAAO,OAAO,GAAG;AACnB,MAAI,GAAG,OAAO,cACZ,QAAO,cAAc,GAAG;;AAE5B,QAAO;;AAST,MAAM,kBAAkB;CAAC;CAAW;CAAc;CAAS;AAK3D,eAAsB,eACpB,QACA,YACwD;AACxD,KAAI,OAAO,SAAS,QAClB,QAAO,iBAAiB,OAAO;AACjC,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,QAAO,EAAE,QAAQ,EAAE,EAAE;;AAKvB,SAAS,iBAAiB,QAAmD;CAC3E,MAAM,OAAO,OAAO;AACpB,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,SAAwB,EAAE;CAGhC,MAAM,YAAY,QAAQ,MAAM,SAAS;AACzC,KAAI,WAAW,UAAU,CACvB,MAAK,MAAM,SAAS,YAAY,WAAW,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,MAAI,CAAC,MAAM,aAAa,CACtB;EACF,MAAM,QAAQ,eAAe,QAAQ,WAAW,MAAM,KAAK,EAAE,UAAU,MAAM,OAAO;AACpF,MAAI,MACF,QAAO,KAAK,MAAM;;AAKxB,KAAI,OAAO,WAAW,GAAG;EACvB,MAAM,QAAQ,eAAe,MAAM,GAAG;AACtC,MAAI,MACF,QAAO,KAAK,MAAM;;AAGtB,QAAO,EAAE,QAAQ;;AAGnB,SAAS,eAAe,KAAa,UAAsC;CACzE,MAAM,cAAc,QAAQ,KAAK,WAAW;AAC5C,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;CAET,MAAM,UAAU,aAAa,aAAa,QAAQ;CAClD,MAAM,cAAc,0BAA0B,QAAQ;CACtD,MAAM,UAAU,IAAI,MAAM,IAAI,CAAC,KAAK;CACpC,MAAM,OAAO,YAAY,QAAQ;CAEjC,MAAM,QAAkD,EAAE;AAC1D,MAAK,MAAM,UAAU,iBAAiB;EACpC,MAAM,aAAa,QAAQ,KAAK,OAAO;AACvC,MAAI,CAAC,WAAW,WAAW,CACzB;AACF,OAAK,MAAM,QAAQ,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,OAAI,CAAC,KAAK,QAAQ,CAChB;AACF,SAAM,KAAK;IACT,MAAM,GAAG,OAAO,GAAG,KAAK;IACxB,SAAS,aAAa,QAAQ,YAAY,KAAK,KAAK,EAAE,QAAA;IACvD,CAAC;;;AAIN,QAAO;EACL;EACA,
aAAa,YAAY,eAAe;EACxC,MAAM;EACN;EACA;EACD;;AAKH,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;AAC1B,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OAAO,QAAQ;AAExB,MAAI,QAAQ,QAAQ;GAClB,MAAM,WAAW,MAAM,OACrB,yBAAyB,MAAM,GAAG,KAAK,eACxC,CAAC,YAAY,KAAK;AACnB,OAAI,UAAU,OAAO,OACnB,QAAO,oBAAoB,OAAQ,MAAO,UAAU,UAAU,OAAO,WAAW,WAAW;;AAE/F,SAAO,EAAE,QAAQ,EAAE,EAAE;;AAGvB,QAAO,oBAAoB,OAAQ,MAAO,KAAK,MAAM,OAAO,WAAW,WAAW;;AAGpF,eAAe,oBACb,OACA,MACA,KACA,MACA,WACA,YACwD;CACxD,MAAM,WAAW,KAAK,MAAM,KAAI,MAAK,EAAE,KAAK;CAC5C,MAAM,YAAY,KAAK,MAAM;CAG7B,IAAI;AAEJ,KAAI,WAAW;EAEb,MAAM,aAAa,CACjB,GAAG,UAAU,YAEb,UAAU,SAAS,YAAY,GAAG,YAAY,KAC/C,CAAC,OAAO,QAAQ;AAEjB,iBAAe,SAAS,QAAO,MAAK,WAAW,SAAS,EAAE,CAAC;OAI3D,gBAAe,SAAS,QAAO,MAC7B,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAChD;AAGH,KAAI,aAAa,WAAW,EAC1B,QAAO;EAAE,QAAQ,EAAE;EAAE;EAAW;CAElC,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAO,MAAM,KAAK,OAAO;AAC9D,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAO,MAAM,KAAK,SAAS;AACpE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO;EAAE;EAAQ;EAAW;;AAG9B,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QAC7D,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;AAarB,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;CAC1B,MAAM,YAAY,mBAAmB,GAAG,MAAM,GAAG,OAAO;AAExD,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,sCAAsC,UAAU,uBAAuB,IAAI,8BAC5E,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OACT,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,WAAW,KAAK,QAAO,MAAK,EAAE,SAAS,OAAO,CAAC,KAAI,MAAK,EAAE,KAAK;CAGrE,MAAM,eAAe,OAAO,YACxB,SAAS,QAAO,MAAK,MAAM,GAAG,OAAO,UAAU,WAAW,GAC1D,SAAS,QAAO,MAAK,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAAW;AAEnF,KAAI,aAAa,WAAW,EAC1B,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAQ,MAAO,KAAK,OAAO;AAChE,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAQ,MAAO,KAAK,SAAS;AACtE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO,EAAE,QAAQ;;AAGnB,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,sBAAsB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,QACpD,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;ACnarB,eAAsB,aAAa,SAAyC;CAE1E,MAAM,UAAU,GADD,IAAI,IAAI,QAAQ,CAAC,OACN;AAC1B,KAAI,MAAM,UAAU,QAAQ,CAC1B,QAAO;AACT
,QAAO;;AAMT,eAAsB,aAAa,KAA0C;CAC3E,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,KAAI,CAAC,WAAW,QAAQ,SAAS,GAC/B,QAAO;AAET,QAAO;EACL,KAAK;EACL,OAAO,mBAAmB,QAAA;EAC3B;;AAMH,SAAgB,mBAAmB,SAA6B;CAC9D,MAAM,QAAoB,EAAE;CAC5B,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,YAAY;AAClB,MAAK,IAAI,QAAQ,UAAU,KAAK,QAAQ,EAAE,UAAU,MAAM,QAAQ,UAAU,KAAK,QAAQ,EAAE;EACzF,MAAM,MAAM,MAAM;AAClB,MAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,QAAK,IAAI,IAAI;AACb,SAAM,KAAK;IAAE,OAAO,MAAM;IAAK;IAAK,CAAC;;;AAIzC,QAAO;;AAOT,SAAS,UAAU,KAAsB;AACvC,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,MAAI,OAAO,aAAa,SACtB,QAAO;EACT,MAAM,OAAO,OAAO;AAEpB,MAAI,SAAS,eAAe,SAAS,eAAe,SAAS,MAC3D,QAAO;AACT,MAAI,SAAS,kBACX,QAAO;AACT,MAAI,mDAAmD,KAAK,KAAK,CAC/D,QAAO;AACT,MAAI,KAAK,WAAW,IAAI,CACtB,QAAO;AACT,SAAO;SAEH;AAAE,SAAO;;;AAGjB,eAAsB,iBACpB,aACA,SACA,YACuB;CACvB,MAAM,QAAQ,OAAO,EAAE;CACvB,IAAI,YAAY;AAoBhB,SAlBgB,MAAM,QAAQ,IAC5B,YAAY,MAAM,KAAI,SAAQ,MAAM,YAAY;EAC9C,MAAM,MAAM,KAAK,IAAI,WAAW,OAAO,GACnC,KAAK,MACL,GAAG,QAAQ,QAAQ,OAAO,GAAG,GAAG,KAAK,IAAI,WAAW,IAAI,GAAG,KAAK,MAAM,KAAK;AAE/E,MAAI,CAAC,UAAU,IAAI,CACjB,QAAO;AAET,eAAa,KAAK,KAAK,aAAa,YAAY,MAAM,OAAO;EAE7D,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,MAAI,WAAW,QAAQ,SAAS,IAC9B,QAAO;GAAE,KAAK,KAAK;GAAK,OAAO,KAAK;GAAO;GAAS;AACtD,SAAO;GACP,CAAC,CACJ,EAEc,QAAQ,MAAuB,MAAM,KAAK;;AAO3D,SAAgB,mBAAmB,SAAiB,SAA0B;CAC5E,IAAI,aAAa;AAGjB,KAAI,SAAS;EAEX,MAAM,UADO,QAAQ,QAAQ,OAAO,GAAG,CAClB,QAAQ,uBAAuB,OAAO;AAC3D,eAAa,WAAW,QACtB,IAAI,OAAO,SAAS,QAAQ,mBAAmB,IAAI,EACnD,cACD;;AAIH,cAAa,WAAW,QAAQ,wBAAwB,eAAe;AAEvE,QAAO;;AAOT,SAAgB,gBAAgB,SAAiB,UAAmC;CAClF,MAAM,WAAqB,EAAE;CAC7B,MAAM,QAAQ,QAAQ,MAAM,UAAU;AAEtC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,WAAW,KAAK,MAAM,kBAAkB;AAC9C,MAAI,CAAC,SACH;EAEF,MAAM,MAAM,SAAS;AACrB,MAAI,SAAS,MAAK,MAAK,IAAI,SAAS,EAAE,CAAC,EAAE;GACvC,MAAM,eAAe,KAAK,QAAQ,MAAM,KAAK,QAAQ,OAAO,CAAC;AAC7D,OAAI,eAAe,GACjB,UAAS,KAAK,KAAK,MAAM,eAAe,EAAE,CAAC;;;AAKjD,KAAI,SAAS,WAAW,EACtB,QAAO;AACT,QAAO,SAAS,KAAK,cAAc;;ACxIrC,MAAa,eAAe;AAG5B,MAAa,oBAAoB,MAAc,IAAI,KAAK,IAAI;AAyB5D,eAAe,eAAe,OAAe,MAAc,KAAgC;AAIzF,SAHa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK,GACN,OAAO,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE;;AAc5C,eAAe,WAAW,OAAe,MAAc,SAAiB,aAAsB,YAAgD;CAC5I,MAAM,aAAa,CAAC,IAAI,WAAW,QAAQ;AAC3C,KAAI,YACF,YAAW,KAAK,GAAG,YAAY,GAAG,UAAU;AAE9C,MAAK,MAAM,OAAO,YAAY;EAC5B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,IAAI;AACpD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAK;GAAO;;AAI9B,KAAI,aAAa;EACf,MAAM,YAAY,MAAM,qBAAqB,OAAO,MAAM,YAAY;AACtE,MAAI,WAAW;GACb,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,UAAU;AAC1D,OAAI,MAAM,SAAS,EACjB,QAAO;IAAE,KAAK;IAAW;IAAO;;;CAKtC,MAAM,WAAW,aACb,CAAC,YAAY,GAAG,CAAC,QAAQ,SAAS,CAAC,QAAO,MAAK,MAAM,WAAW,CAAC,GACjE,CAAC,QAAQ,SAAS;AACtB,MAAK,MAAM,UAAU,UAAU;EAC7B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,OAAO;AACvD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAQ;GAAO,UAAU;GAAM;;AAGjD,QAAO;;AAOT,eAAe,qBAAqB,OAAe,MAAc,aAA6C;CAC5G,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CACnB,MAAM,SAAS,GAAG,YAAY;AAC9B,QAAO,MAAM,UAAU,MAAK,MAAK,EAAE,IAAI,WAAW,OAAO,CAAC,EAAE,OAAO;;AAMrE,SAAS,eAAe,OAAiB,YAA8B;AACrE,QAAO,MAAM,QAAO,MAAK,EAAE,WAAW,WAAW,IAAI,gBAAgB,KAAK,EAAE,CAAC;;AAI/E,MAAM,iBAAiB;CACrB;CACA;CACA;CACA;CACD;AAGD,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAWF,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,aAAa,IAAI,EAAE,aAAa,CAAC,CAAC;;AAM3D,SAAS,aAAa,MAAsB;AAC1C,QAAO,KAAK,MAAM,IAAI,CAAC,OAAO,QAAQ,CAAC;;AAMzC,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,cAAc,IAAI,EAAE,aAAa,CAAC,CAAC;;AAO5D,SAAS,YAAY,KAAa,WAA2B;C
AC3D,MAAM,QAAQ,aAAa,IAAI,IAAI;AAEnC,QAAQ,aADU,eAAe,IAAI,GAAG,MAAM,KACb;;AAQnC,SAAS,iBAAiB,UAA2C;CACnE,MAAM,UAAU,SACb,QAAO,MAAK,gBAAgB,KAAK,EAAE,CAAC,CACpC,QAAO,MAAK,CAAC,eAAe,MAAK,MAAK,EAAE,KAAK,EAAE,CAAC,CAAC,CACjD,QAAO,MAAK,EAAE,SAAS,IAAI,CAAC;CAG/B,MAAM,6BAAa,IAAI,KAAuB;AAE9C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,UAAU,KAAK,YAAY,SAAS;AAC1C,MAAI,YAAY,GACd;AAGF,YAAU,YADK,KAAK,MAAM,GAAG,UAAU,EAAgB,QACnB,EAAE,CAAC,CAAC,KAAK,KAAK;;AAGpD,KAAI,WAAW,OAAO,GAAG;EACvB,MAAM,UAAU,CAAC,GAAG,WAAW,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;AACpF,MAAI,QAAQ,GAAG,UAAU,GAAG;GAC1B,MAAM,aAAa,QAAQ;GAC3B,MAAM,UAAU,WAAW,YAAY,QAAQ;GAC/C,MAAM,cAAc,UAAU,IAAI,WAAW,MAAM,GAAG,QAAQ,GAAG;AACjE,UAAO;IAAE,OAAO,QAAQ;IAAI,QAAQ;IAAa;;;CAKrD,MAAM,4BAAY,IAAI,KAAuB;AAE7C,MAAK,MAAM,QAAQ,SAAS;AAC1B,MAAI,eAAe,KAAK,CACtB;EAGF,MAAM,YAAY,KAAK,YAAY,IAAI;AACvC,MAAI,cAAc,GAChB;AAGF,YAAU,WADE,KAAK,MAAM,GAAG,YAAY,EAAE,QACR,EAAE,CAAC,CAAC,KAAK,KAAK;;AAGhD,KAAI,UAAU,SAAS,EACrB,QAAO;CAGT,MAAM,SAAS,CAAC,GAAG,UAAU,SAAS,CAAC,CACpC,KAAK,CAAC,KAAK,YAAY;EAAE;EAAK;EAAO,OAAO,YAAY,KAAK,MAAM,OAAA;EAAS,EAAE,CAC9E,QAAO,MAAK,EAAE,MAAM,UAAU,EAAE,CAChC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAEpC,KAAI,OAAO,WAAW,EACpB,QAAO;CAET,MAAM,OAAO,OAAO;AAKpB,QAAO;EAAE,OAAO,KAAK;EAAO,QAAQ,KAAK;EAAK;;AAMhD,eAAe,cAAc,OAAe,MAAc,KAAa,aAAa,SAA4B;AAE9G,QAAO,eADO,MAAM,eAAe,OAAO,MAAM,IAAI,EACvB,WAAW;;AAO1C,eAAsB,aAAa,OAAe,MAAc,SAAiB,aAAsB,SAAiD;CACtJ,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU;EACZ,MAAM,MAAM,SAAS,OAAO;EAC5B,MAAM,WAAW,CAAC,SAAS;EAC3B,MAAM,QAAQ,MAAM,cAAc,SAAS,OAAO,SAAS,MAAM,KAAK,GAAG,SAAS,KAAK,GAAG;AAC1F,MAAI,MAAM,WAAW,EACnB,QAAO;AACT,SAAO;GACL,SAAS,qCAAqC,SAAS,MAAM,GAAG,SAAS,KAAK,GAAG;GACjF;GACA;GACA;GAEA,YAAY,GAAG,SAAS,KAAK,OAAO,UAAU,GAAG,SAAS,KAAK,KAAK,KAAA;GACrE;;CAIH,MAAM,MAAM,MAAM,WAAW,OAAO,MAAM,SAAS,aADhC,UAAU,kBAAkB,QAAQ,GAAG,KAAA,EACiB;AAC3E,KAAI,CAAC,IACH,QAAO;CAET,IAAI,OAAO,eAAe,IAAI,OAAO,QAAQ;CAC7C,IAAI;CACJ,IAAI;AAGJ,KAAI,KAAK,WAAW,GAAG;EACrB,MAAM,aAAa,iBAAiB,IAAI,MAAM;AAC9C,MAAI,YAAY;AACd,UAAO,WAAW;AAClB,gBAAa,WAAW,UAAU,KAAA;AAClC,cAAW,IAAI;;;AAInB,KAAI,KAAK,WAAW,EAClB,QAAO;AAET,QAAO;EACL,SAAS,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI;EACnE,KAAK,IAAI;EACT,OAAO;EACP;EACA;EACA,UAAU,IAAI;EACf;;AAMH,SAAS,cAAc,GAAmB;AACxC,QAAO,EAAE,QAAQ,OAAO,GAAG,CAAC,QAAQ,iBAAiB,GAAG;;AAS1D,SAAgB,wBACd,WACA,WAC0C;AAC1C,KAAI,UAAU,WAAW,EACvB,QAAO;EAAE,SAAS;EAAM,YAAY;EAAG;CAGzC,MAAM,SAAS,UAAU,MAAM,GAAG,GAAG;CAGrC,MAAM,kBAAkB,OAAO,KAAK,SAAS;EAC3C,IAAI,OAAO,KAAK;AAEhB,MAAI,KAAK,WAAW,OAAO,CACzB,KAAI;AACF,UAAO,IAAI,IAAI,KAAK,CAAC;UAEjB;AAER,SAAO,cAAc,KAAK;GAC1B;CAGF,MAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,cAAc,CAAC;CAE5D,IAAI,UAAU;AACd,MAAK,MAAM,YAAY,gBAErB,MAAK,MAAM,YAAY,eACrB,KAAI,aAAa,YAAY,SAAS,SAAS,IAAI,WAAW,EAAE;AAC9D;AACA;;CAKN,MAAM,aAAa,UAAU,OAAO;AACpC,QAAO;EAAE,SAAS,cAAc;EAAK;EAAY;;AAOnD,eAAe,cAAc,OAAe,MAAc,aAAuC;CAC/F,MAAM,OAAO,qCAAqC,MAAM,GAAG,KAAK;CAEhE,MAAM,QAAQ;EACZ;EACA,YAHgB,YAAY,QAAQ,UAAU,GAAG,CAG3B;EACtB,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,CAAC;EAC7D;AACD,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,MAAM,UAAU,GAAG,KAAK,GAAG,OAAO;AAC/C,MAAI,CAAC,KACH;AACF,MAAI;AAEF,OADY,KAAK,MAAM,KAAK,CACpB,SAAS,YACf,QAAO;UAEL;;AAER,QAAO;;AAGT,eAAsB,iBAAiB,aAA6C;CAElF,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;AACnD,MAAK,MAAM,aAAa,CAAC,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,EAAE,UAAU,EAAE;AAEpF,MAAI,CAAC,UAAU,SAAS,IAAI,EAAE;AAG5B,QADgB,MAAM,OAAO,IAAI,yBAAyB,UAAU,GAAG,YAAY,CAAC,YAAY,KAAK,GACxF,GACX,QAAO,sBAAsB,UAAU,GAAG;AAC5C;;AAGF,OADgB,MAAM,OAAO,IAAI,yBAAyB,YAAY,CAAC,YAAY,KAAK,GAC3E,GACX,QAAO,sBAAsB;;CAIjC,MAAM,aAAa,YAAY,QAAQ,MAAM,GAAG;AAChD,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,
UAAU,MAAM;GAAC;GAAU;GAAS;GAAY;GAAU;GAAY;GAAW;GAAI,EAAE;GAC9G,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,QAAQ,KAAK,MAAM,KAAK;EAE9B,MAAM,QAAQ,MAAM,MAAK,MACvB,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAC/D,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACpE;AACD,MAAI,MACF,QAAO,sBAAsB,MAAM;AAErC,OAAK,MAAM,aAAa,OAAO;GAC7B,MAAM,KAAK,eAAe,sBAAsB,UAAU,WAAW;AACrE,OAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;SAGvC;CAOR,MAAM,OAAO,MAAM,OACjB,gDAFY,mBAAmB,GAAG,WAAW,UAAU,CAED,aACvD,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,MAAM,OAAO,OAChB,QAAO;CAGT,MAAM,QAAQ,KAAK,MAAM,MAAK,MAC5B,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAChE,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACrE;AACD,KAAI,MACF,QAAO,sBAAsB,MAAM;AAGrC,MAAK,MAAM,aAAa,KAAK,OAAO;EAClC,MAAM,KAAK,eAAe,sBAAsB,UAAU,YAAY;AACtE,MAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;AAG3C,QAAO;;AAOT,eAAsB,oBAAoB,OAAe,MAAc,aAA6D;CAClI,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU,SACZ,QAAO,EAAE,UAAU,SAAS,UAAU;AAGxC,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAM;GAAa,EAAE;GAC9F,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,SAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;SAElD;CAKR,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,QAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;;AAMxD,eAAsB,YAAY,OAAe,MAAc,QAAiB,KAAsC;CAIpH,MAAM,UAAU,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAJ5B,OAAO,OAIqC,GAAG,OAAO,cACjE,yBAAyB,MAAM,GAAG,KAAK,SAAS,MAAM,QAAQ,QAAQ;AAI1E,MAFgB,MAAM,OAAO,IAAI,QAAQ,CAAC,YAAY,KAAK,GAE9C,GACX,QAAO,UAAU,MAAM,GAAG,OAAO,SAAS,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ;CAKlF,MAAM,WAAW,SAAS,GAAG,OAAO,KAAK;CACzC,MAAM,WAAW,MAAM,CAAC,IAAI,GAAG,CAAC,QAAQ,SAAS;AACjD,MAAK,MAAM,KAAK,SACd,MAAK,MAAM,YAAY;EAAC;EAAa;EAAa;EAAY,EAAE;EAC9D,MAAM,YAAY,qCAAqC,MAAM,GAAG,KAAK,GAAG,EAAE,GAAG,WAAW;AAExF,OADY,MAAM,OAAO,IAAI,UAAU,CAAC,YAAY,KAAK,GAChD,GACP,QAAO;;AAIb,QAAO;;AAkFT,eAAsB,mBAAmB,KAAqC;AAE5E,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,MAAM,WAAW,cAAc,IAAI;AACnC,MAAI,CAACE,WAAa,SAAS,CACzB,QAAO;AACT,SAAOC,aAAe,UAAU,QAAQ;;AAG1C,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,IAAI,OAAO,IAAI,QAAQ,WAAW,GAAG;EACrC,IAAI,MAAM;EAGV,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,MAAI,UAAU,IAAI;AAChB,SAAM,KAAK,MAAM,QAAQ,EAAE;AAC3B,UAAO,KAAK,MAAM,GAAG,MAAM;;EAG7B,MAAM,QAAQ,KAAK,MAAM,IAAI;EAC7B,MAAM,QAAQ,MAAM;EACpB,MAAM,OAAO,MAAM;EACnB,MAAM,SAAS,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;EAMvC,MAAM,OAAO,MAAM,OAJH,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,OAAO,cAC9D,yBAAyB,MAAM,GAAG,KAAK,cAAc,OAEtB,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;AAC9E,MAAI,CAAC,KACH,QAAO;AAET,MAAI;GACF,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,UAAO,KAAK,YAAY,KAAK,MAAM,YAAY;UAE3C;AACJ,UAAO;;;AAIX,QAAO,UAAU,IAAI;;AAOvB,eAAsB,kBACpB,OACA,MACA,YACiC;AACjC,cAAa,yBAAyB;CAGtC,MAAM,UAAU,sBAAsB,MAAM,GAAG;CAC/C,IAAI;CACJ,IAAI;AAEJ,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAQ;GAAmD,EAAE;GACtI,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,MAAM;GACR,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,cAAW,KAAK,YAAY,KAAA;AAC5B,iBAAc,KAAK,eAAe,KAAA;;SAGhC;AAGR,KAAI,CAAC,YAAY,CAAC,aAAa;EAC7B,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,aAAW,MAAM,YAAY,KAAA;AAC7B,gBAAc,MAAM,eAAe,KAAA;;AAIrC,cAAa,0BAA0B;CACvC,MAAM,eAAe,MAAM,OACzB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CAEnB,IAAI,UAAU;CACd,IAAI;CACJ,MAAM,gBAAgB,cAAc,WAAW;AAC/C,KAAI,eAAe;AAEjB,YAAU,cAAc,IAAI,QAAQ,MAAM,GAAG;AAC7C,eAAa,cAAc;;AAI7B,cAAa,iBAAiB;CAC9B,MAAM,UAAU,MAAM,aAAa,OAAO,MAAM,QAAQ;CACxD,MAAM,aAAa,UAAU,GAAG,QAAQ,QAAQ,QAAQ,IAAI,SAAS,KAAA;CACrE,MAAM,SAAS,
SAAS;AAGxB,cAAa,kBAAkB;CAC/B,MAAM,YAAY,MAAM,YAAY,OAAO,KAAK;CAGhD,IAAI;AACJ,KAAI,UAAU;AACZ,eAAa,oBAAoB;AACjC,YAAU,MAAM,aAAa,SAAS,CAAC,YAAY,KAAK,IAAI,KAAA;;AAI9D,KAAI,CAAC,cAAc,CAAC,aAAa,CAAC,QAChC,QAAO;AAET,QAAO;EACL,MAAM;EACN,SAAS,gBAAgB,UAAU,KAAA;EACnC;EACA;EACA;EACA,SAAS;EACT;EACA;EACA,iBAAiB,SAAS;EAC1B,WAAW,aAAa,KAAA;EACxB;EACD;;AC/tBH,eAAsB,kBAAkB,OAAe,OAAO,GAA4E;CACxI,MAAM,OAAO,MAAM,OAEhB,+CAA+C,mBAAmB,MAAM,CAAC,QAAQ,OAAO,CAAC,YAAY,KAAK;AAE7G,KAAI,CAAC,MAAM,SAAS,OAClB,QAAO,EAAE;AAEX,QAAO,KAAK,QAAQ,KAAI,OAAM;EAC5B,MAAM,EAAE,QAAQ;EAChB,aAAa,EAAE,QAAQ;EACvB,SAAS,EAAE,QAAQ;EACpB,EAAE;;AAML,eAAsB,gBAAgB,aAAqD;CAEzF,MAAM,OAAO,MAAM,OAAuB,qBAAqB,YAAY,eAAe,CAAC,YAAY,KAAK;AAC5G,KAAI,KACF,QAAO;AAGT,QAAO,OAAuB,8BAA8B,YAAY,SAAS,CAAC,YAAY,KAAK;;AAgBrG,eAAsB,qBAAqB,aAAqB,SAA2C;CACzG,MAAM,OAAO,MAAM,OAGhB,8BAA8B,cAAc,CAAC,YAAY,KAAK;AAEjE,KAAI,CAAC,KACH,QAAO,EAAE;CAGX,MAAM,WAAoD,KAAK,eAC3D,OAAO,YACL,OAAO,QAAQ,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK,SAAS,CACpD,KACA;EAAE,SAAS;EAAK,YAAY,KAAK,OAAO;EAAM,CAC/C,CAAC,CACH,GACD,KAAA;AAEJ,QAAO;EACL,YAAY,KAAK,OAAO,YAAY,KAAA;EACpC;EACD;;AAkBH,eAAe,cACb,IACA,eACA,KACA,QACA,UACA,YACA,MAC+B;CAC/B,IAAI;AAGJ,KAAI,eAAe;AACjB,eAAa,cAAc;EAC3B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,eAAe,IAAI,MAAM,MAAM,WAAW;AAChG,MAAI,SAAS;AACX,UAAO,aAAa,QAAQ;AAC5B,UAAO,SAAS,QAAQ;AACxB,UAAO,kBAAkB,QAAQ;AACjC,cAAW,QAAQ;AACnB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,QAAQ;IACb,QAAQ;IACR,SAAS,QAAQ,WACb,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ,IAAI,gBAAgB,cAAc,KACnF,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ;IACtD,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,OAAO,QAAQ,SAAS,cAAc;GAC9C,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,CAAC,OAAO,SAAS;AACnB,eAAa,cAAc;EAC3B,MAAM,WAAW,MAAM,oBAAoB,GAAG,OAAO,GAAG,MAAM,IAAI,KAAK;AACvE,MAAI,UAAU,YAAY,CAAC,iBAAiB,SAAS,SAAS,EAAE;AAC9D,UAAO,UAAU,SAAS;AAC1B,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,OAAO;IACZ,QAAQ;IACR,SAAS,mBAAmB,SAAS;IACtC,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,OAAO;GACZ,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,cAAa,SAAS;CACtB,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,MAAM,QAAQ,OAAO,OAAO;AACnF,KAAI,WAAW;AACb,SAAO,YAAY;AACnB,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK;GACL,QAAQ;GACT,CAAC;OAGF,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,GAAG,OAAO,QAAQ;EACvB,QAAQ;EACR,SAAS;EACV,CAAC;AAGJ,QAAO;;AAMT,eAAsB,mBAAmB,aAAqB,UAA0B,EAAE,EAAmC;AAE3H,SADe,MAAM,+BAA+B,aAAa,QAAQ,EAC3D;;AAMhB,eAAsB,+BAA+B,aAAqB,UAA0B,EAAE,EAA0B;CAC9H,MAAM,WAA6B,EAAE;CACrC,MAAM,EAAE,eAAe;AAEvB,cAAa,MAAM;CACnB,MAAM,MAAM,MAAM,gBAAgB,YAAY;AAC9C,KAAI,CAAC,KAAK;AACR,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK,8BAA8B,YAAY;GAC/C,QAAQ;GACR,SAAS;GACV,CAAC;AACF,SAAO;GAAE,SAAS;GAAM;GAAU;;AAGpC,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,8BAA8B,YAAY;EAC/C,QAAQ;EACR,SAAS,SAAS,IAAI,KAAK,GAAG,IAAI;EACnC,CAAC;CAGF,MAAM,eAAe,IAAI,UACrB,MAAM,qBAAqB,aAAa,IAAI,QAAQ,GACpD,EAAE;CAEN,MAAM,SAA0B;EAC9B,MAAM,IAAI;EACV,SAAS,IAAI;EACb,YAAY,aAAa;EACzB,aAAa,IAAI;EACjB,cAAc,IAAI;EAClB,UAAU,aAAa;EACxB;CAGD,IAAI;CAGJ,IAAI;CACJ,IAAI;AACJ,KAAI,OAAO,IAAI,eAAe,YAAY,IAAI,YAAY,KAAK;AAC7D,eAAa,IAAI,WAAW;EAC5B,MAAM,aAAa,iBAAiB,WAAW;AAE/C,MAAI,CAAC,WAAW,SAAS,MAAM,IAAI,WAAW,SAAS,IAAI,IAAI,CAAC,WAAW,SAAS,IAAI,CACtF,QAAO,UAAU,sBAAsB;MAEvC,QAAO,UAAU;AACnB,WAAS,IAAI,WAAW;YAEjB,OAAO,IAAI,eAAe,SACjC,KAAI,IAAI,WAAW,SAAS,MAAM,EAAE;EAElC,MAAM,KAAK,eAAe,IAAI,WAAW;AACzC,MAAI,GACF,QAAO,UAAU,sBAAsB,GAAG,MAAM,GAAG,GAAG;QAErD;EAEH,MAAM,OAAO,IAAI,WAAW,QAAQ,YAAY,GAAG;AACnD,MAAI,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,SAAS,IAAI,CAC3C,QAAO,UAAU,sBAAsB;;AAK7C,KAAI,IAAI,YAAY,CAAC,gBAAgB,IAAI,SAAS,IAAI,CAAC,iBAAiB,IAAI,SAAS,CACnF,QAAO,UAAU,IAAI;AAIvB,KAAI,OAAO,SAAS,SAAS,aAAa,EAAE;EAC1C,MAAM,KAAK,eAAe,OAAO,QAAQ;AACzC,MAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,Y
AAY;GAAE;GAAY;GAAQ,CAAC;YAG9G,CAAC,OAAO,SAAS;AAExB,eAAa,gBAAgB;EAC7B,MAAM,cAAc,MAAM,iBAAiB,IAAI,KAAK;AACpD,MAAI,aAAa;AACf,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS,4BAA4B;IACtC,CAAC;GAEF,MAAM,KAAK,eAAe,YAAY;AACtC,OAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,WAAW;QAI7F,UAAS,KAAK;GACZ,QAAQ;GACR,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,SAAS;AAClB,eAAa,WAAW;EACxB,MAAM,UAAU,MAAM,aAAa,OAAO,QAAQ;AAClD,MAAI,SAAS;AACX,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACT,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,IAAI,IAAI,OAAO,QAAQ,CAAC,OAAO;GACvC,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,cAAc,OAAO,WAAW,iBAAiB;EAC1D,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ;AACtD,MAAI,eAAe,YAAY,MAAM,SAAS,GAAG;GAC/C,MAAM,aAAa,wBAAwB,YAAY,OAAO,gBAAgB;AAC9E,OAAI,CAAC,WAAW,SAAS;AACvB,aAAS,KAAK;KACZ,QAAQ;KACR,KAAK,OAAO;KACZ,QAAQ;KACR,SAAS,kDAAkD,KAAK,MAAM,WAAW,aAAa,IAAI,CAAC;KACpG,CAAC;AACF,WAAO,aAAa,KAAA;AACpB,WAAO,SAAS,KAAA;;;;AAMtB,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,cAAc,QAAQ,KAAK;AAChG,eAAa,QAAQ;EACrB,MAAM,SAAS,KAAK,QAAQ,KAAK,gBAAgB,YAAY;EAE7D,MAAM,aAAa,WAAW,OAAO,IAAI,YAAY,OAAO,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC/F,MAAI,YAAY;GACd,MAAM,aAAa,KAAK,QAAQ,WAAW;AAC3C,UAAO,YAAY,cAAc,WAAW,CAAC;AAC7C,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS;IACV,CAAC;;;AAKN,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,WACrE,QAAO;EAAE,SAAS;EAAM;EAAU;AAGpC,QAAO;EAAE,SAAS;EAAQ;EAAU;;AAMtC,SAAgB,sBACd,MACA,SACA,KACwB;AAExB,KAAI,QAAQ,WAAW,QAAQ,EAAE;EAE/B,MAAM,gBAAgB,KADL,QAAQ,KAAK,QAAQ,MAAM,EAAE,CAAC,EACV,eAAe;AACpD,MAAI,WAAW,cAAc,EAAE;GAC7B,MAAM,YAAY,KAAK,MAAM,aAAa,eAAe,QAAQ,CAAC;AAClE,UAAO;IACL,MAAM,UAAU,QAAQ;IACxB,SAAS,UAAU,WAAW;IAC/B;;AAEH,SAAO;;AAIT,KAAI,QAAQ,WAAW,OAAO,EAAE;EAC9B,MAAM,YAAY,QAAQ,MAAM,EAAE;EAClC,MAAM,UAAU,UAAU,WAAW,IAAI,GACrC,UAAU,QAAQ,KAAK,EAAE,GACzB,UAAU,QAAQ,IAAI;EAC1B,MAAM,WAAW,UAAU,IAAI,UAAU,MAAM,GAAG,QAAQ,GAAG;AAC7D,SAAO;GAAE,MAAM;GAAU,SAAS,wBAAwB,UAAU,IAAI,IAAI;GAAK;;AAInF,KAAI,QAAQ,WAAW,QAAQ,IAAI,QAAQ,WAAW,OAAO,IAAI,QAAQ,WAAW,OAAO,CACzF,QAAO;CAKT,MAAM,YAAY,wBAAwB,MAAM,IAAI;AACpD,KAAI,UACF,QAAO;EAAE;EAAM,SAAS;EAAW;AAGrC,KAAI,cAAc,KAAK,QAAQ,CAC7B,QAAO;EAAE;EAAM,SAAS,QAAQ,QAAQ,aAAa,GAAA;EAAK;AAI5D,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,aAAa,CACpE,QAAO;EAAE;EAAM,SAAS;EAAK;AAE/B,QAAO;;AAOT,SAAgB,wBAAwB,MAAc,KAA4B;AAChF,KAAI;EACF,MAAM,WAAW,gBAAgB,GAAG,KAAK,gBAAgB,EAAE,KAAK,KAAK,CAAC;AAEtE,SADY,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC,CAC5C,WAAW;SAElB;AAGJ,MAAI;GAEF,IAAI,MAAM,QADI,gBAAgB,MAAM,EAAE,KAAK,KAAK,CAAC,CACzB;AACxB,UAAO,OAAO,SAAS,IAAI,KAAK,gBAAgB;IAC9C,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,QAAI,WAAW,QAAQ,CAErB,QADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;AAExB,UAAM,QAAQ,IAAI;;UAGhB;AACN,SAAO;;;AAOX,eAAsB,sBAAsB,KAAyC;CACnF,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,KAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CACtD,MAAM,OAA+B;EACnC,GAAG,IAAI;EACP,GAAG,IAAI;EACR;CAED,MAAM,UAA6B,EAAE;AAErC,MAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QAAQ,KAAK,EAAE;EAClD,MAAM,SAAS,sBAAsB,MAAM,SAAS,IAAI;AACxD,MAAI,OACF,SAAQ,KAAK,OAAO;;AAIxB,QAAO;;AAcT,SAAgB,qBAAqB,WAA4C;CAC/E,MAAM,UAAU,KAAK,WAAW,eAAe;AAC/C,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;CAET,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CAEtD,IAAI;AACJ,KAAI,IAAI,YAAY,IAClB,WAAU,iBAAiB,IAAI,WAAW,IAAI;UAEvC,OAAO,IAAI,eAAe,SACjC,WAAU,iBAAiB,IAAI,WAAW;AAG5C,QAAO;EACL,MAAM,IAAI;EACV,SAAS,IAAI,WAAW;EACxB,aAAa,IAAI;EACjB;EACA;EACD;;AAMH,eAAsB,wBAAwB,WAAoD;CAChG,MAAM,OAAO,qBAAqB,UAAU;AAC5C,KAAI,CAAC,KACH,QAAO;CAET,MAAM,SAA0B;EAC9B,MAAM,KAAK;EACX,SAAS,KAAK;EACd,aAAa,KAAK;EAClB,SAAS,KAAK;EACf;AAGD,KAAI,KAAK
,SAAS,SAAS,aAAa,EAAE;EACxC,MAAM,KAAK,eAAe,KAAK,QAAQ;AACvC,MAAI,IAAI;GAEN,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,KAAK,SAAS,KAAK,KAAK;AAC9E,OAAI,SAAS;AACX,WAAO,aAAa,QAAQ;AAC5B,WAAO,SAAS,QAAQ;AACxB,WAAO,kBAAkB,QAAQ;;GAInC,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,KAAA,GAAW,OAAO,OAAO;AAChF,OAAI,UACF,QAAO,YAAY;;;AAMzB,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,YAAY;EAC3C,MAAM,aAAa,YAAY,UAAU,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC5E,MAAI,WACF,QAAO,YAAY,cAAc,KAAK,WAAW,WAAW,CAAC,CAAC;;AAIlE,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,WAC/B,QAAO;AAGT,QAAO;;AAUT,eAAsB,aAAa,MAAc,SAAyC;CACxF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,SAAS,KAAK,UAAU,MAAM;AAGpC,KAAI,WAAW,KAAK,QAAQ,eAAe,CAAC,CAC1C,QAAO;CAGT,MAAM,OAAO,MAAM,OACjB,8BAA8B,KAAK,GAAG,UACvC,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,KACH,QAAO;CACT,MAAM,aAAa,KAAK,MAAM;AAC9B,KAAI,CAAC,WACH,QAAO;CAGT,MAAM,aAAa,MAAM,MAAM,YAAY,EACzC,SAAS,EAAE,cAAc,cAAc,EACxC,CAAC,CAAC,YAAY,KAAK;AAEpB,KAAI,CAAC,YAAY,MAAM,CAAC,WAAW,KACjC,QAAO;AAET,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;CAEtC,MAAM,aAAa,KAAK,UAAU,WAAW;CAC7C,MAAM,aAAa,kBAAkB,WAAW;CAGhD,MAAM,SAAS,WAAW,KAAK,WAAW;AAC1C,OAAM,IAAI,SAAe,KAAK,WAAW;EACvC,MAAM,WAAW,IAAI,SAAS,EAC5B,MAAM,OAAO,WAAW,UAAU;AAChC,cAAW,MAAM,OAAO,SAAS;KAEpC,CAAC;AACF,WAAS,GAAG,gBAAgB;AAC1B,cAAW,KAAK;AAChB,QAAK;IACL;AACF,WAAS,GAAG,SAAS,OAAO;EAE5B,SAAS,OAAO;AACd,UAAO,MAAM,CAAC,MAAM,EAAE,MAAM,YAAY;AACtC,QAAI,MAAM;AACR,cAAS,KAAK;AACd;;AAEF,aAAS,MAAM,aAAa,MAAM,CAAC;KACnC,CAAC,MAAM,OAAO;;AAElB,QAAM;GACN;CAGF,MAAM,EAAE,WAAW,UAAU,OAAO;EAAC;EAAO;EAAY;EAAwB;EAAM;EAAO,EAAE,EAAE,OAAO,UAAU,CAAC;AACnH,KAAI,WAAW,GAAG;AAChB,SAAO,QAAQ;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAChD,SAAO,YAAY,EAAE,OAAO,MAAM,CAAC;AACnC,SAAO;;AAGT,YAAW,WAAW;AACtB,QAAO;;AAMT,eAAsB,mBAAmB,aAA6C;AAIpF,SAHa,MAAM,OACjB,qBAAqB,YAAY,eAClC,CAAC,YAAY,KAAK,GACN,WAAW;;AAM1B,SAAgB,yBAAyB,UAAiC;CACxE,MAAM,YAAY,KAAK,UAAU,WAAW;AAC5C,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;AAIT,QAFgB,aAAa,WAAW,QAAQ,CAC1B,MAAM,6BAA6B,GAC1C,MAAM"}
|
|
1
|
+
{"version":3,"file":"npm.mjs","names":["_semverGt","hasCodeBlock","truncateBody","COMMENT_NOISE_RE","fsExistsSync","fsReadFileSync"],"sources":["../../src/sources/github-common.ts","../../src/core/shared.ts","../../src/sources/issues.ts","../../src/sources/utils.ts","../../src/sources/releases.ts","../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/llms.ts","../../src/sources/github.ts","../../src/sources/npm.ts"],"sourcesContent":["/**\n * Shared constants and helpers for GitHub source modules (issues, discussions, releases)\n */\n\nexport const BOT_USERS = new Set([\n 'renovate[bot]',\n 'dependabot[bot]',\n 'renovate-bot',\n 'dependabot',\n 'github-actions[bot]',\n])\n\n/** Extract YYYY-MM-DD date from an ISO timestamp */\nexport const isoDate = (iso: string) => iso.split('T')[0]\n\n/** Build YAML frontmatter from a key-value object, auto-quoting strings with special chars */\nexport function buildFrontmatter(fields: Record<string, string | number | boolean | undefined>): string {\n const lines = ['---']\n for (const [k, v] of Object.entries(fields)) {\n if (v !== undefined)\n lines.push(`${k}: ${typeof v === 'string' && /[:\"[\\]]/.test(v) ? `\"${v.replace(/\"/g, '\\\\\"')}\"` : v}`)\n }\n lines.push('---')\n return lines.join('\\n')\n}\n","import { existsSync } from 'node:fs'\nimport { join } from 'pathe'\nimport { gt as _semverGt } from 'semver'\n\n/** Get-or-create for Maps. Polyfill for Map.getOrInsertComputed (not yet in Node.js). */\nexport function mapInsert<K, V>(map: Map<K, V>, key: K, create: () => V): V {\n let val = map.get(key)\n if (val === undefined) {\n val = create()\n map.set(key, val)\n }\n return val\n}\n\n/** Compare two semver strings: returns true if a > b. Handles prereleases. */\nexport function semverGt(a: string, b: string): boolean {\n return _semverGt(a, b, true)\n}\n\nexport const SHARED_SKILLS_DIR = '.skills'\n\n/** Returns the shared skills directory path if `.skills/` exists at project root, else null */\nexport function getSharedSkillsDir(cwd: string = process.cwd()): string | null {\n const dir = join(cwd, SHARED_SKILLS_DIR)\n return existsSync(dir) ? 
dir : null\n}\n","/**\n * GitHub issues fetching via gh CLI Search API\n * Freshness-weighted scoring, type quotas, comment quality filtering\n * Categorized by labels, noise filtered out, non-technical issues detected\n */\n\nimport { spawnSync } from 'node:child_process'\n\nimport { mapInsert } from '../core/shared.ts'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common.ts'\n\nexport type IssueType = 'bug' | 'question' | 'docs' | 'feature' | 'other'\n\nexport interface IssueComment {\n body: string\n author: string\n reactions: number\n isMaintainer?: boolean\n}\n\nexport interface GitHubIssue {\n number: number\n title: string\n state: string\n labels: string[]\n body: string\n createdAt: string\n url: string\n reactions: number\n comments: number\n type: IssueType\n topComments: IssueComment[]\n /** Freshness-weighted score: reactions * decay(age) */\n score: number\n /** For closed issues: version where fix landed, if detectable */\n resolvedIn?: string\n}\n\nlet _ghAvailable: boolean | undefined\n\n/**\n * Check if gh CLI is installed and authenticated (cached)\n */\nexport function isGhAvailable(): boolean {\n if (_ghAvailable !== undefined)\n return _ghAvailable\n const { status } = spawnSync('gh', ['auth', 'status'], { stdio: 'ignore' })\n return (_ghAvailable = status === 0)\n}\n\n/** Labels that indicate noise — filter these out entirely */\nconst NOISE_LABELS = new Set([\n 'duplicate',\n 'stale',\n 'invalid',\n 'wontfix',\n 'won\\'t fix',\n 'spam',\n 'off-topic',\n 'needs triage',\n 'triage',\n])\n\n/** Labels that indicate feature requests — deprioritize */\nconst FEATURE_LABELS = new Set([\n 'enhancement',\n 'feature',\n 'feature request',\n 'feature-request',\n 'proposal',\n 'rfc',\n 'idea',\n 'suggestion',\n])\n\nconst BUG_LABELS = new Set([\n 'bug',\n 'defect',\n 'regression',\n 'error',\n 'crash',\n 'fix',\n 'confirmed',\n 'verified',\n])\n\nconst QUESTION_LABELS = new Set([\n 'question',\n 'help wanted',\n 'support',\n 'usage',\n 'how-to',\n 'help',\n 'assistance',\n])\n\nconst DOCS_LABELS = new Set([\n 'documentation',\n 'docs',\n 'doc',\n 'typo',\n])\n\n/**\n * Check if a label contains any keyword from a set.\n * Handles emoji-prefixed labels like \":sparkles: feature request\" or \":lady_beetle: bug\".\n */\nfunction labelMatchesAny(label: string, keywords: Set<string>): boolean {\n for (const keyword of keywords) {\n if (label === keyword || label.includes(keyword))\n return true\n }\n return false\n}\n\n/**\n * Classify an issue by its labels into a type useful for skill generation\n */\nexport function classifyIssue(labels: string[]): IssueType {\n const lower = labels.map(l => l.toLowerCase())\n if (lower.some(l => labelMatchesAny(l, BUG_LABELS)))\n return 'bug'\n if (lower.some(l => labelMatchesAny(l, QUESTION_LABELS)))\n return 'question'\n if (lower.some(l => labelMatchesAny(l, DOCS_LABELS)))\n return 'docs'\n if (lower.some(l => labelMatchesAny(l, FEATURE_LABELS)))\n return 'feature'\n return 'other'\n}\n\n/**\n * Check if an issue should be filtered out entirely\n */\nfunction isNoiseIssue(issue: { labels: string[], title: string, body: string }): boolean {\n const lower = issue.labels.map(l => l.toLowerCase())\n if (lower.some(l => labelMatchesAny(l, NOISE_LABELS)))\n return true\n // Tracking/umbrella issues — low signal for skill generation\n if (issue.title.startsWith('☂️') || issue.title.startsWith('[META]') || issue.title.startsWith('[Tracking]'))\n return true\n return false\n}\n\n/** Check if body contains a code block */\nfunction 
hasCodeBlock(text: string): boolean {\n return /```[\\s\\S]*?```/.test(text) || /`[^`]+`/.test(text)\n}\n\n/**\n * Detect non-technical issues: fan mail, showcases, sentiment.\n * Short body + no code + high reactions = likely non-technical.\n * Note: roadmap/tracking issues are NOT filtered — they get score-boosted instead.\n */\nexport function isNonTechnical(issue: { body: string, title: string, reactions: number }): boolean {\n const body = (issue.body || '').trim()\n // Very short body with no code — probably sentiment/meta\n if (body.length < 200 && !hasCodeBlock(body) && issue.reactions > 50)\n return true\n // Sentiment patterns (love letters, fan mail)\n if (/\\b(?:love|thank|awesome|great work)\\b/i.test(issue.title) && !hasCodeBlock(body))\n return true\n return false\n}\n\n/**\n * Freshness-weighted score: reactions * decay(age_in_years)\n * Steep decay so recent issues dominate over old high-reaction ones.\n * At 0.6: 1yr=0.63x, 2yr=0.45x, 4yr=0.29x, 6yr=0.22x\n */\nexport function freshnessScore(reactions: number, createdAt: string): number {\n const ageMs = Date.now() - new Date(createdAt).getTime()\n const ageYears = ageMs / (365.25 * 24 * 60 * 60 * 1000)\n return reactions * (1 / (1 + ageYears * 0.6))\n}\n\n/**\n * Type quotas — guarantee a mix of issue types.\n * Bugs and questions get priority; feature requests are hard-capped.\n */\nfunction applyTypeQuotas(issues: GitHubIssue[], limit: number): GitHubIssue[] {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n mapInsert(byType, issue.type, () => []).push(issue)\n }\n\n // Sort each group by score\n for (const group of byType.values())\n group.sort((a, b) => b.score - a.score)\n\n // Allocate slots: bugs 40%, questions 30%, docs 15%, features 10%, other 5%\n const quotas: [IssueType, number][] = [\n ['bug', Math.ceil(limit * 0.40)],\n ['question', Math.ceil(limit * 0.30)],\n ['docs', Math.ceil(limit * 0.15)],\n ['feature', Math.ceil(limit * 0.10)],\n ['other', Math.ceil(limit * 0.05)],\n ]\n\n const selected: GitHubIssue[] = []\n const used = new Set<number>()\n let remaining = limit\n\n // First pass: fill each type up to its quota\n for (const [type, quota] of quotas) {\n const group = byType.get(type) || []\n const take = Math.min(quota, group.length, remaining)\n for (let i = 0; i < take; i++) {\n selected.push(group[i]!)\n used.add(group[i]!.number)\n remaining--\n }\n }\n\n // Second pass: fill remaining slots from best-scored unused issues (any type except feature)\n if (remaining > 0) {\n const unused = issues\n .filter(i => !used.has(i.number) && i.type !== 'feature')\n .sort((a, b) => b.score - a.score)\n for (const issue of unused) {\n if (remaining <= 0)\n break\n selected.push(issue)\n remaining--\n }\n }\n\n return selected.sort((a, b) => b.score - a.score)\n}\n\n/**\n * Body truncation limit based on reactions — high-reaction issues deserve more space\n */\nfunction bodyLimit(reactions: number): number {\n if (reactions >= 10)\n return 2000\n if (reactions >= 5)\n return 1500\n return 800\n}\n\n/**\n * Smart body truncation — preserves code blocks and error messages.\n * Instead of slicing at a char limit, finds a safe break point.\n */\nfunction truncateBody(body: string, limit: number): string {\n if (body.length <= limit)\n return body\n\n // Find code block boundaries so we don't cut mid-block\n const codeBlockRe = /```[\\s\\S]*?```/g\n let lastSafeEnd = limit\n let match: RegExpExecArray | null\n\n // eslint-disable-next-line no-cond-assign\n while ((match = 
codeBlockRe.exec(body)) !== null) {\n const blockStart = match.index\n const blockEnd = blockStart + match[0].length\n\n // If the limit falls inside a code block, move limit to after the block\n // (if not too far) or before the block\n if (blockStart < limit && blockEnd > limit) {\n if (blockEnd <= limit + 500) {\n // Block ends reasonably close — include it\n lastSafeEnd = blockEnd\n }\n else {\n // Block is too long — cut before it\n lastSafeEnd = blockStart\n }\n break\n }\n }\n\n // Try to break at a paragraph boundary\n const slice = body.slice(0, lastSafeEnd)\n const lastParagraph = slice.lastIndexOf('\\n\\n')\n if (lastParagraph > lastSafeEnd * 0.6)\n return `${slice.slice(0, lastParagraph)}\\n\\n...`\n\n return `${slice}...`\n}\n\n/**\n * Fetch issues for a state using GitHub Search API sorted by reactions\n */\nfunction fetchIssuesByState(\n owner: string,\n repo: string,\n state: 'open' | 'closed',\n count: number,\n releasedAt?: string,\n fromDate?: string,\n): GitHubIssue[] {\n const fetchCount = Math.min(count * 3, 100)\n let datePart = ''\n if (fromDate) {\n // Explicit lower bound: only issues from this date onward\n datePart = state === 'closed'\n ? `+closed:>=${fromDate}`\n : `+created:>=${fromDate}`\n }\n else if (state === 'closed') {\n if (releasedAt) {\n // For older versions, include issues closed up to 6 months after release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+closed:<=${isoDate(date.toISOString())}`\n }\n else {\n datePart = `+closed:>${oneYearAgo()}`\n }\n }\n else if (releasedAt) {\n // For older versions, only include issues created around or before release\n const date = new Date(releasedAt)\n date.setMonth(date.getMonth() + 6)\n datePart = `+created:<=${isoDate(date.toISOString())}`\n }\n\n const q = `repo:${owner}/${repo}+is:issue+is:${state}${datePart}`\n\n const { stdout: result } = spawnSync('gh', [\n 'api',\n `search/issues?q=${q}&sort=reactions&order=desc&per_page=${fetchCount}`,\n '-q',\n '.items[] | {number, title, state, labels: [.labels[]?.name], body, createdAt: .created_at, url: .html_url, reactions: .reactions[\"+1\"], comments: .comments, user: .user.login, userType: .user.type, authorAssociation: .author_association}',\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return []\n\n return result\n .trim()\n .split('\\n')\n .filter(Boolean)\n .map(line => JSON.parse(line) as GitHubIssue & { user: string, userType: string, authorAssociation: string })\n .filter(issue => !BOT_USERS.has(issue.user) && issue.userType !== 'Bot')\n .filter(issue => !isNoiseIssue(issue))\n .filter(issue => !isNonTechnical(issue))\n .map(({ user: _, userType: __, authorAssociation, ...issue }) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(authorAssociation)\n const isRoadmap = /\\broadmap\\b/i.test(issue.title) || issue.labels.some(l => /roadmap/i.test(l))\n return {\n ...issue,\n type: classifyIssue(issue.labels),\n topComments: [] as IssueComment[],\n score: freshnessScore(issue.reactions, issue.createdAt) * (isMaintainer && isRoadmap ? 
5 : 1),\n }\n })\n .sort((a, b) => b.score - a.score)\n .slice(0, count)\n}\n\nfunction oneYearAgo(): string {\n const d = new Date()\n d.setFullYear(d.getFullYear() - 1)\n return isoDate(d.toISOString())!\n}\n\n/** Noise patterns in comments — filter these out */\nconst COMMENT_NOISE_RE = /^(?:\\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue)[\\s!?.]*$/i\n\n/**\n * Batch-fetch top comments for issues via GraphQL.\n * Enriches the top N highest-score issues with their best comments.\n * Prioritizes: comments with code blocks, from maintainers, with high reactions.\n * Filters out \"+1\", \"any updates?\", \"same here\" noise.\n */\nfunction enrichWithComments(owner: string, repo: string, issues: GitHubIssue[], topN = 15): void {\n // Only fetch comments for issues worth enriching\n const worth = issues\n .filter(i => i.comments > 0 && (i.type === 'bug' || i.type === 'question' || i.reactions >= 3))\n .sort((a, b) => b.score - a.score)\n .slice(0, topN)\n\n if (worth.length === 0)\n return\n\n // Build a single GraphQL query fetching comments for all selected issues\n // Fetch more comments (10) so we can filter noise and pick the best\n const fragments = worth.map((issue, i) =>\n `i${i}: issue(number: ${issue.number}) { comments(first: 10) { nodes { body author { login } authorAssociation reactions { totalCount } } } }`,\n ).join(' ')\n\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { ${fragments} } }`\n\n try {\n const { stdout: result } = spawnSync('gh', [\n 'api',\n 'graphql',\n '-f',\n `query=${query}`,\n '-f',\n `owner=${owner}`,\n '-f',\n `repo=${repo}`,\n ], { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 })\n\n if (!result)\n return\n\n const data = JSON.parse(result)\n const repo_ = data?.data?.repository\n if (!repo_)\n return\n\n for (let i = 0; i < worth.length; i++) {\n const nodes = repo_[`i${i}`]?.comments?.nodes\n if (!Array.isArray(nodes))\n continue\n\n const issue = worth[i]!\n\n const comments: (IssueComment & { _score: number })[] = nodes\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .filter((c: any) => !COMMENT_NOISE_RE.test((c.body || '').trim()))\n .map((c: any) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(c.authorAssociation)\n const body = c.body || ''\n const reactions = c.reactions?.totalCount || 0\n // Score: maintainers get 3x, code blocks get 2x, reactions add linearly\n const _score = (isMaintainer ? 3 : 1) * (hasCodeBlock(body) ? 
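/*
 * Sketch: pretty-printed shape of the batched comments query for two
 * hypothetical issues #12 and #97 (the real string is built on one line):
 *
 *   query($owner: String!, $repo: String!) {
 *     repository(owner: $owner, name: $repo) {
 *       i0: issue(number: 12) { comments(first: 10) { nodes { body author { login } authorAssociation reactions { totalCount } } } }
 *       i1: issue(number: 97) { comments(first: 10) { nodes { body author { login } authorAssociation reactions { totalCount } } } }
 *     }
 *   }
 *
 * One round-trip covers up to topN issues; results come back under the same
 * i0/i1 aliases.
 */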
2 : 1) * (1 + reactions)\n return { body, author: c.author.login, reactions, isMaintainer, _score }\n })\n .sort((a: any, b: any) => b._score - a._score)\n\n // Take top 3 quality comments\n issue.topComments = comments.slice(0, 3).map(({ _score: _, ...c }) => c)\n\n // For closed issues: try to detect fix version from maintainer comments\n if (issue.state === 'closed') {\n issue.resolvedIn = detectResolvedVersion(comments)\n }\n }\n }\n catch {\n // Non-critical — issues still useful without comments\n }\n}\n\n/**\n * Try to detect which version fixed a closed issue from maintainer comments.\n * Looks for version patterns in maintainer/collaborator comments.\n */\nfunction detectResolvedVersion(comments: IssueComment[]): string | undefined {\n const maintainerComments = comments.filter(c => c.isMaintainer)\n // Check from last to first (fix announcements tend to be later)\n for (const c of maintainerComments.reverse()) {\n // \"Fixed in v5.2\", \"landed in 4.1.0\", \"released in v3.0\", \"available in 2.1\"\n const match = c.body.match(/(?:fixed|landed|released|available|shipped|resolved|included)\\s+in\\s+v?(\\d+\\.\\d+(?:\\.\\d+)?)/i)\n if (match)\n return match[1]\n // \"v5.2.0\" or \"5.2.0\" at start of a short comment (release note style)\n if (c.body.length < 100) {\n const vMatch = c.body.match(/\\bv?(\\d+\\.\\d+\\.\\d+)\\b/)\n if (vMatch)\n return vMatch[1]\n }\n }\n return undefined\n}\n\n/**\n * Fetch issues from a GitHub repo with freshness-weighted scoring and type quotas.\n * Returns a balanced mix: bugs > questions > docs > other > features.\n * Filters noise, non-technical content, and enriches with quality comments.\n */\nexport async function fetchGitHubIssues(\n owner: string,\n repo: string,\n limit = 30,\n releasedAt?: string,\n fromDate?: string,\n): Promise<GitHubIssue[]> {\n if (!isGhAvailable())\n return []\n\n const openCount = Math.ceil(limit * 0.75)\n const closedCount = limit - openCount\n\n try {\n // Fetch more than needed so type quotas have a pool to draw from\n const open = fetchIssuesByState(owner, repo, 'open', Math.min(openCount * 2, 100), releasedAt, fromDate)\n const closed = fetchIssuesByState(owner, repo, 'closed', Math.min(closedCount * 2, 50), releasedAt, fromDate)\n const all = [...open, ...closed]\n const selected = applyTypeQuotas(all, limit)\n enrichWithComments(owner, repo, selected)\n return selected\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single issue as markdown with YAML frontmatter\n */\nexport function formatIssueAsMarkdown(issue: GitHubIssue): string {\n const limit = bodyLimit(issue.reactions)\n const fmFields: Record<string, string | number | boolean | undefined> = {\n number: issue.number,\n title: issue.title,\n type: issue.type,\n state: issue.state,\n created: isoDate(issue.createdAt),\n url: issue.url,\n reactions: issue.reactions,\n comments: issue.comments,\n }\n if (issue.resolvedIn)\n fmFields.resolvedIn = issue.resolvedIn\n if (issue.labels.length > 0)\n fmFields.labels = `[${issue.labels.join(', ')}]`\n const fm = buildFrontmatter(fmFields)\n\n const lines = [fm, '', `# ${issue.title}`]\n\n if (issue.body) {\n const body = truncateBody(issue.body, limit)\n lines.push('', body)\n }\n\n if (issue.topComments.length > 0) {\n lines.push('', '---', '', '## Top Comments')\n for (const c of issue.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const maintainer = c.isMaintainer ? 
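/*
 * Sketch: comment bodies detectResolvedVersion() recognizes (all invented):
 *
 *   "Fixed in v5.2"                        → "5.2"
 *   "This landed in 4.1.0, please upgrade" → "4.1.0"
 *   "v3.0.1"        (short, < 100 chars)   → "3.0.1"
 *   "See the changelog"                    → undefined
 *
 * The reverse() scan checks newest comments first, since fix announcements
 * are usually the last maintainer comment on a closed issue.
 */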
' [maintainer]' : ''\n const commentBody = truncateBody(c.body, 600)\n lines.push('', `**@${c.author}**${maintainer}${reactions}:`, '', commentBody)\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all issues for quick LLM scanning.\n * Groups by type so the LLM can quickly find bugs vs questions.\n */\nexport function generateIssueIndex(issues: GitHubIssue[]): string {\n const byType = new Map<IssueType, GitHubIssue[]>()\n for (const issue of issues) {\n mapInsert(byType, issue.type, () => []).push(issue)\n }\n\n const typeLabels: Record<IssueType, string> = {\n bug: 'Bugs & Regressions',\n question: 'Questions & Usage Help',\n docs: 'Documentation',\n feature: 'Feature Requests',\n other: 'Other',\n }\n\n const typeOrder: IssueType[] = ['bug', 'question', 'docs', 'other', 'feature']\n\n const fm = [\n '---',\n `total: ${issues.length}`,\n `open: ${issues.filter(i => i.state === 'open').length}`,\n `closed: ${issues.filter(i => i.state !== 'open').length}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Issues Index', '']\n\n for (const type of typeOrder) {\n const group = byType.get(type)\n if (!group?.length)\n continue\n sections.push(`## ${typeLabels[type]} (${group.length})`, '')\n for (const issue of group) {\n const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : ''\n const state = issue.state === 'open' ? '' : ' [closed]'\n const resolved = issue.resolvedIn ? ` [fixed in ${issue.resolvedIn}]` : ''\n const date = isoDate(issue.createdAt)\n sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${state}${resolved} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Shared utilities for doc resolution\n */\n\nimport { ofetch } from 'ofetch'\n\nexport const $fetch = ofetch.create({\n retry: 3,\n retryDelay: 500,\n timeout: 15_000,\n headers: { 'User-Agent': 'skilld/1.0' },\n})\n\n/**\n * Fetch text content from URL\n */\nexport async function fetchText(url: string): Promise<string | null> {\n return $fetch(url, { responseType: 'text' }).catch(() => null)\n}\n\n/**\n * Verify URL exists and is not HTML (likely 404 page)\n */\nexport async function verifyUrl(url: string): Promise<boolean> {\n const res = await $fetch.raw(url, { method: 'HEAD' }).catch(() => null)\n if (!res)\n return false\n const contentType = res.headers.get('content-type') || ''\n return !contentType.includes('text/html')\n}\n\n/**\n * Check if URL points to a social media or package registry site (not real docs)\n */\nconst USELESS_HOSTS = new Set([\n 'twitter.com',\n 'x.com',\n 'facebook.com',\n 'linkedin.com',\n 'youtube.com',\n 'instagram.com',\n 'npmjs.com',\n 'www.npmjs.com',\n 'yarnpkg.com',\n])\n\nexport function isUselessDocsUrl(url: string): boolean {\n try {\n const { hostname } = new URL(url)\n return USELESS_HOSTS.has(hostname)\n }\n catch { return false }\n}\n\n/**\n * Check if URL is a GitHub repo URL (not a docs site)\n */\nexport function isGitHubRepoUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n return parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com'\n }\n catch {\n return false\n }\n}\n\n/**\n * Parse owner/repo from GitHub URL\n */\nexport function parseGitHubUrl(url: string): { owner: string, repo: string } | null {\n const match = url.match(/github\\.com\\/([^/]+)\\/([^/]+?)(?:\\.git)?(?:[/#]|$)/)\n if (!match)\n return null\n return { owner: match[1]!, repo: match[2]! 
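/*
 * Sketch: inputs the URL regex accepts (owner/repo are hypothetical):
 *
 *   parseGitHubUrl('https://github.com/acme/widget')         → { owner: 'acme', repo: 'widget' }
 *   parseGitHubUrl('git+https://github.com/acme/widget.git') → { owner: 'acme', repo: 'widget' }
 *   parseGitHubUrl('https://github.com/acme/widget#readme')  → { owner: 'acme', repo: 'widget' }
 *   parseGitHubUrl('https://example.com/acme/widget')        → null
 */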
}\n}\n\n/**\n * Normalize git repo URL to https\n */\nexport function normalizeRepoUrl(url: string): string {\n return url\n .replace(/^git\\+/, '')\n .replace(/#.*$/, '')\n .replace(/\\.git$/, '')\n .replace(/^git:\\/\\//, 'https://')\n .replace(/^ssh:\\/\\/git@github\\.com/, 'https://github.com')\n // SSH format: git@github.com:owner/repo\n .replace(/^git@github\\.com:/, 'https://github.com/')\n}\n\n/**\n * Parse package spec with optional dist-tag or version: \"vue@beta\" → { name: \"vue\", tag: \"beta\" }\n * Handles scoped packages: \"@vue/reactivity@beta\" → { name: \"@vue/reactivity\", tag: \"beta\" }\n */\nexport function parsePackageSpec(spec: string): { name: string, tag?: string } {\n // Scoped: @scope/pkg@tag — find the second @\n if (spec.startsWith('@')) {\n const slashIdx = spec.indexOf('/')\n if (slashIdx !== -1) {\n const atIdx = spec.indexOf('@', slashIdx + 1)\n if (atIdx !== -1)\n return { name: spec.slice(0, atIdx), tag: spec.slice(atIdx + 1) }\n }\n return { name: spec }\n }\n // Unscoped: pkg@tag\n const atIdx = spec.indexOf('@')\n if (atIdx !== -1)\n return { name: spec.slice(0, atIdx), tag: spec.slice(atIdx + 1) }\n return { name: spec }\n}\n\n/**\n * Extract branch hint from URL fragment (e.g. \"git+https://...#main\" → \"main\")\n */\nexport function extractBranchHint(url: string): string | undefined {\n const hash = url.indexOf('#')\n if (hash === -1)\n return undefined\n const fragment = url.slice(hash + 1)\n // Ignore non-branch fragments like \"readme\"\n if (!fragment || fragment === 'readme')\n return undefined\n return fragment\n}\n","/**\n * GitHub release notes fetching via gh CLI (preferred) with ungh.cc fallback\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { isoDate } from './github-common.ts'\nimport { isGhAvailable } from './issues.ts'\nimport { $fetch } from './utils.ts'\n\nexport interface GitHubRelease {\n id: number\n tag: string\n name: string\n prerelease: boolean\n createdAt: string\n publishedAt: string\n markdown: string\n}\n\ninterface UnghReleasesResponse {\n releases: GitHubRelease[]\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\nexport interface SemVer {\n major: number\n minor: number\n patch: number\n raw: string\n}\n\nexport function parseSemver(version: string): SemVer | null {\n const clean = version.replace(/^v/, '')\n const match = clean.match(/^(\\d+)(?:\\.(\\d+))?(?:\\.(\\d+))?/)\n if (!match)\n return null\n return {\n major: +match[1]!,\n minor: match[2] ? +match[2] : 0,\n patch: match[3] ? 
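/*
 * Sketch: rewrites normalizeRepoUrl() applies, using invented URLs:
 *
 *   'git+https://github.com/acme/widget.git#main' → 'https://github.com/acme/widget'
 *   'git://github.com/acme/widget'                → 'https://github.com/acme/widget'
 *   'ssh://git@github.com/acme/widget'            → 'https://github.com/acme/widget'
 *   'git@github.com:acme/widget'                  → 'https://github.com/acme/widget'
 */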
+match[3] : 0,\n raw: clean,\n }\n}\n\n/**\n * Extract version from a release tag, handling monorepo formats:\n * - `pkg@1.2.3` → `1.2.3`\n * - `pkg-v1.2.3` → `1.2.3`\n * - `v1.2.3` → `1.2.3`\n * - `1.2.3` → `1.2.3`\n */\nfunction extractVersion(tag: string, packageName?: string): string | null {\n if (packageName) {\n // Monorepo: pkg@version or pkg-vversion\n const atMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}@(.+)$`))\n if (atMatch)\n return atMatch[1]!\n const dashMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}-v?(.+)$`))\n if (dashMatch)\n return dashMatch[1]!\n }\n // Standard: v1.2.3 or 1.2.3\n return tag.replace(/^v/, '')\n}\n\nfunction escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n}\n\n/**\n * Check if a release tag belongs to a specific package\n */\nfunction tagMatchesPackage(tag: string, packageName: string): boolean {\n // Exact match: pkg@version or pkg-vversion\n return tag.startsWith(`${packageName}@`) || tag.startsWith(`${packageName}-v`) || tag.startsWith(`${packageName}-`)\n}\n\n/**\n * Check if a version string contains a prerelease suffix (e.g. 6.0.0-beta, 1.2.3-rc.1)\n */\nexport function isPrerelease(version: string): boolean {\n return /^\\d+\\.\\d+\\.\\d+-.+/.test(version.replace(/^v/, ''))\n}\n\nexport function compareSemver(a: SemVer, b: SemVer): number {\n if (a.major !== b.major)\n return a.major - b.major\n if (a.minor !== b.minor)\n return a.minor - b.minor\n return a.patch - b.patch\n}\n\n/**\n * Fetch releases via gh CLI (fast, authenticated, paginated)\n */\nfunction fetchReleasesViaGh(owner: string, repo: string): GitHubRelease[] {\n try {\n const { stdout: ndjson } = spawnSync('gh', [\n 'api',\n `repos/${owner}/${repo}/releases`,\n '--paginate',\n '--jq',\n '.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}',\n ], { encoding: 'utf-8', timeout: 30_000, stdio: ['ignore', 'pipe', 'ignore'] })\n if (!ndjson)\n return []\n return ndjson.trim().split('\\n').filter(Boolean).map(line => JSON.parse(line))\n }\n catch {\n return []\n }\n}\n\n/**\n * Fetch all releases from a GitHub repo via ungh.cc (fallback)\n */\nasync function fetchReleasesViaUngh(owner: string, repo: string): Promise<GitHubRelease[]> {\n const data = await $fetch<UnghReleasesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n { signal: AbortSignal.timeout(15_000) },\n ).catch(() => null)\n return data?.releases ?? []\n}\n\n/**\n * Fetch all releases — gh CLI first, ungh.cc fallback\n */\nasync function fetchAllReleases(owner: string, repo: string): Promise<GitHubRelease[]> {\n if (isGhAvailable()) {\n const releases = fetchReleasesViaGh(owner, repo)\n if (releases.length > 0)\n return releases\n }\n return fetchReleasesViaUngh(owner, repo)\n}\n\n/**\n * Select last 20 stable releases for a package, sorted newest first.\n * For monorepos, filters to package-specific tags (pkg@version).\n * Falls back to generic tags (v1.2.3) only if no package-specific found.\n * If installedVersion is provided, filters out releases newer than it.\n */\nexport function selectReleases(releases: GitHubRelease[], packageName?: string, installedVersion?: string, fromDate?: string): GitHubRelease[] {\n // Check if this looks like a monorepo (has package-prefixed tags)\n const hasMonorepoTags = packageName && releases.some(r => tagMatchesPackage(r.tag, packageName))\n const installedSv = installedVersion ? 
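/*
 * Sketch: monorepo tag handling, using a hypothetical `pinia` package:
 *
 *   extractVersion('pinia@2.1.7', 'pinia')    → '2.1.7'
 *   extractVersion('pinia-v2.1.7', 'pinia')   → '2.1.7'
 *   extractVersion('v2.1.7')                  → '2.1.7'
 *   tagMatchesPackage('pinia@2.1.7', 'pinia') → true
 *   tagMatchesPackage('v2.1.7', 'pinia')      → false
 *
 *   isPrerelease('6.0.0-beta.3') → true
 *   isPrerelease('6.0.0')        → false
 */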
parseSemver(installedVersion) : null\n const installedIsPrerelease = installedVersion ? isPrerelease(installedVersion) : false\n const fromTs = fromDate ? new Date(fromDate).getTime() : null\n\n const filtered = releases.filter((r) => {\n const ver = extractVersion(r.tag, hasMonorepoTags ? packageName : undefined)\n if (!ver)\n return false\n\n const sv = parseSemver(ver)\n if (!sv)\n return false\n\n // Monorepo: only include tags for this package\n if (hasMonorepoTags && packageName && !tagMatchesPackage(r.tag, packageName))\n return false\n\n // Date lower bound: skip releases published before fromDate\n if (fromTs) {\n const pubDate = r.publishedAt || r.createdAt\n if (pubDate && new Date(pubDate).getTime() < fromTs)\n return false\n }\n\n // Prerelease handling: include only when installed is also prerelease and same major.minor\n if (r.prerelease) {\n if (!installedIsPrerelease || !installedSv)\n return false\n return sv.major === installedSv.major && sv.minor === installedSv.minor\n }\n\n // Filter out stable releases newer than installed version\n if (installedSv && compareSemver(sv, installedSv) > 0)\n return false\n\n return true\n })\n\n const sorted = filtered\n .sort((a, b) => {\n const verA = extractVersion(a.tag, hasMonorepoTags ? packageName : undefined)\n const verB = extractVersion(b.tag, hasMonorepoTags ? packageName : undefined)\n if (!verA || !verB)\n return 0\n return compareSemver(parseSemver(verB)!, parseSemver(verA)!)\n })\n\n // No cap when fromDate is set — include all matching releases\n return fromDate ? sorted : sorted.slice(0, 20)\n}\n\n/**\n * Format a release as markdown with YAML frontmatter\n */\nfunction formatRelease(release: GitHubRelease, packageName?: string): string {\n const date = isoDate(release.publishedAt || release.createdAt)\n const version = extractVersion(release.tag, packageName) || release.tag\n\n const fm = [\n '---',\n `tag: ${release.tag}`,\n `version: ${version}`,\n `published: ${date}`,\n ]\n if (release.name && release.name !== release.tag)\n fm.push(`name: \"${release.name.replace(/\"/g, '\\\\\"')}\"`)\n fm.push('---')\n\n return `${fm.join('\\n')}\\n\\n# ${release.name || release.tag}\\n\\n${release.markdown}`\n}\n\nexport interface ReleaseIndexOptions {\n releases: GitHubRelease[]\n packageName?: string\n blogReleases?: Array<{ version: string, title: string, date: string }>\n hasChangelog?: boolean\n}\n\n/**\n * Generate a unified summary index of all releases for quick LLM scanning.\n * Includes GitHub releases, blog release posts, and CHANGELOG link.\n */\nexport function generateReleaseIndex(releasesOrOpts: GitHubRelease[] | ReleaseIndexOptions, packageName?: string): string {\n // Support both old signature and new options object\n const opts: ReleaseIndexOptions = Array.isArray(releasesOrOpts)\n ? { releases: releasesOrOpts, packageName }\n : releasesOrOpts\n\n const { releases, blogReleases, hasChangelog } = opts\n const pkg = opts.packageName\n\n const total = releases.length + (blogReleases?.length ?? 
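/*
 * Sketch: how the filter treats a few invented versions, depending on the
 * installed version:
 *
 *   installed 6.0.0-beta.2: 6.0.0-beta.1 kept    (prerelease, same major.minor)
 *                           6.1.0-alpha.1 dropped (different minor)
 *   installed 5.9.0:        6.0.0-beta.1 dropped  (prerelease, stable install)
 *                           6.0.0 dropped         (newer than installed)
 *                           5.8.2 kept
 */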
0)\n const fm = [\n '---',\n `total: ${total}`,\n `latest: ${releases[0]?.tag || 'unknown'}`,\n '---',\n ]\n\n const lines: string[] = [fm.join('\\n'), '', '# Releases Index', '']\n\n // Blog release posts (major version announcements)\n if (blogReleases && blogReleases.length > 0) {\n lines.push('## Blog Releases', '')\n for (const b of blogReleases) {\n lines.push(`- [${b.version}](./blog-${b.version}.md): ${b.title} (${b.date})`)\n }\n lines.push('')\n }\n\n // GitHub release notes\n if (releases.length > 0) {\n if (blogReleases && blogReleases.length > 0)\n lines.push('## Release Notes', '')\n for (const r of releases) {\n const date = isoDate(r.publishedAt || r.createdAt)\n const filename = r.tag.includes('@') || r.tag.startsWith('v') ? r.tag : `v${r.tag}`\n const version = extractVersion(r.tag, pkg) || r.tag\n const sv = parseSemver(version)\n const label = sv?.patch === 0 && sv.minor === 0 ? ' **[MAJOR]**' : sv?.patch === 0 ? ' **[MINOR]**' : ''\n lines.push(`- [${r.tag}](./${filename}.md): ${r.name || r.tag} (${date})${label}`)\n }\n lines.push('')\n }\n\n // CHANGELOG link\n if (hasChangelog) {\n lines.push('## Changelog', '')\n lines.push('- [CHANGELOG.md](./CHANGELOG.md)')\n lines.push('')\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Check if a single release is a stub redirecting to CHANGELOG.md.\n * Short body (<500 chars) that mentions CHANGELOG indicates no real content.\n */\nexport function isStubRelease(release: GitHubRelease): boolean {\n const body = (release.markdown || '').trim()\n return body.length < 500 && /changelog\\.md/i.test(body)\n}\n\n/**\n * Detect if releases are just short stubs redirecting to CHANGELOG.md.\n * Samples up to 3 releases — if all are stubs, it's a redirect pattern.\n */\nexport function isChangelogRedirectPattern(releases: GitHubRelease[]): boolean {\n const sample = releases.slice(0, 3)\n if (sample.length === 0)\n return false\n return sample.every(isStubRelease)\n}\n\n/**\n * Fetch CHANGELOG.md from a GitHub repo at a specific ref as fallback.\n * For monorepos, also checks packages/{shortName}/CHANGELOG.md.\n */\nasync function fetchChangelog(owner: string, repo: string, ref: string, packageName?: string): Promise<string | null> {\n const paths: string[] = []\n\n // Monorepo: try package-specific paths first (e.g. packages/pinia/CHANGELOG.md)\n if (packageName) {\n const shortName = packageName.replace(/^@.*\\//, '')\n const scopeless = packageName.replace(/^@/, '').replace('/', '-')\n const candidates = [...new Set([shortName, scopeless])]\n for (const name of candidates) {\n paths.push(`packages/${name}/CHANGELOG.md`)\n }\n }\n\n // Root-level changelog\n paths.push('CHANGELOG.md', 'changelog.md', 'CHANGES.md')\n\n for (const path of paths) {\n const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`\n const content = await $fetch(url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (content)\n return content\n }\n return null\n}\n\n/**\n * Fetch release notes for a package. Returns CachedDoc[] with releases/{tag}.md files.\n *\n * Strategy:\n * 1. Fetch GitHub releases, filter to package-specific tags for monorepos\n * 2. 
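/*
 * Sketch: label derivation in the index, plus the CHANGELOG paths tried for a
 * hypothetical scoped package '@acme/widget':
 *
 *   v3.0.0 → **[MAJOR]** (minor === 0 && patch === 0)
 *   v3.1.0 → **[MINOR]** (patch === 0)
 *   v3.1.2 → no label
 *
 *   fetchChangelog order: packages/widget/CHANGELOG.md,
 *   packages/acme-widget/CHANGELOG.md, then CHANGELOG.md /
 *   changelog.md / CHANGES.md at the repo root.
 */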
If no releases found, try CHANGELOG.md as fallback\n */\nexport async function fetchReleaseNotes(\n owner: string,\n repo: string,\n installedVersion: string,\n gitRef?: string,\n packageName?: string,\n fromDate?: string,\n changelogRef?: string,\n): Promise<CachedDoc[]> {\n const releases = await fetchAllReleases(owner, repo)\n const selected = selectReleases(releases, packageName, installedVersion, fromDate)\n\n if (selected.length > 0) {\n // Filter out individual stub releases that just say \"see CHANGELOG\"\n const substantive = selected.filter(r => !isStubRelease(r))\n\n const docs = substantive.map((r) => {\n const filename = r.tag.includes('@') || r.tag.startsWith('v')\n ? r.tag\n : `v${r.tag}`\n return {\n path: `releases/${filename}.md`,\n content: formatRelease(r, packageName),\n }\n })\n\n // Always fetch CHANGELOG.md alongside substantive releases\n const ref = changelogRef || gitRef || selected[0]!.tag\n const changelog = await fetchChangelog(owner, repo, ref, packageName)\n if (changelog && changelog.length < 500_000) {\n docs.push({ path: 'releases/CHANGELOG.md', content: changelog })\n }\n\n return docs\n }\n\n // Fallback: CHANGELOG.md (indexed as single file)\n const ref = changelogRef || gitRef || 'main'\n const changelog = await fetchChangelog(owner, repo, ref, packageName)\n if (!changelog)\n return []\n\n return [{ path: 'releases/CHANGELOG.md', content: changelog }]\n}\n","/**\n * Blog release notes fetching for packages with curated blog releases\n * Supports version filtering and extensible for multiple packages\n */\n\nimport type { BlogRelease } from './package-registry.ts'\nimport { htmlToMarkdown } from 'mdream'\nimport { getBlogPreset } from './package-registry.ts'\nimport { compareSemver, parseSemver } from './releases.ts'\nimport { $fetch } from './utils.ts'\n\nexport interface BlogReleasePost {\n version: string\n title: string\n date: string\n markdown: string\n url: string\n}\n\ninterface CachedDoc {\n path: string\n content: string\n}\n\n/**\n * Format a blog release as markdown with YAML frontmatter\n */\nfunction formatBlogRelease(release: BlogReleasePost): string {\n const fm = [\n '---',\n `version: ${release.version}`,\n `title: \"${release.title.replace(/\"/g, '\\\\\"')}\"`,\n `date: ${release.date}`,\n `url: ${release.url}`,\n `source: blog-release`,\n '---',\n ]\n\n return `${fm.join('\\n')}\\n\\n# ${release.title}\\n\\n${release.markdown}`\n}\n\n/**\n * Fetch and parse a single blog post using preset metadata for version/date\n */\nasync function fetchBlogPost(entry: BlogRelease): Promise<BlogReleasePost | null> {\n try {\n const html = await $fetch(entry.url, { responseType: 'text', signal: AbortSignal.timeout(10_000) }).catch(() => null)\n if (!html)\n return null\n\n // Extract title from <h1> or <title>, fallback to preset title\n let title = ''\n const titleMatch = html.match(/<h1[^>]*>([^<]+)<\\/h1>/)\n if (titleMatch)\n title = titleMatch[1]!.trim()\n\n if (!title) {\n const metaTitleMatch = html.match(/<title>([^<]+)<\\/title>/)\n if (metaTitleMatch)\n title = metaTitleMatch[1]!.trim()\n }\n\n const markdown = htmlToMarkdown(html)\n if (!markdown)\n return null\n\n return {\n version: entry.version,\n title: title || entry.title || `Release ${entry.version}`,\n date: entry.date,\n markdown,\n url: entry.url,\n }\n }\n catch {\n return null\n }\n}\n\n/**\n * Filter blog releases by installed version\n * Only includes releases where version <= installedVersion\n * Returns all releases if version parsing fails (fail-safe)\n */\nfunction 
filterBlogsByVersion(entries: BlogRelease[], installedVersion: string): BlogRelease[] {\n const installedSv = parseSemver(installedVersion)\n if (!installedSv)\n return entries // Fail-safe: include all if version parsing fails\n\n return entries.filter((entry) => {\n const entrySv = parseSemver(entry.version)\n if (!entrySv)\n return false\n // Include only releases where version <= installed version\n return compareSemver(entrySv, installedSv) <= 0\n })\n}\n\n/**\n * Fetch blog release notes from package presets\n * Filters to only releases matching or older than the installed version\n * Returns CachedDoc[] with releases/blog-{version}.md files\n */\nexport async function fetchBlogReleases(\n packageName: string,\n installedVersion: string,\n): Promise<CachedDoc[]> {\n const preset = getBlogPreset(packageName)\n if (!preset)\n return []\n\n const filteredReleases = filterBlogsByVersion(preset.releases, installedVersion)\n if (filteredReleases.length === 0)\n return []\n\n const releases: BlogReleasePost[] = []\n\n // Fetch all blog posts in parallel with 3 concurrent requests\n const batchSize = 3\n for (let i = 0; i < filteredReleases.length; i += batchSize) {\n const batch = filteredReleases.slice(i, i + batchSize)\n const results = await Promise.all(batch.map(entry => fetchBlogPost(entry)))\n for (const result of results) {\n if (result)\n releases.push(result)\n }\n }\n\n if (releases.length === 0)\n return []\n\n // Sort by version descending (newest first)\n releases.sort((a, b) => {\n const aVer = a.version.split('.').map(Number)\n const bVer = b.version.split('.').map(Number)\n for (let i = 0; i < Math.max(aVer.length, bVer.length); i++) {\n const diff = (bVer[i] ?? 0) - (aVer[i] ?? 0)\n if (diff !== 0)\n return diff\n }\n return 0\n })\n\n // Format as cached docs — stored in releases/ alongside regular releases\n return releases.map(r => ({\n path: `releases/blog-${r.version}.md`,\n content: formatBlogRelease(r),\n }))\n}\n","/**\n * GitHub discussions fetching via gh CLI GraphQL\n * Prioritizes Q&A and Help categories, includes accepted answers\n * Comment quality filtering, smart truncation, noise removal\n */\n\nimport { spawnSync } from 'node:child_process'\nimport { mapInsert } from '../core/shared.ts'\nimport { BOT_USERS, buildFrontmatter, isoDate } from './github-common.ts'\nimport { isGhAvailable } from './issues.ts'\n\n/** Categories most useful for skill generation (in priority order) */\nconst HIGH_VALUE_CATEGORIES = new Set([\n 'q&a',\n 'help',\n 'troubleshooting',\n 'support',\n])\n\nconst LOW_VALUE_CATEGORIES = new Set([\n 'show and tell',\n 'ideas',\n 'polls',\n])\n\nexport interface DiscussionComment {\n body: string\n author: string\n reactions: number\n isMaintainer?: boolean\n}\n\nexport interface GitHubDiscussion {\n number: number\n title: string\n body: string\n category: string\n createdAt: string\n url: string\n upvoteCount: number\n comments: number\n isMaintainer?: boolean\n answer?: string\n topComments: DiscussionComment[]\n}\n\n/** Noise patterns in comments — filter these out */\nconst COMMENT_NOISE_RE = /^(?:\\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue|thanks|thank you)[\\s!?.]*$/i\n\n/** Check if body contains a code block */\nfunction hasCodeBlock(text: string): boolean {\n return /```[\\s\\S]*?```/.test(text) || /`[^`]+`/.test(text)\n}\n\n/**\n * Smart body truncation — preserves code blocks and error messages.\n * Instead of slicing at a char limit, finds a safe break 
point.\n */\nfunction truncateBody(body: string, limit: number): string {\n if (body.length <= limit)\n return body\n\n // Find code block boundaries so we don't cut mid-block\n const codeBlockRe = /```[\\s\\S]*?```/g\n let lastSafeEnd = limit\n let match: RegExpExecArray | null\n\n // eslint-disable-next-line no-cond-assign\n while ((match = codeBlockRe.exec(body)) !== null) {\n const blockStart = match.index\n const blockEnd = blockStart + match[0].length\n\n if (blockStart < limit && blockEnd > limit) {\n if (blockEnd <= limit + 500) {\n lastSafeEnd = blockEnd\n }\n else {\n lastSafeEnd = blockStart\n }\n break\n }\n }\n\n // Try to break at a paragraph boundary\n const slice = body.slice(0, lastSafeEnd)\n const lastParagraph = slice.lastIndexOf('\\n\\n')\n if (lastParagraph > lastSafeEnd * 0.6)\n return `${slice.slice(0, lastParagraph)}\\n\\n...`\n\n return `${slice}...`\n}\n\n/** Off-topic or spam title patterns — instant reject */\nconst TITLE_NOISE_RE = /looking .*(developer|engineer|freelanc)|hiring|job post|guide me to (?:complete|finish|build)|help me (?:complete|finish|build)|seeking .* tutorial|recommend.* course/i\n\n/** Minimum score for a discussion to be included */\nconst MIN_DISCUSSION_SCORE = 3\n\n/**\n * Score a comment for quality. Higher = more useful for skill generation.\n * Maintainers 3x, code blocks 2x, reactions linear.\n */\nfunction scoreComment(c: { body: string, reactions: number, isMaintainer?: boolean }): number {\n return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions)\n}\n\n/**\n * Score a discussion for overall quality. Used for filtering and sorting.\n * Returns -1 for instant-reject (spam/off-topic).\n */\nexport function scoreDiscussion(d: GitHubDiscussion): number {\n if (TITLE_NOISE_RE.test(d.title))\n return -1\n\n let score = 0\n\n // Discussion authored by a maintainer — high signal\n if (d.isMaintainer)\n score += 3\n\n // Code presence — strongest signal for technical discussions\n const allText = [d.body, d.answer || '', ...d.topComments.map(c => c.body)].join('\\n')\n if (hasCodeBlock(allText))\n score += 3\n\n // Engagement\n score += Math.min(d.upvoteCount, 5)\n\n // Answer quality\n if (d.answer) {\n score += 2\n if (d.answer.length > 100)\n score += 1\n }\n\n // Maintainer involvement\n if (d.topComments.some(c => c.isMaintainer))\n score += 2\n\n // Community validation via reactions\n if (d.topComments.some(c => c.reactions > 0))\n score += 1\n\n return score\n}\n\n/**\n * Fetch discussions from a GitHub repo using gh CLI GraphQL.\n * Prioritizes Q&A and Help categories. Includes accepted answer body for answered discussions.\n * Fetches extra comments and scores them for quality.\n */\nexport async function fetchGitHubDiscussions(\n owner: string,\n repo: string,\n limit = 20,\n releasedAt?: string,\n fromDate?: string,\n): Promise<GitHubDiscussion[]> {\n if (!isGhAvailable())\n return []\n\n // GraphQL discussions endpoint doesn't support date filtering,\n // so we fetch latest N and filter client-side. 
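/*
 * Sketch: scoring a hypothetical discussion end to end:
 *
 *   title "How do I type a custom plugin?"  passes TITLE_NOISE_RE
 *   maintainer-authored: no                 +0
 *   a fenced code block somewhere           +3
 *   upvoteCount 7, capped at 5              +5
 *   accepted answer, 340 chars              +2 +1
 *   a maintainer in topComments             +2
 *   a comment with reactions > 0            +1
 *                                           = 14 (≥ MIN_DISCUSSION_SCORE)
 *
 *   title "Looking for a React developer"   → -1, rejected outright
 */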
Skip entirely\n // if the cutoff is in the past — results would be empty anyway.\n // (Skip this check when fromDate is set — we'll filter client-side below)\n if (!fromDate && releasedAt) {\n const cutoff = new Date(releasedAt)\n cutoff.setMonth(cutoff.getMonth() + 6)\n if (cutoff < new Date())\n return []\n }\n\n try {\n // Fetch more to compensate for filtering\n const fetchCount = Math.min(limit * 3, 80)\n // Fetch 10 comments per discussion so we can filter noise and pick best\n const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${fetchCount}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 10) { totalCount nodes { body author { login } authorAssociation reactions { totalCount } } } answer { body author { login } authorAssociation } author { login } authorAssociation } } } }`\n\n const { stdout: result } = spawnSync('gh', ['api', 'graphql', '-f', `query=${query}`, '-f', `owner=${owner}`, '-f', `repo=${repo}`], {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024,\n })\n if (!result)\n return []\n\n const data = JSON.parse(result)\n const nodes = data?.data?.repository?.discussions?.nodes\n if (!Array.isArray(nodes))\n return []\n\n const fromTs = fromDate ? new Date(fromDate).getTime() : null\n const discussions = nodes\n .filter((d: any) => d.author && !BOT_USERS.has(d.author.login))\n .filter((d: any) => {\n const cat = (d.category?.name || '').toLowerCase()\n return !LOW_VALUE_CATEGORIES.has(cat)\n })\n .filter((d: any) => !fromTs || new Date(d.createdAt).getTime() >= fromTs)\n .map((d: any) => {\n // Process answer — tag maintainer status\n let answer: string | undefined\n if (d.answer?.body) {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(d.answer.authorAssociation)\n const author = d.answer.author?.login\n const tag = isMaintainer && author ? `**@${author}** [maintainer]:\\n\\n` : ''\n answer = `${tag}${d.answer.body}`\n }\n\n // Process comments — filter noise, score for quality, take best 3\n const comments: DiscussionComment[] = (d.comments?.nodes || [])\n .filter((c: any) => c.author && !BOT_USERS.has(c.author.login))\n .filter((c: any) => !COMMENT_NOISE_RE.test((c.body || '').trim()))\n .map((c: any) => {\n const isMaintainer = ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(c.authorAssociation)\n return {\n body: c.body || '',\n author: c.author.login,\n reactions: c.reactions?.totalCount || 0,\n isMaintainer,\n }\n })\n .sort((a: DiscussionComment, b: DiscussionComment) => scoreComment(b) - scoreComment(a))\n .slice(0, 3)\n\n return {\n number: d.number,\n title: d.title,\n body: d.body || '',\n category: d.category?.name || '',\n createdAt: d.createdAt,\n url: d.url,\n upvoteCount: d.upvoteCount || 0,\n comments: d.comments?.totalCount || 0,\n isMaintainer: ['OWNER', 'MEMBER', 'COLLABORATOR'].includes(d.authorAssociation),\n answer,\n topComments: comments,\n }\n })\n // Score, filter low-quality, sort by category priority then score\n .map((d: GitHubDiscussion) => ({ d, score: scoreDiscussion(d) }))\n .filter(({ score }) => score >= MIN_DISCUSSION_SCORE)\n .sort((a, b) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.d.category.toLowerCase()) ? 1 : 0\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.d.category.toLowerCase()) ? 
1 : 0\n if (aHigh !== bHigh)\n return bHigh - aHigh\n return b.score - a.score\n })\n .slice(0, limit)\n .map(({ d }) => d)\n\n return discussions\n }\n catch {\n return []\n }\n}\n\n/**\n * Format a single discussion as markdown with YAML frontmatter\n */\nexport function formatDiscussionAsMarkdown(d: GitHubDiscussion): string {\n const fm = buildFrontmatter({\n number: d.number,\n title: d.title,\n category: d.category,\n created: isoDate(d.createdAt),\n url: d.url,\n upvotes: d.upvoteCount,\n comments: d.comments,\n answered: !!d.answer,\n })\n\n const bodyLimit = d.upvoteCount >= 5 ? 1500 : 800\n const lines = [fm, '', `# ${d.title}`]\n\n if (d.body) {\n lines.push('', truncateBody(d.body, bodyLimit))\n }\n\n if (d.answer) {\n lines.push('', '---', '', '## Accepted Answer', '', truncateBody(d.answer, 1000))\n }\n else if (d.topComments.length > 0) {\n // No accepted answer — include top comments as context\n lines.push('', '---', '', '## Top Comments')\n for (const c of d.topComments) {\n const reactions = c.reactions > 0 ? ` (+${c.reactions})` : ''\n const maintainer = c.isMaintainer ? ' [maintainer]' : ''\n lines.push('', `**@${c.author}**${maintainer}${reactions}:`, '', truncateBody(c.body, 600))\n }\n }\n\n return lines.join('\\n')\n}\n\n/**\n * Generate a summary index of all discussions for quick LLM scanning.\n * Groups by category so the LLM can quickly find Q&A vs general discussions.\n */\nexport function generateDiscussionIndex(discussions: GitHubDiscussion[]): string {\n const byCategory = new Map<string, GitHubDiscussion[]>()\n for (const d of discussions) {\n const cat = d.category || 'Uncategorized'\n mapInsert(byCategory, cat, () => []).push(d)\n }\n\n const answered = discussions.filter(d => d.answer).length\n\n const fm = [\n '---',\n `total: ${discussions.length}`,\n `answered: ${answered}`,\n '---',\n ]\n\n const sections: string[] = [fm.join('\\n'), '', '# Discussions Index', '']\n\n // Sort categories: high-value first\n const cats = [...byCategory.keys()].sort((a, b) => {\n const aHigh = HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1\n const bHigh = HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1\n return aHigh - bHigh || a.localeCompare(b)\n })\n\n for (const cat of cats) {\n const group = byCategory.get(cat)!\n sections.push(`## ${cat} (${group.length})`, '')\n for (const d of group) {\n const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : ''\n const answered = d.answer ? 
' [answered]' : ''\n const date = isoDate(d.createdAt)\n sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`)\n }\n sections.push('')\n }\n\n return sections.join('\\n')\n}\n","/**\n * Globs .d.ts type definition files from a package for search indexing.\n * Only types — source code is too verbose.\n */\nimport { existsSync, readFileSync } from 'node:fs'\nimport { globby } from 'globby'\nimport { join } from 'pathe'\n\nexport interface EntryFile {\n path: string\n content: string\n type: 'types' | 'source'\n}\n\nconst SKIP_DIRS = [\n 'node_modules',\n '_vendor',\n '__tests__',\n '__mocks__',\n '__fixtures__',\n 'test',\n 'tests',\n 'fixture',\n 'fixtures',\n 'locales',\n 'locale',\n 'i18n',\n '.git',\n]\n\nconst SKIP_PATTERNS = [\n '*.min.*',\n '*.prod.*',\n '*.global.*',\n '*.browser.*',\n '*.map',\n '*.map.js',\n 'CHANGELOG*',\n 'LICENSE*',\n 'README*',\n]\n\nconst MAX_FILE_SIZE = 500 * 1024 // 500KB per file\n\n/**\n * Glob .d.ts type definition files from a package directory, skipping junk.\n */\nexport async function resolveEntryFiles(packageDir: string): Promise<EntryFile[]> {\n if (!existsSync(join(packageDir, 'package.json')))\n return []\n\n const ignore = [\n ...SKIP_DIRS.map(d => `**/${d}/**`),\n ...SKIP_PATTERNS,\n ]\n\n const files = await globby(['**/*.d.{ts,mts,cts}'], {\n cwd: packageDir,\n ignore,\n absolute: false,\n })\n\n const entries: EntryFile[] = []\n\n for (const file of files) {\n const absPath = join(packageDir, file)\n let content: string\n try {\n content = readFileSync(absPath, 'utf-8')\n }\n catch {\n continue\n }\n\n if (content.length > MAX_FILE_SIZE)\n continue\n\n entries.push({ path: file, content, type: 'types' })\n }\n\n return entries\n}\n","/**\n * Git repo skill source — parse inputs + fetch pre-authored skills from repos\n *\n * Supports GitHub shorthand (owner/repo), full URLs, SSH, GitLab, and local paths.\n * Skills are pre-authored SKILL.md files — no doc resolution or LLM generation needed.\n */\n\nimport { existsSync, readdirSync, readFileSync } from 'node:fs'\nimport pLimit from 'p-limit'\nimport { resolve } from 'pathe'\nimport { yamlParseKV } from '../core/yaml.ts'\nimport { $fetch, normalizeRepoUrl, parseGitHubUrl } from './utils.ts'\n\nexport interface GitSkillSource {\n type: 'github' | 'gitlab' | 'git-ssh' | 'local'\n owner?: string\n repo?: string\n /** Direct path to a specific skill (from /tree/ref/path URLs) */\n skillPath?: string\n /** Branch/tag parsed from URL */\n ref?: string\n /** Absolute path for local sources */\n localPath?: string\n}\n\nexport interface RemoteSkill {\n /** From SKILL.md frontmatter `name` field, or directory name */\n name: string\n /** From SKILL.md frontmatter `description` field */\n description: string\n /** Path within repo (e.g., \"skills/web-design-guidelines\") */\n path: string\n /** Full SKILL.md content */\n content: string\n /** Supporting files (scripts/, references/, assets/) */\n files: Array<{ path: string, content: string }>\n}\n\n/**\n * Detect whether an input string is a git skill source.\n * Returns null for npm package names (including scoped @scope/pkg).\n */\nexport function parseGitSkillInput(input: string): GitSkillSource | null {\n const trimmed = input.trim()\n\n // Scoped npm packages → not git\n if (trimmed.startsWith('@'))\n return null\n\n // Local paths\n if (trimmed.startsWith('./') || trimmed.startsWith('../') || trimmed.startsWith('/') || trimmed.startsWith('~')) {\n const localPath = trimmed.startsWith('~')\n ? 
resolve(process.env.HOME || '', trimmed.slice(1).replace(/^\\//, '')) // '~/x' resolves to $HOME/x: strip the leading '/' so resolve() joins under HOME instead of treating the path as absolute\n : resolve(trimmed)\n return { type: 'local', localPath }\n }\n\n // SSH format: git@github.com:owner/repo\n if (trimmed.startsWith('git@')) {\n const normalized = normalizeRepoUrl(trimmed)\n const gh = parseGitHubUrl(normalized)\n if (gh)\n return { type: 'github', owner: gh.owner, repo: gh.repo }\n return null\n }\n\n // Full URLs\n if (trimmed.startsWith('https://') || trimmed.startsWith('http://')) {\n return parseGitUrl(trimmed)\n }\n\n // GitHub shorthand: owner/repo (exactly one slash, no spaces, no commas)\n if (/^[\\w.-]+\\/[\\w.-]+$/.test(trimmed)) {\n return { type: 'github', owner: trimmed.split('/')[0], repo: trimmed.split('/')[1] }\n }\n\n // Everything else → npm\n return null\n}\n\nfunction parseGitUrl(url: string): GitSkillSource | null {\n try {\n const parsed = new URL(url)\n\n if (parsed.hostname === 'github.com' || parsed.hostname === 'www.github.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n\n // Handle /tree/ref/path URLs → extract specific skill path\n if (parts[2] === 'tree' && parts.length >= 4) {\n const ref = parts[3]\n const skillPath = parts.length > 4 ? parts.slice(4).join('/') : undefined\n return { type: 'github', owner, repo, ref, skillPath }\n }\n\n return { type: 'github', owner, repo }\n }\n\n if (parsed.hostname === 'gitlab.com') {\n const parts = parsed.pathname.replace(/^\\//, '').replace(/\\.git$/, '').split('/')\n const owner = parts[0]\n const repo = parts[1]\n if (!owner || !repo)\n return null\n return { type: 'gitlab', owner, repo }\n }\n\n return null\n }\n catch {\n return null\n }\n}\n\n/**\n * Parse name and description from SKILL.md frontmatter.\n */\nexport function parseSkillFrontmatterName(content: string): { name?: string, description?: string } {\n const match = content.match(/^---\\n([\\s\\S]*?)\\n---/)\n if (!match)\n return {}\n\n const result: { name?: string, description?: string } = {}\n for (const line of match[1].split('\\n')) {\n const kv = yamlParseKV(line)\n if (!kv)\n continue\n if (kv[0] === 'name')\n result.name = kv[1]\n if (kv[0] === 'description')\n result.description = kv[1]\n }\n return result\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/** Supporting file dirs within a skill directory */\nconst SUPPORTING_DIRS = ['scripts', 'references', 'assets']\n\n/**\n * Fetch skills from a git source. 
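/*
 * Sketch: a /tree/ URL and the repo layout discovery expects (names invented):
 *
 *   https://github.com/acme/skills/tree/main/skills/web-design
 *     → { type: 'github', owner: 'acme', repo: 'skills',
 *         ref: 'main', skillPath: 'skills/web-design' }
 *
 *   skills/<name>/SKILL.md                        one skill per directory
 *   skills/<name>/{scripts,references,assets}/*   supporting files
 *   SKILL.md                                      or a single root-level skill
 */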
Returns list of discovered skills + commit SHA.\n */\nexport async function fetchGitSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n if (source.type === 'local')\n return fetchLocalSkills(source)\n if (source.type === 'github')\n return fetchGitHubSkills(source, onProgress)\n if (source.type === 'gitlab')\n return fetchGitLabSkills(source, onProgress)\n return { skills: [] }\n}\n\n// ── Local ──\n\nfunction fetchLocalSkills(source: GitSkillSource): { skills: RemoteSkill[] } {\n const base = source.localPath!\n if (!existsSync(base))\n return { skills: [] }\n\n const skills: RemoteSkill[] = []\n\n // Check for skills/ subdirectory\n const skillsDir = resolve(base, 'skills')\n if (existsSync(skillsDir)) {\n for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {\n if (!entry.isDirectory())\n continue\n const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`)\n if (skill)\n skills.push(skill)\n }\n }\n\n // Check for root SKILL.md\n if (skills.length === 0) {\n const skill = readLocalSkill(base, '')\n if (skill)\n skills.push(skill)\n }\n\n return { skills }\n}\n\nfunction readLocalSkill(dir: string, repoPath: string): RemoteSkill | null {\n const skillMdPath = resolve(dir, 'SKILL.md')\n if (!existsSync(skillMdPath))\n return null\n\n const content = readFileSync(skillMdPath, 'utf-8')\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = dir.split('/').pop()!\n const name = frontmatter.name || dirName\n\n const files: Array<{ path: string, content: string }> = []\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPath = resolve(dir, subdir)\n if (!existsSync(subdirPath))\n continue\n for (const file of readdirSync(subdirPath, { withFileTypes: true })) {\n if (!file.isFile())\n continue\n files.push({\n path: `${subdir}/${file.name}`,\n content: readFileSync(resolve(subdirPath, file.name), 'utf-8'),\n })\n }\n }\n\n return {\n name,\n description: frontmatter.description || '',\n path: repoPath,\n content,\n files,\n }\n}\n\n// ── GitHub ──\n\nasync function fetchGitHubSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n\n if (!data?.files?.length) {\n // Try 'master' fallback if default ref failed\n if (ref === 'main') {\n const fallback = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/master`,\n ).catch(() => null)\n if (fallback?.files?.length)\n return extractGitHubSkills(owner!, repo!, 'master', fallback, source.skillPath, onProgress)\n }\n return { skills: [] }\n }\n\n return extractGitHubSkills(owner!, repo!, ref, data, source.skillPath, onProgress)\n}\n\nasync function extractGitHubSkills(\n owner: string,\n repo: string,\n ref: string,\n data: UnghFilesResponse,\n skillPath?: string,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const allFiles = data.files.map(f => f.path)\n const commitSha = data.meta?.sha\n\n // Find SKILL.md files\n let skillMdPaths: string[]\n\n if (skillPath) {\n // Direct skill path: look for SKILL.md at that path\n const candidates = [\n 
`${skillPath}/SKILL.md`,\n // In case they linked directly to the SKILL.md\n skillPath.endsWith('/SKILL.md') ? skillPath : null,\n ].filter(Boolean) as string[]\n\n skillMdPaths = allFiles.filter(f => candidates.includes(f))\n }\n else {\n // Discover: skills/*/SKILL.md or root SKILL.md\n skillMdPaths = allFiles.filter(f =>\n f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md',\n )\n }\n\n if (skillMdPaths.length === 0)\n return { skills: [], commitSha }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? '' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitHub(owner, repo, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? `${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitHub(owner, repo, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills, commitSha }\n}\n\nasync function fetchRawGitHub(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n\n// ── GitLab ──\n\ninterface GitLabTreeEntry {\n id: string\n name: string\n type: string\n path: string\n mode: string\n}\n\nasync function fetchGitLabSkills(\n source: GitSkillSource,\n onProgress?: (msg: string) => void,\n): Promise<{ skills: RemoteSkill[], commitSha?: string }> {\n const { owner, repo } = source\n if (!owner || !repo)\n return { skills: [] }\n\n const ref = source.ref || 'main'\n const projectId = encodeURIComponent(`${owner}/${repo}`)\n\n onProgress?.(`Listing files at ${owner}/${repo}@${ref}`)\n\n const tree = await $fetch<GitLabTreeEntry[]>(\n `https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=${ref}&recursive=true&per_page=100`,\n ).catch(() => null)\n\n if (!tree?.length)\n return { skills: [] }\n\n const allFiles = tree.filter(e => e.type === 'blob').map(e => e.path)\n\n // Find SKILL.md files\n const skillMdPaths = source.skillPath\n ? allFiles.filter(f => f === `${source.skillPath}/SKILL.md`)\n : allFiles.filter(f => f.match(/^skills\\/[^/]+\\/SKILL\\.md$/) || f === 'SKILL.md')\n\n if (skillMdPaths.length === 0)\n return { skills: [] }\n\n const limit = pLimit(5)\n const skills: RemoteSkill[] = []\n\n onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`)\n\n await Promise.all(skillMdPaths.map(mdPath => limit(async () => {\n const skillDir = mdPath === 'SKILL.md' ? 
'' : mdPath.replace(/\\/SKILL\\.md$/, '')\n const content = await fetchRawGitLab(owner!, repo!, ref, mdPath)\n if (!content)\n return\n\n const frontmatter = parseSkillFrontmatterName(content)\n const dirName = skillDir ? skillDir.split('/').pop()! : repo!\n const name = frontmatter.name || dirName\n\n // Fetch supporting files\n const supportingFiles: Array<{ path: string, content: string }> = []\n const prefix = skillDir ? `${skillDir}/` : ''\n\n for (const subdir of SUPPORTING_DIRS) {\n const subdirPrefix = `${prefix}${subdir}/`\n const matching = allFiles.filter(f => f.startsWith(subdirPrefix))\n for (const filePath of matching) {\n const fileContent = await fetchRawGitLab(owner!, repo!, ref, filePath)\n if (fileContent) {\n const relativePath = filePath.slice(prefix.length)\n supportingFiles.push({ path: relativePath, content: fileContent })\n }\n }\n }\n\n skills.push({\n name,\n description: frontmatter.description || '',\n path: skillDir,\n content,\n files: supportingFiles,\n })\n })))\n\n return { skills }\n}\n\nasync function fetchRawGitLab(owner: string, repo: string, ref: string, path: string): Promise<string | null> {\n return $fetch(\n `https://gitlab.com/${owner}/${repo}/-/raw/${ref}/${path}`,\n { responseType: 'text' },\n ).catch(() => null)\n}\n","/**\n * llms.txt fetching and parsing\n */\n\nimport type { FetchedDoc, LlmsContent, LlmsLink } from './types.ts'\nimport pLimit from 'p-limit'\nimport { fetchText, verifyUrl } from './utils.ts'\n\n/**\n * Check for llms.txt at a docs URL, returns the llms.txt URL if found\n */\nexport async function fetchLlmsUrl(docsUrl: string): Promise<string | null> {\n const origin = new URL(docsUrl).origin\n const llmsUrl = `${origin}/llms.txt`\n if (await verifyUrl(llmsUrl))\n return llmsUrl\n return null\n}\n\n/**\n * Fetch and parse llms.txt content\n */\nexport async function fetchLlmsTxt(url: string): Promise<LlmsContent | null> {\n const content = await fetchText(url)\n if (!content || content.length < 50)\n return null\n\n return {\n raw: content,\n links: parseMarkdownLinks(content),\n }\n}\n\n/**\n * Parse markdown links from llms.txt to get .md file paths\n */\nexport function parseMarkdownLinks(content: string): LlmsLink[] {\n const links: LlmsLink[] = []\n const seen = new Set<string>()\n const linkRegex = /\\[([^\\]]+)\\]\\(([^)]+\\.md)\\)/g\n for (let match = linkRegex.exec(content); match !== null; match = linkRegex.exec(content)) {\n const url = match[2]!\n if (!seen.has(url)) {\n seen.add(url)\n links.push({ title: match[1]!, url })\n }\n }\n\n return links\n}\n\n/**\n * Download all .md files referenced in llms.txt\n */\n/** Reject non-https URLs and private/link-local IPs */\nfunction isSafeUrl(url: string): boolean {\n try {\n const parsed = new URL(url)\n if (parsed.protocol !== 'https:')\n return false\n const host = parsed.hostname\n // Reject private/link-local/loopback\n if (host === 'localhost' || host === '127.0.0.1' || host === '::1')\n return false\n if (host === '169.254.169.254') // cloud metadata\n return false\n if (/^(?:10\\.|172\\.(?:1[6-9]|2\\d|3[01])\\.|192\\.168\\.)/.test(host))\n return false\n if (host.startsWith('[')) // IPv6 link-local\n return false\n return true\n }\n catch { return false }\n}\n\nexport async function downloadLlmsDocs(\n llmsContent: LlmsContent,\n baseUrl: string,\n onProgress?: (url: string, index: number, total: number) => void,\n): Promise<FetchedDoc[]> {\n const limit = pLimit(5)\n let completed = 0\n\n const results = await Promise.all(\n llmsContent.links.map(link => 
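/*
 * Sketch: parseMarkdownLinks() on an invented llms.txt body:
 *
 *   parseMarkdownLinks('[Guide](/docs/guide.md)\n[API](https://acme.dev/api.md)\n[Guide](/docs/guide.md)')
 *     → [ { title: 'Guide', url: '/docs/guide.md' },
 *         { title: 'API', url: 'https://acme.dev/api.md' } ]  // duplicate dropped
 */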
limit(async () => {\n const url = link.url.startsWith('http')\n ? link.url\n : `${baseUrl.replace(/\\/$/, '')}${link.url.startsWith('/') ? '' : '/'}${link.url}`\n\n if (!isSafeUrl(url))\n return null\n\n onProgress?.(link.url, completed++, llmsContent.links.length)\n\n const content = await fetchText(url)\n if (content && content.length > 100)\n return { url: link.url, title: link.title, content } as FetchedDoc\n return null\n })),\n )\n\n return results.filter((d): d is FetchedDoc => d !== null)\n}\n\n/**\n * Normalize llms.txt links to relative paths for local access\n * Handles: absolute URLs, root-relative paths, and relative paths\n */\nexport function normalizeLlmsLinks(content: string, baseUrl?: string): string {\n let normalized = content\n\n // Handle absolute URLs: https://example.com/docs/foo.md → ./docs/foo.md\n if (baseUrl) {\n const base = baseUrl.replace(/\\/$/, '')\n const escaped = base.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n normalized = normalized.replace(\n new RegExp(`\\\\]\\\\(${escaped}(/[^)]+\\\\.md)\\\\)`, 'g'),\n '](./docs$1)',\n )\n }\n\n // Handle root-relative paths: /foo.md → ./docs/foo.md\n normalized = normalized.replace(/\\]\\(\\/([^)]+\\.md)\\)/g, '](./docs/$1)')\n\n return normalized\n}\n\n/**\n * Extract sections from llms-full.txt by URL patterns\n * Format: ---\\nurl: /path.md\\n---\\n<content>\\n\\n---\\nurl: ...\n */\nexport function extractSections(content: string, patterns: string[]): string | null {\n const sections: string[] = []\n const parts = content.split(/\\n---\\n/)\n\n for (const part of parts) {\n const urlMatch = part.match(/^url: *(\\S.*)$/m)\n if (!urlMatch)\n continue\n\n const url = urlMatch[1]!\n if (patterns.some(p => url.includes(p))) {\n const contentStart = part.indexOf('\\n', part.indexOf('url:'))\n if (contentStart > -1) {\n sections.push(part.slice(contentStart + 1))\n }\n }\n }\n\n if (sections.length === 0)\n return null\n return sections.join('\\n\\n---\\n\\n')\n}\n","/**\n * GitHub/ungh README resolution + versioned docs\n */\n\nimport type { LlmsLink, ResolvedPackage } from './types.ts'\nimport { spawnSync } from 'node:child_process'\nimport { existsSync as fsExistsSync, readFileSync as fsReadFileSync } from 'node:fs'\nimport { fileURLToPath } from 'node:url'\nimport { mapInsert } from '../core/shared.ts'\nimport { isGhAvailable } from './issues.ts'\nimport { fetchLlmsUrl } from './llms.ts'\nimport { getDocOverride } from './package-registry.ts'\nimport { $fetch, extractBranchHint, fetchText, parseGitHubUrl } from './utils.ts'\n\n/** Minimum git-doc file count to prefer over llms.txt */\nexport const MIN_GIT_DOCS = 5\n\n/** True when git-docs exist but are too few to be useful (< MIN_GIT_DOCS) */\nexport const isShallowGitDocs = (n: number) => n > 0 && n < MIN_GIT_DOCS\n\nexport interface GitDocsResult {\n /** URL pattern for fetching docs (use with ref) */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of doc file paths relative to repo root */\n files: string[]\n /** Prefix to strip when normalizing paths to docs/ (e.g. 
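/*
 * Sketch: normalizeLlmsLinks() rewrites, with an invented base URL:
 *
 *   normalizeLlmsLinks('[Guide](https://acme.dev/guide.md)', 'https://acme.dev')
 *     → '[Guide](./docs/guide.md)'
 *   normalizeLlmsLinks('[Guide](/guide.md)')
 *     → '[Guide](./docs/guide.md)'
 */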
'apps/evalite-docs/src/content/') for nested monorepo docs */\n docsPrefix?: string\n /** Full repo file tree — only set when discoverDocFiles() heuristic was used (not standard docs/ prefix) */\n allFiles?: string[]\n /** True when ref is a branch (main/master) rather than a version-specific tag */\n fallback?: boolean\n}\n\ninterface UnghFilesResponse {\n meta: { sha: string }\n files: Array<{ path: string, mode: string, sha: string, size: number }>\n}\n\n/**\n * List files at a git ref using ungh (no rate limits)\n */\nasync function listFilesAtRef(owner: string, repo: string, ref: string): Promise<string[]> {\n const data = await $fetch<UnghFilesResponse>(\n `https://ungh.cc/repos/${owner}/${repo}/files/${ref}`,\n ).catch(() => null)\n return data?.files?.map(f => f.path) ?? []\n}\n\ninterface TagResult {\n ref: string\n files: string[]\n /** True when ref is a branch fallback (main/master) rather than a version tag */\n fallback?: boolean\n}\n\n/**\n * Find git tag for a version by checking if ungh can list files at that ref.\n * Tries v{version}, {version}, and optionally {packageName}@{version} (changeset convention).\n */\nasync function findGitTag(owner: string, repo: string, version: string, packageName?: string, branchHint?: string): Promise<TagResult | null> {\n const candidates = [`v${version}`, version]\n if (packageName)\n candidates.push(`${packageName}@${version}`)\n\n for (const tag of candidates) {\n const files = await listFilesAtRef(owner, repo, tag)\n if (files.length > 0)\n return { ref: tag, files }\n }\n\n // Fallback: find latest release tag matching {packageName}@* (version mismatch in monorepos)\n if (packageName) {\n const latestTag = await findLatestReleaseTag(owner, repo, packageName)\n if (latestTag) {\n const files = await listFilesAtRef(owner, repo, latestTag)\n if (files.length > 0)\n return { ref: latestTag, files }\n }\n }\n\n // Last resort: try default branch (prefer hint from repo URL fragment)\n const branches = branchHint\n ? [branchHint, ...['main', 'master'].filter(b => b !== branchHint)]\n : ['main', 'master']\n for (const branch of branches) {\n const files = await listFilesAtRef(owner, repo, branch)\n if (files.length > 0)\n return { ref: branch, files, fallback: true }\n }\n\n return null\n}\n\n/**\n * Find the latest release tag matching `{packageName}@*` via ungh releases API.\n * Handles monorepos where npm version doesn't match git tag version.\n */\nasync function findLatestReleaseTag(owner: string, repo: string, packageName: string): Promise<string | null> {\n const data = await $fetch<{ releases?: Array<{ tag: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n const prefix = `${packageName}@`\n return data?.releases?.find(r => r.tag.startsWith(prefix))?.tag ?? 
null\n}\n\n/**\n * Filter file paths by prefix and md/mdx extension\n */\nfunction filterDocFiles(files: string[], pathPrefix: string): string[] {\n return files.filter(f => f.startsWith(pathPrefix) && /\\.(?:md|mdx)$/.test(f))\n}\n\n/** Known noise paths to exclude from doc discovery */\nconst NOISE_PATTERNS = [\n /^\\.changeset\\//,\n /CHANGELOG\\.md$/i,\n /CONTRIBUTING\\.md$/i,\n /^\\.github\\//,\n]\n\n/** Directories to exclude from \"best directory\" heuristic */\nconst EXCLUDE_DIRS = new Set([\n 'test',\n 'tests',\n '__tests__',\n 'fixtures',\n 'fixture',\n 'examples',\n 'example',\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n 'e2e',\n 'spec',\n 'mocks',\n '__mocks__',\n])\n\n/** Directory names that suggest documentation */\nconst DOC_DIR_BONUS = new Set([\n 'docs',\n 'documentation',\n 'pages',\n 'content',\n 'website',\n 'guide',\n 'guides',\n 'wiki',\n 'manual',\n 'api',\n])\n\ninterface DiscoveredDocs {\n files: string[]\n /** Prefix before 'docs/' to strip when normalizing (e.g. 'apps/evalite-docs/src/content/') */\n prefix: string\n}\n\n/**\n * Check if a path contains any excluded directory\n */\nfunction hasExcludedDir(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => EXCLUDE_DIRS.has(p.toLowerCase()))\n}\n\n/**\n * Get the depth of a path (number of directory levels)\n */\nfunction getPathDepth(path: string): number {\n return path.split('/').filter(Boolean).length\n}\n\n/**\n * Check if path contains a doc-related directory name\n */\nfunction hasDocDirBonus(path: string): boolean {\n const parts = path.split('/')\n return parts.some(p => DOC_DIR_BONUS.has(p.toLowerCase()))\n}\n\n/**\n * Score a directory for doc likelihood.\n * Higher = better. Formula: count * nameBonus / depth\n */\nfunction scoreDocDir(dir: string, fileCount: number): number {\n const depth = getPathDepth(dir) || 1\n const nameBonus = hasDocDirBonus(dir) ? 1.5 : 1\n return (fileCount * nameBonus) / depth\n}\n\n/**\n * Discover doc files in non-standard locations.\n * First tries to find clusters of md/mdx files in paths containing /docs/.\n * Falls back to finding the directory with the most markdown files (≥5).\n */\nfunction discoverDocFiles(allFiles: string[]): DiscoveredDocs | null {\n const mdFiles = allFiles\n .filter(f => /\\.(?:md|mdx)$/.test(f))\n .filter(f => !NOISE_PATTERNS.some(p => p.test(f)))\n .filter(f => f.includes('/'))\n\n // Strategy 1: Look for /docs/ clusters (existing behavior)\n const docsGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n const docsIdx = file.lastIndexOf('/docs/')\n if (docsIdx === -1)\n continue\n\n const prefix = file.slice(0, docsIdx + '/docs/'.length)\n mapInsert(docsGroups, prefix, () => []).push(file)\n }\n\n if (docsGroups.size > 0) {\n const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0]!\n if (largest[1].length >= 3) {\n const fullPrefix = largest[0]\n const docsIdx = fullPrefix.lastIndexOf('docs/')\n const stripPrefix = docsIdx > 0 ? 
fullPrefix.slice(0, docsIdx) : ''\n return { files: largest[1], prefix: stripPrefix }\n }\n }\n\n // Strategy 2: Find best directory by file count (for non-standard structures)\n const dirGroups = new Map<string, string[]>()\n\n for (const file of mdFiles) {\n if (hasExcludedDir(file))\n continue\n\n // Group by immediate parent directory\n const lastSlash = file.lastIndexOf('/')\n if (lastSlash === -1)\n continue\n\n const dir = file.slice(0, lastSlash + 1)\n mapInsert(dirGroups, dir, () => []).push(file)\n }\n\n if (dirGroups.size === 0)\n return null\n\n // Score and sort directories\n const scored = [...dirGroups.entries()]\n .map(([dir, files]) => ({ dir, files, score: scoreDocDir(dir, files.length) }))\n .filter(d => d.files.length >= 5) // Minimum threshold\n .sort((a, b) => b.score - a.score)\n\n if (scored.length === 0)\n return null\n\n const best = scored[0]!\n\n // For non-docs paths, the prefix is everything up to (but not including) the final dir\n // e.g. 'website/pages/' -> prefix is 'website/' so files normalize to 'pages/...'\n // But actually we want the full prefix so downstream can strip it\n return { files: best.files, prefix: best.dir }\n}\n\n/**\n * List markdown files in a folder at a specific git ref\n */\nasync function listDocsAtRef(owner: string, repo: string, ref: string, pathPrefix = 'docs/'): Promise<string[]> {\n const files = await listFilesAtRef(owner, repo, ref)\n return filterDocFiles(files, pathPrefix)\n}\n\n/**\n * Fetch versioned docs from GitHub repo's docs/ folder.\n * Pass packageName to check doc overrides (e.g. vue -> vuejs/docs).\n */\nexport async function fetchGitDocs(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitDocsResult | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override) {\n const ref = override.ref || 'main'\n const fallback = !override.ref\n const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`)\n if (files.length === 0)\n return null\n return {\n baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,\n ref,\n files,\n fallback,\n // Strip non-standard prefix so sync normalizes paths under docs/\n docsPrefix: `${override.path}/` !== 'docs/' ? `${override.path}/` : undefined,\n }\n }\n\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n let docs = filterDocFiles(tag.files, 'docs/')\n let docsPrefix: string | undefined\n let allFiles: string[] | undefined\n\n // Fallback: discover docs in nested paths (monorepos, content collections)\n if (docs.length === 0) {\n const discovered = discoverDocFiles(tag.files)\n if (discovered) {\n docs = discovered.files\n docsPrefix = discovered.prefix || undefined\n allFiles = tag.files\n }\n }\n\n if (docs.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files: docs,\n docsPrefix,\n allFiles,\n fallback: tag.fallback,\n }\n}\n\n/**\n * Strip file extension (.md, .mdx) and leading slash from a path\n */\nfunction normalizePath(p: string): string {\n return p.replace(/^\\//, '').replace(/\\.(?:md|mdx)$/, '')\n}\n\n/**\n * Validate that discovered git docs are relevant by cross-referencing llms.txt links\n * against the repo file tree. 
Uses extensionless suffix matching to handle monorepo nesting.\n *\n * Returns { isValid, matchRatio } where isValid = matchRatio >= 0.3\n */\nexport function validateGitDocsWithLlms(\n llmsLinks: LlmsLink[],\n repoFiles: string[],\n): { isValid: boolean, matchRatio: number } {\n if (llmsLinks.length === 0)\n return { isValid: true, matchRatio: 1 }\n\n // Sample up to 10 links\n const sample = llmsLinks.slice(0, 10)\n\n // Normalize llms link paths\n const normalizedLinks = sample.map((link) => {\n let path = link.url\n // Strip absolute URL to pathname\n if (path.startsWith('http')) {\n try {\n path = new URL(path).pathname\n }\n catch { /* keep as-is */ }\n }\n return normalizePath(path)\n })\n\n // Pre-process repo files: strip extensions to get extensionless paths\n const repoNormalized = new Set(repoFiles.map(normalizePath))\n\n let matches = 0\n for (const linkPath of normalizedLinks) {\n // Check if any repo file ends with this path (suffix matching for monorepo nesting)\n for (const repoPath of repoNormalized) {\n if (repoPath === linkPath || repoPath.endsWith(`/${linkPath}`)) {\n matches++\n break\n }\n }\n }\n\n const matchRatio = matches / sample.length\n return { isValid: matchRatio >= 0.3, matchRatio }\n}\n\n/**\n * Verify a GitHub repo is the source for an npm package by checking package.json name field.\n * Checks root first, then common monorepo paths (packages/{shortName}, packages/{name}).\n */\nasync function verifyNpmRepo(owner: string, repo: string, packageName: string): Promise<boolean> {\n const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`\n const shortName = packageName.replace(/^@.*\\//, '')\n const paths = [\n 'package.json',\n `packages/${shortName}/package.json`,\n `packages/${packageName.replace(/^@/, '').replace('/', '-')}/package.json`,\n ]\n for (const path of paths) {\n const text = await fetchText(`${base}/${path}`)\n if (!text)\n continue\n try {\n const pkg = JSON.parse(text) as { name?: string }\n if (pkg.name === packageName)\n return true\n }\n catch {}\n }\n return false\n}\n\nexport async function searchGitHubRepo(packageName: string): Promise<string | null> {\n // Try ungh heuristic first — check if repo name matches package name\n const shortName = packageName.replace(/^@.*\\//, '')\n for (const candidate of [packageName.replace(/^@/, '').replace('/', '/'), shortName]) {\n // Only try if it looks like owner/repo\n if (!candidate.includes('/')) {\n // Try common patterns: {name}/{name}\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${shortName}/${shortName}`\n continue\n }\n const unghRes = await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null)\n if (unghRes?.ok)\n return `https://github.com/${candidate}`\n }\n\n // Try gh CLI — strip @ to avoid GitHub search syntax issues\n const searchTerm = packageName.replace(/^@/, '')\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['search', 'repos', searchTerm, '--json', 'fullName', '--limit', '5'], {\n encoding: 'utf-8',\n timeout: 15_000,\n })\n if (!json)\n throw new Error('no output')\n const repos = JSON.parse(json) as Array<{ fullName: string }>\n // Prefer exact suffix match\n const match = repos.find(r =>\n r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.fullName}`\n // Validate remaining 
results via package.json\n for (const candidate of repos) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.fullName}`\n }\n }\n catch {\n // fall through to REST API\n }\n }\n\n // Fallback: GitHub REST search API (no auth needed, but rate-limited)\n const query = encodeURIComponent(`${searchTerm} in:name`)\n const data = await $fetch<{ items?: Array<{ full_name: string }> }>(\n `https://api.github.com/search/repositories?q=${query}&per_page=5`,\n ).catch(() => null)\n if (!data?.items?.length)\n return null\n\n // Prefer exact suffix match\n const match = data.items.find(r =>\n r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`)\n || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`),\n )\n if (match)\n return `https://github.com/${match.full_name}`\n\n // Validate remaining results via package.json\n for (const candidate of data.items) {\n const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`)\n if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName))\n return `https://github.com/${candidate.full_name}`\n }\n\n return null\n}\n\n/**\n * Fetch GitHub repo metadata to get website URL.\n * Pass packageName to check doc overrides first (avoids API call).\n */\nexport async function fetchGitHubRepoMeta(owner: string, repo: string, packageName?: string): Promise<{ homepage?: string } | null> {\n const override = packageName ? getDocOverride(packageName) : undefined\n if (override?.homepage)\n return { homepage: override.homepage }\n\n // Prefer gh CLI to avoid rate limits\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '-q', '{homepage}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (!json)\n throw new Error('no output')\n const data = JSON.parse(json) as { homepage?: string }\n return data?.homepage ? { homepage: data.homepage } : null\n }\n catch {\n // fall through to fetch\n }\n }\n\n const data = await $fetch<{ homepage?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n return data?.homepage ? { homepage: data.homepage } : null\n}\n\n/**\n * Resolve README URL for a GitHub repo, returns ungh:// pseudo-URL or raw URL\n */\nexport async function fetchReadme(owner: string, repo: string, subdir?: string, ref?: string): Promise<string | null> {\n const branch = ref || 'main'\n\n // Try ungh first\n const unghUrl = subdir\n ? `https://ungh.cc/repos/${owner}/${repo}/files/${branch}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ''}`\n\n const unghRes = await $fetch.raw(unghUrl).catch(() => null)\n\n if (unghRes?.ok) {\n return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ''}${ref ? `@${ref}` : ''}`\n }\n\n // Fallback to raw.githubusercontent.com — use GET instead of HEAD\n // because raw.githubusercontent.com sometimes returns HTML on HEAD for valid URLs\n const basePath = subdir ? `${subdir}/` : ''\n const branches = ref ? 
[ref] : ['main', 'master']\n for (const b of branches) {\n for (const filename of ['README.md', 'Readme.md', 'readme.md']) {\n const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`\n const res = await $fetch.raw(readmeUrl).catch(() => null)\n if (res?.ok)\n return readmeUrl\n }\n }\n\n return null\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport interface GitSourceResult {\n /** URL pattern for fetching source */\n baseUrl: string\n /** Git ref (tag) used */\n ref: string\n /** List of source file paths relative to repo root */\n files: string[]\n}\n\n/** Source file extensions to include */\nconst SOURCE_EXTENSIONS = new Set([\n '.ts',\n '.tsx',\n '.mts',\n '.cts',\n '.js',\n '.jsx',\n '.mjs',\n '.cjs',\n '.vue',\n '.svelte',\n '.astro',\n])\n\n/** Paths/patterns to exclude */\nconst EXCLUDE_PATTERNS = [\n /\\.test\\./,\n /\\.spec\\./,\n /\\.d\\.ts$/,\n /__tests__/,\n /__mocks__/,\n /\\.config\\./,\n /fixtures?\\//,\n]\n\n/**\n * Filter source files from a file list\n */\nfunction filterSourceFiles(files: string[]): string[] {\n return files.filter((path) => {\n if (!path.startsWith('src/'))\n return false\n\n const ext = path.slice(path.lastIndexOf('.'))\n if (!SOURCE_EXTENSIONS.has(ext))\n return false\n if (EXCLUDE_PATTERNS.some(p => p.test(path)))\n return false\n\n return true\n })\n}\n\n/**\n * Fetch source files from GitHub repo's src/ folder\n */\nexport async function fetchGitSource(owner: string, repo: string, version: string, packageName?: string, repoUrl?: string): Promise<GitSourceResult | null> {\n const branchHint = repoUrl ? extractBranchHint(repoUrl) : undefined\n const tag = await findGitTag(owner, repo, version, packageName, branchHint)\n if (!tag)\n return null\n\n const files = filterSourceFiles(tag.files)\n if (files.length === 0)\n return null\n\n return {\n baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,\n ref: tag.ref,\n files,\n }\n}\n\n/**\n * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL\n */\nexport async function fetchReadmeContent(url: string): Promise<string | null> {\n // Local file\n if (url.startsWith('file://')) {\n const filePath = fileURLToPath(url)\n if (!fsExistsSync(filePath))\n return null\n return fsReadFileSync(filePath, 'utf-8')\n }\n\n if (url.startsWith('ungh://')) {\n let path = url.replace('ungh://', '')\n let ref = 'main'\n\n // Parse ref from owner/repo/subdir@ref\n const atIdx = path.lastIndexOf('@')\n if (atIdx !== -1) {\n ref = path.slice(atIdx + 1)\n path = path.slice(0, atIdx)\n }\n\n const parts = path.split('/')\n const owner = parts[0]\n const repo = parts[1]\n const subdir = parts.slice(2).join('/')\n\n const unghUrl = subdir\n ? 
`https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md`\n : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`\n\n const text = await $fetch(unghUrl, { responseType: 'text' }).catch(() => null)\n if (!text)\n return null\n\n try {\n const json = JSON.parse(text) as { markdown?: string, file?: { contents?: string } }\n return json.markdown || json.file?.contents || null\n }\n catch {\n return text\n }\n }\n\n return fetchText(url)\n}\n\n/**\n * Resolve a GitHub repo into a ResolvedPackage (no npm registry needed).\n * Fetches repo meta, latest release version, git docs, README, and llms.txt.\n */\nexport async function resolveGitHubRepo(\n owner: string,\n repo: string,\n onProgress?: (msg: string) => void,\n): Promise<ResolvedPackage | null> {\n onProgress?.('Fetching repo metadata')\n\n // Fetch repo metadata (homepage, description) via gh CLI or GitHub API\n const repoUrl = `https://github.com/${owner}/${repo}`\n let homepage: string | undefined\n let description: string | undefined\n\n if (isGhAvailable()) {\n try {\n const { stdout: json } = spawnSync('gh', ['api', `repos/${owner}/${repo}`, '--jq', '{homepage: .homepage, description: .description}'], {\n encoding: 'utf-8',\n timeout: 10_000,\n })\n if (json) {\n const data = JSON.parse(json) as { homepage?: string, description?: string }\n homepage = data.homepage || undefined\n description = data.description || undefined\n }\n }\n catch { /* fall through */ }\n }\n\n if (!homepage && !description) {\n const data = await $fetch<{ homepage?: string, description?: string }>(\n `https://api.github.com/repos/${owner}/${repo}`,\n ).catch(() => null)\n homepage = data?.homepage || undefined\n description = data?.description || undefined\n }\n\n // Fetch latest release tag for version\n onProgress?.('Fetching latest release')\n const releasesData = await $fetch<{ releases?: Array<{ tag: string, publishedAt?: string }> }>(\n `https://ungh.cc/repos/${owner}/${repo}/releases`,\n ).catch(() => null)\n\n let version = 'main'\n let releasedAt: string | undefined\n const latestRelease = releasesData?.releases?.[0]\n if (latestRelease) {\n // Extract version from tag (strip leading \"v\")\n version = latestRelease.tag.replace(/^v/, '')\n releasedAt = latestRelease.publishedAt\n }\n\n // Fetch git docs\n onProgress?.('Resolving docs')\n const gitDocs = await fetchGitDocs(owner, repo, version)\n const gitDocsUrl = gitDocs ? `${repoUrl}/tree/${gitDocs.ref}/docs` : undefined\n const gitRef = gitDocs?.ref\n\n // Fetch README\n onProgress?.('Fetching README')\n const readmeUrl = await fetchReadme(owner, repo)\n\n // Check for llms.txt at homepage\n let llmsUrl: string | undefined\n if (homepage) {\n onProgress?.('Checking llms.txt')\n llmsUrl = await fetchLlmsUrl(homepage).catch(() => null) ?? undefined\n }\n\n // Must have at least some docs\n if (!gitDocsUrl && !readmeUrl && !llmsUrl)\n return null\n\n return {\n name: repo,\n version: latestRelease ? version : undefined,\n releasedAt,\n description,\n repoUrl,\n docsUrl: homepage,\n gitDocsUrl,\n gitRef,\n gitDocsFallback: gitDocs?.fallback,\n readmeUrl: readmeUrl ?? 
undefined,\n llmsUrl,\n }\n}\n","/**\n * NPM registry lookup\n */\n\nimport type { LocalDependency, NpmPackageInfo, ResolveAttempt, ResolvedPackage, ResolveResult } from './types.ts'\nimport { spawnSync } from 'node:child_process'\nimport { createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, unlinkSync } from 'node:fs'\nimport { Writable } from 'node:stream'\nimport { pathToFileURL } from 'node:url'\nimport { resolvePathSync } from 'mlly'\nimport { basename, dirname, join, resolve } from 'pathe'\nimport { getCacheDir } from '../cache/version.ts'\nimport { fetchGitDocs, fetchGitHubRepoMeta, fetchReadme, searchGitHubRepo, validateGitDocsWithLlms } from './github.ts'\nimport { fetchLlmsTxt, fetchLlmsUrl } from './llms.ts'\nimport { $fetch, isGitHubRepoUrl, isUselessDocsUrl, normalizeRepoUrl, parseGitHubUrl } from './utils.ts'\n\n/**\n * Search npm registry for packages matching a query.\n * Used as a fallback when direct package lookup fails.\n */\nexport async function searchNpmPackages(query: string, size = 5): Promise<Array<{ name: string, description?: string, version: string }>> {\n const data = await $fetch<{\n objects: Array<{ package: { name: string, description?: string, version: string } }>\n }>(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null)\n\n if (!data?.objects?.length)\n return []\n\n return data.objects.map(o => ({\n name: o.package.name,\n description: o.package.description,\n version: o.package.version,\n }))\n}\n\n/**\n * Fetch package info from npm registry\n */\nexport async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo | null> {\n // Try unpkg first (faster, CDN)\n const data = await $fetch<NpmPackageInfo>(`https://unpkg.com/${packageName}/package.json`).catch(() => null)\n if (data)\n return data\n\n // Fallback to npm registry\n return $fetch<NpmPackageInfo>(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null)\n}\n\nexport interface DistTagInfo {\n version: string\n releasedAt?: string\n}\n\nexport interface NpmRegistryMeta {\n releasedAt?: string\n distTags?: Record<string, DistTagInfo>\n}\n\n/**\n * Fetch release date and dist-tags from npm registry\n */\nexport async function fetchNpmRegistryMeta(packageName: string, version: string): Promise<NpmRegistryMeta> {\n const data = await $fetch<{\n 'time'?: Record<string, string>\n 'dist-tags'?: Record<string, string>\n }>(`https://registry.npmjs.org/${packageName}`).catch(() => null)\n\n if (!data)\n return {}\n\n // Enrich dist-tags with release dates\n const distTags: Record<string, DistTagInfo> | undefined = data['dist-tags']\n ? 
Object.fromEntries(\n Object.entries(data['dist-tags']).map(([tag, ver]) => [\n tag,\n { version: ver, releasedAt: data.time?.[ver] },\n ]),\n )\n : undefined\n\n return {\n releasedAt: data.time?.[version] || undefined,\n distTags,\n }\n}\n\nexport type ResolveStep = 'npm' | 'github-docs' | 'github-meta' | 'github-search' | 'readme' | 'llms.txt' | 'local'\n\nexport interface ResolveOptions {\n /** User's installed version - used to fetch versioned git docs */\n version?: string\n /** Current working directory - for local readme fallback */\n cwd?: string\n /** Progress callback - called before each resolution step */\n onProgress?: (step: ResolveStep) => void\n}\n\n/**\n * Shared GitHub resolution cascade: git docs → repo meta (homepage) → README.\n * Used for both \"repo URL found in package.json\" and \"repo URL found via search\" paths.\n */\nasync function resolveGitHub(\n gh: { owner: string, repo: string },\n targetVersion: string | undefined,\n pkg: { name: string },\n result: ResolvedPackage,\n attempts: ResolveAttempt[],\n onProgress?: (step: ResolveStep) => void,\n opts?: { rawRepoUrl?: string, subdir?: string },\n): Promise<string[] | undefined> {\n let allFiles: string[] | undefined\n\n // Try versioned git docs first (docs/**/*.md at git tag)\n if (targetVersion) {\n onProgress?.('github-docs')\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name, opts?.rawRepoUrl)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n allFiles = gitDocs.allFiles\n attempts.push({\n source: 'github-docs',\n url: gitDocs.baseUrl,\n status: 'success',\n message: gitDocs.fallback\n ? `Found ${gitDocs.files.length} docs at ${gitDocs.ref} (no tag for v${targetVersion})`\n : `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`,\n })\n }\n else {\n attempts.push({\n source: 'github-docs',\n url: `${result.repoUrl}/tree/v${targetVersion}/docs`,\n status: 'not-found',\n message: 'No docs/ folder found at version tag',\n })\n }\n }\n\n // If no docsUrl yet (npm had no homepage), try GitHub repo metadata\n if (!result.docsUrl) {\n onProgress?.('github-meta')\n const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name)\n if (repoMeta?.homepage && !isUselessDocsUrl(repoMeta.homepage)) {\n result.docsUrl = repoMeta.homepage\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'success',\n message: `Found homepage: ${repoMeta.homepage}`,\n })\n }\n else {\n attempts.push({\n source: 'github-meta',\n url: result.repoUrl!,\n status: 'not-found',\n message: 'No homepage in repo metadata',\n })\n }\n }\n\n // README fallback via ungh\n onProgress?.('readme')\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, opts?.subdir, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n attempts.push({\n source: 'readme',\n url: readmeUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'readme',\n url: `${result.repoUrl}/README.md`,\n status: 'not-found',\n message: 'No README found',\n })\n }\n\n return allFiles\n}\n\n/**\n * Resolve documentation URL for a package (legacy - returns null on failure)\n */\nexport async function resolvePackageDocs(packageName: string, options: ResolveOptions = {}): Promise<ResolvedPackage | null> {\n const result = await resolvePackageDocsWithAttempts(packageName, options)\n return result.package\n}\n\n/**\n * Resolve documentation URL for a package with attempt tracking\n */\nexport async function 
resolvePackageDocsWithAttempts(packageName: string, options: ResolveOptions = {}): Promise<ResolveResult> {\n const attempts: ResolveAttempt[] = []\n const { onProgress } = options\n\n onProgress?.('npm')\n const pkg = await fetchNpmPackage(packageName)\n if (!pkg) {\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'not-found',\n message: 'Package not found on npm registry',\n })\n return { package: null, attempts }\n }\n\n attempts.push({\n source: 'npm',\n url: `https://registry.npmjs.org/${packageName}/latest`,\n status: 'success',\n message: `Found ${pkg.name}@${pkg.version}`,\n })\n\n // Fetch release date and dist-tags for this version\n const registryMeta = pkg.version\n ? await fetchNpmRegistryMeta(packageName, pkg.version)\n : {}\n\n const result: ResolvedPackage = {\n name: pkg.name,\n version: pkg.version,\n releasedAt: registryMeta.releasedAt,\n description: pkg.description,\n dependencies: pkg.dependencies,\n distTags: registryMeta.distTags,\n }\n\n // Track allFiles from heuristic git doc discovery for llms.txt validation\n let gitDocsAllFiles: string[] | undefined\n\n // Extract repo URL (handle both object and shorthand string formats)\n let subdir: string | undefined\n let rawRepoUrl: string | undefined\n if (typeof pkg.repository === 'object' && pkg.repository?.url) {\n rawRepoUrl = pkg.repository.url\n const normalized = normalizeRepoUrl(rawRepoUrl)\n // Handle shorthand \"owner/repo\" in repository.url field (e.g. cac)\n if (!normalized.includes('://') && normalized.includes('/') && !normalized.includes(':'))\n result.repoUrl = `https://github.com/${normalized}`\n else\n result.repoUrl = normalized\n subdir = pkg.repository.directory\n }\n else if (typeof pkg.repository === 'string') {\n if (pkg.repository.includes('://')) {\n // Full URL string (e.g. 
\"https://github.com/org/repo/tree/main/packages/sub\")\n const gh = parseGitHubUrl(pkg.repository)\n if (gh)\n result.repoUrl = `https://github.com/${gh.owner}/${gh.repo}`\n }\n else {\n // Shorthand: \"owner/repo\" or \"github:owner/repo\"\n const repo = pkg.repository.replace(/^github:/, '')\n if (repo.includes('/') && !repo.includes(':'))\n result.repoUrl = `https://github.com/${repo}`\n }\n }\n\n // Use npm homepage early (skip GitHub repo URLs)\n if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage) && !isUselessDocsUrl(pkg.homepage)) {\n result.docsUrl = pkg.homepage\n }\n\n // GitHub repo handling - try versioned git docs first\n if (result.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(result.repoUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, { rawRepoUrl, subdir })\n }\n }\n else if (!result.repoUrl) {\n // No repo URL in package.json — try to find it via GitHub search\n onProgress?.('github-search')\n const searchedUrl = await searchGitHubRepo(pkg.name)\n if (searchedUrl) {\n result.repoUrl = searchedUrl\n attempts.push({\n source: 'github-search',\n url: searchedUrl,\n status: 'success',\n message: `Found via GitHub search: ${searchedUrl}`,\n })\n\n const gh = parseGitHubUrl(searchedUrl)\n if (gh) {\n const targetVersion = options.version || pkg.version\n gitDocsAllFiles = await resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress)\n }\n }\n else {\n attempts.push({\n source: 'github-search',\n status: 'not-found',\n message: 'No repository URL in package.json and GitHub search found no match',\n })\n }\n }\n\n // Check for llms.txt on docsUrl\n if (result.docsUrl) {\n onProgress?.('llms.txt')\n const llmsUrl = await fetchLlmsUrl(result.docsUrl)\n if (llmsUrl) {\n result.llmsUrl = llmsUrl\n attempts.push({\n source: 'llms.txt',\n url: llmsUrl,\n status: 'success',\n })\n }\n else {\n attempts.push({\n source: 'llms.txt',\n url: `${new URL(result.docsUrl).origin}/llms.txt`,\n status: 'not-found',\n message: 'No llms.txt at docs URL',\n })\n }\n }\n\n // Validate heuristic git docs against llms.txt links\n if (result.gitDocsUrl && result.llmsUrl && gitDocsAllFiles) {\n const llmsContent = await fetchLlmsTxt(result.llmsUrl)\n if (llmsContent && llmsContent.links.length > 0) {\n const validation = validateGitDocsWithLlms(llmsContent.links, gitDocsAllFiles)\n if (!validation.isValid) {\n attempts.push({\n source: 'github-docs',\n url: result.gitDocsUrl,\n status: 'not-found',\n message: `Heuristic git docs don't match llms.txt links (${Math.round(validation.matchRatio * 100)}% match), preferring llms.txt`,\n })\n result.gitDocsUrl = undefined\n result.gitRef = undefined\n }\n }\n }\n\n // Fallback: check local node_modules readme when all else fails\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {\n onProgress?.('local')\n const pkgDir = join(options.cwd, 'node_modules', packageName)\n // Check common readme variations (case-insensitive)\n const readmeFile = existsSync(pkgDir) && readdirSync(pkgDir).find(f => /^readme\\.md$/i.test(f))\n if (readmeFile) {\n const readmePath = join(pkgDir, readmeFile)\n result.readmeUrl = pathToFileURL(readmePath).href\n attempts.push({\n source: 'readme',\n url: readmePath,\n status: 'success',\n message: 'Found local readme in node_modules',\n })\n }\n }\n\n // Must have at least one source\n if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && 
!result.gitDocsUrl) {\n return { package: null, attempts }\n }\n\n return { package: result, attempts }\n}\n\n/**\n * Parse version specifier, handling protocols like link:, workspace:, npm:, file:\n */\nexport function parseVersionSpecifier(\n name: string,\n version: string,\n cwd: string,\n): LocalDependency | null {\n // link: - resolve local package.json\n if (version.startsWith('link:')) {\n const linkPath = resolve(cwd, version.slice(5))\n const linkedPkgPath = join(linkPath, 'package.json')\n if (existsSync(linkedPkgPath)) {\n const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, 'utf-8'))\n return {\n name: linkedPkg.name || name,\n version: linkedPkg.version || '0.0.0',\n }\n }\n return null // linked package doesn't exist\n }\n\n // npm: - extract aliased package name\n if (version.startsWith('npm:')) {\n const specifier = version.slice(4)\n const atIndex = specifier.startsWith('@')\n ? specifier.indexOf('@', 1)\n : specifier.indexOf('@')\n const realName = atIndex > 0 ? specifier.slice(0, atIndex) : specifier\n return { name: realName, version: resolveInstalledVersion(realName, cwd) || '*' }\n }\n\n // file: and git: - skip (local/custom sources)\n if (version.startsWith('file:') || version.startsWith('git:') || version.startsWith('git+')) {\n return null\n }\n\n // For everything else (semver, catalog:, workspace:, etc.)\n // resolve the actual installed version from node_modules\n const installed = resolveInstalledVersion(name, cwd)\n if (installed)\n return { name, version: installed }\n\n // Fallback: strip semver prefix if it looks like one\n if (/^[\\^~>=<\\d]/.test(version))\n return { name, version: version.replace(/^[\\^~>=<]/, '') }\n\n // catalog: and workspace: specifiers - include with wildcard version\n // so the dep isn't silently dropped from state.deps\n if (version.startsWith('catalog:') || version.startsWith('workspace:'))\n return { name, version: '*' }\n\n return null\n}\n\n/**\n * Resolve the actual installed version of a package by finding its package.json\n * via mlly's resolvePathSync. 
Works regardless of package manager or version protocol.\n */\nexport function resolveInstalledVersion(name: string, cwd: string): string | null {\n try {\n const resolved = resolvePathSync(`${name}/package.json`, { url: cwd })\n const pkg = JSON.parse(readFileSync(resolved, 'utf-8'))\n return pkg.version || null\n }\n catch {\n // Packages with `exports` that don't expose ./package.json\n // Resolve the entry point, then walk up to find package.json\n try {\n const entry = resolvePathSync(name, { url: cwd })\n let dir = dirname(entry)\n while (dir && basename(dir) !== 'node_modules') {\n const pkgPath = join(dir, 'package.json')\n if (existsSync(pkgPath)) {\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n return pkg.version || null\n }\n dir = dirname(dir)\n }\n }\n catch {}\n return null\n }\n}\n\n/**\n * Read package.json dependencies with versions\n */\nexport async function readLocalDependencies(cwd: string): Promise<LocalDependency[]> {\n const pkgPath = join(cwd, 'package.json')\n if (!existsSync(pkgPath)) {\n throw new Error('No package.json found in current directory')\n }\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n const deps: Record<string, string> = {\n ...pkg.dependencies,\n ...pkg.devDependencies,\n }\n\n const results: LocalDependency[] = []\n\n for (const [name, version] of Object.entries(deps)) {\n const parsed = parseVersionSpecifier(name, version, cwd)\n if (parsed) {\n results.push(parsed)\n }\n }\n\n return results\n}\n\nexport interface LocalPackageInfo {\n name: string\n version: string\n description?: string\n repoUrl?: string\n localPath: string\n}\n\n/**\n * Read package info from a local path (for link: deps)\n */\nexport function readLocalPackageInfo(localPath: string): LocalPackageInfo | null {\n const pkgPath = join(localPath, 'package.json')\n if (!existsSync(pkgPath))\n return null\n\n const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'))\n\n let repoUrl: string | undefined\n if (pkg.repository?.url) {\n repoUrl = normalizeRepoUrl(pkg.repository.url)\n }\n else if (typeof pkg.repository === 'string') {\n repoUrl = normalizeRepoUrl(pkg.repository)\n }\n\n return {\n name: pkg.name,\n version: pkg.version || '0.0.0',\n description: pkg.description,\n repoUrl,\n localPath,\n }\n}\n\n/**\n * Resolve docs for a local package (link: dependency)\n */\nexport async function resolveLocalPackageDocs(localPath: string): Promise<ResolvedPackage | null> {\n const info = readLocalPackageInfo(localPath)\n if (!info)\n return null\n\n const result: ResolvedPackage = {\n name: info.name,\n version: info.version,\n description: info.description,\n repoUrl: info.repoUrl,\n }\n\n // Try GitHub if repo URL available\n if (info.repoUrl?.includes('github.com')) {\n const gh = parseGitHubUrl(info.repoUrl)\n if (gh) {\n // Try versioned git docs\n const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name)\n if (gitDocs) {\n result.gitDocsUrl = gitDocs.baseUrl\n result.gitRef = gitDocs.ref\n result.gitDocsFallback = gitDocs.fallback\n }\n\n // README fallback via ungh\n const readmeUrl = await fetchReadme(gh.owner, gh.repo, undefined, result.gitRef)\n if (readmeUrl) {\n result.readmeUrl = readmeUrl\n }\n }\n }\n\n // Fallback: read local readme (case-insensitive)\n if (!result.readmeUrl && !result.gitDocsUrl) {\n const readmeFile = readdirSync(localPath).find(f => /^readme\\.md$/i.test(f))\n if (readmeFile) {\n result.readmeUrl = pathToFileURL(join(localPath, readmeFile)).href\n }\n }\n\n if (!result.readmeUrl && !result.gitDocsUrl) {\n 
return null\n }\n\n return result\n}\n\n/**\n * Download and extract npm package tarball to cache directory.\n * Used when the package isn't available in node_modules.\n *\n * Extracts to: ~/.skilld/references/<pkg>@<version>/pkg/\n * Returns the extracted directory path, or null on failure.\n */\nexport async function fetchPkgDist(name: string, version: string): Promise<string | null> {\n const cacheDir = getCacheDir(name, version)\n const pkgDir = join(cacheDir, 'pkg')\n\n // Already extracted\n if (existsSync(join(pkgDir, 'package.json')))\n return pkgDir\n\n // Fetch version metadata to get tarball URL\n const data = await $fetch<{ dist?: { tarball?: string } }>(\n `https://registry.npmjs.org/${name}/${version}`,\n ).catch(() => null)\n if (!data)\n return null\n const tarballUrl = data.dist?.tarball\n if (!tarballUrl)\n return null\n\n // Download tarball to temp file\n const tarballRes = await fetch(tarballUrl, {\n headers: { 'User-Agent': 'skilld/1.0' },\n }).catch(() => null)\n\n if (!tarballRes?.ok || !tarballRes.body)\n return null\n\n mkdirSync(pkgDir, { recursive: true })\n\n const tmpTarball = join(cacheDir, '_pkg.tgz')\n const fileStream = createWriteStream(tmpTarball)\n\n // Stream response body to file\n const reader = tarballRes.body.getReader()\n await new Promise<void>((res, reject) => {\n const writable = new Writable({\n write(chunk, _encoding, callback) {\n fileStream.write(chunk, callback)\n },\n })\n writable.on('finish', () => {\n fileStream.end()\n res()\n })\n writable.on('error', reject)\n\n function pump() {\n reader.read().then(({ done, value }) => {\n if (done) {\n writable.end()\n return\n }\n writable.write(value, () => pump())\n }).catch(reject)\n }\n pump()\n })\n\n // Extract tarball — npm tarballs have a \"package/\" prefix\n const { status } = spawnSync('tar', ['xzf', tmpTarball, '--strip-components=1', '-C', pkgDir], { stdio: 'ignore' })\n if (status !== 0) {\n rmSync(pkgDir, { recursive: true, force: true })\n rmSync(tmpTarball, { force: true })\n return null\n }\n\n unlinkSync(tmpTarball)\n return pkgDir\n}\n\n/**\n * Fetch just the latest version string from npm (lightweight)\n */\nexport async function fetchLatestVersion(packageName: string): Promise<string | null> {\n const data = await $fetch<{ version?: string }>(\n `https://unpkg.com/${packageName}/package.json`,\n ).catch(() => null)\n return data?.version || null\n}\n\n/**\n * Get installed skill version from SKILL.md\n */\nexport function getInstalledSkillVersion(skillDir: string): string | null {\n const skillPath = join(skillDir, 'SKILL.md')\n if (!existsSync(skillPath))\n return null\n\n const content = readFileSync(skillPath, 'utf-8')\n const match = content.match(/^version:\\s*\"?([^\"\\n]+)\"?/m)\n return match?.[1] || 
null\n}\n"],"mappings":";;;;;;;;;;;;;AAIA,MAAa,YAAY,IAAI,IAAI;CAC/B;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAa,WAAW,QAAgB,IAAI,MAAM,IAAI,CAAC;AAGvD,SAAgB,iBAAiB,QAAuE;CACtG,MAAM,QAAQ,CAAC,MAAM;AACrB,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,CACzC,KAAI,MAAM,KAAA,EACR,OAAM,KAAK,GAAG,EAAE,IAAI,OAAO,MAAM,YAAY,UAAU,KAAK,EAAE,GAAG,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,KAAK,IAAI;AAEzG,OAAM,KAAK,MAAM;AACjB,QAAO,MAAM,KAAK,KAAK;;AClBzB,SAAgB,UAAgB,KAAgB,KAAQ,QAAoB;CAC1E,IAAI,MAAM,IAAI,IAAI,IAAI;AACtB,KAAI,QAAQ,KAAA,GAAW;AACrB,QAAM,QAAQ;AACd,MAAI,IAAI,KAAK,IAAI;;AAEnB,QAAO;;AAIT,SAAgB,SAAS,GAAW,GAAoB;AACtD,QAAOA,GAAU,GAAG,GAAG,KAAK;;AAG9B,MAAa,oBAAoB;AAGjC,SAAgB,mBAAmB,MAAc,QAAQ,KAAK,EAAiB;CAC7E,MAAM,MAAM,KAAK,KAAK,kBAAkB;AACxC,QAAO,WAAW,IAAI,GAAG,MAAM;;ACcjC,IAAI;AAKJ,SAAgB,gBAAyB;AACvC,KAAI,iBAAiB,KAAA,EACnB,QAAO;CACT,MAAM,EAAE,WAAW,UAAU,MAAM,CAAC,QAAQ,SAAS,EAAE,EAAE,OAAO,UAAU,CAAC;AAC3E,QAAQ,eAAe,WAAW;;AAIpC,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,iBAAiB,IAAI,IAAI;CAC7B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,aAAa,IAAI,IAAI;CACzB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,kBAAkB,IAAI,IAAI;CAC9B;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,cAAc,IAAI,IAAI;CAC1B;CACA;CACA;CACA;CACD,CAAC;AAMF,SAAS,gBAAgB,OAAe,UAAgC;AACtE,MAAK,MAAM,WAAW,SACpB,KAAI,UAAU,WAAW,MAAM,SAAS,QAAQ,CAC9C,QAAO;AAEX,QAAO;;AAMT,SAAgB,cAAc,QAA6B;CACzD,MAAM,QAAQ,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC;AAC9C,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,WAAW,CAAC,CACjD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,gBAAgB,CAAC,CACtD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,YAAY,CAAC,CAClD,QAAO;AACT,KAAI,MAAM,MAAK,MAAK,gBAAgB,GAAG,eAAe,CAAC,CACrD,QAAO;AACT,QAAO;;AAMT,SAAS,aAAa,OAAmE;AAEvF,KADc,MAAM,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC,CAC1C,MAAK,MAAK,gBAAgB,GAAG,aAAa,CAAC,CACnD,QAAO;AAET,KAAI,MAAM,MAAM,WAAW,KAAK,IAAI,MAAM,MAAM,WAAW,SAAS,IAAI,MAAM,MAAM,WAAW,aAAa,CAC1G,QAAO;AACT,QAAO;;AAIT,SAASC,eAAa,MAAuB;AAC3C,QAAO,iBAAiB,KAAK,KAAK,IAAI,UAAU,KAAK,KAAK;;AAQ5D,SAAgB,eAAe,OAAoE;CACjG,MAAM,QAAQ,MAAM,QAAQ,IAAI,MAAM;AAEtC,KAAI,KAAK,SAAS,OAAO,CAACA,eAAa,KAAK,IAAI,MAAM,YAAY,GAChE,QAAO;AAET,KAAI,yCAAyC,KAAK,MAAM,MAAM,IAAI,CAACA,eAAa,KAAK,CACnF,QAAO;AACT,QAAO;;AAQT,SAAgB,eAAe,WAAmB,WAA2B;AAG3E,QAAO,aAAa,KAAK,KAFX,KAAK,KAAK,GAAG,IAAI,KAAK,UAAU,CAAC,SAAS,KAC9B,SAAS,KAAK,KAAK,KAAK,OACV;;AAO1C,SAAS,gBAAgB,QAAuB,OAA8B;CAC5E,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,OAClB,WAAU,QAAQ,MAAM,YAAY,EAAE,CAAC,CAAC,KAAK,MAAM;AAIrD,MAAK,MAAM,SAAS,OAAO,QAAQ,CACjC,OAAM,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAGzC,MAAM,SAAgC;EACpC,CAAC,OAAO,KAAK,KAAK,QAAQ,GAAK,CAAC;EAChC,CAAC,YAAY,KAAK,KAAK,QAAQ,GAAK,CAAC;EACrC,CAAC,QAAQ,KAAK,KAAK,QAAQ,IAAK,CAAC;EACjC,CAAC,WAAW,KAAK,KAAK,QAAQ,GAAK,CAAC;EACpC,CAAC,SAAS,KAAK,KAAK,QAAQ,IAAK,CAAA;EAClC;CAED,MAAM,WAA0B,EAAE;CAClC,MAAM,uBAAO,IAAI,KAAa;CAC9B,IAAI,YAAY;AAGhB,MAAK,MAAM,CAAC,MAAM,UAAU,QAAQ;EAClC,MAAM,QAAQ,OAAO,IAAI,KAAK,IAAI,EAAE;EACpC,MAAM,OAAO,KAAK,IAAI,OAAO,MAAM,QAAQ,UAAU;AACrD,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,KAAK;AAC7B,YAAS,KAAK,MAAM,GAAI;AACxB,QAAK,IAAI,MAAM,GAAI,OAAO;AAC1B;;;AAKJ,KAAI,YAAY,GAAG;EACjB,MAAM,SAAS,OACZ,QAAO,MAAK,CAAC,KAAK,IAAI,EAAE,OAAO,IAAI,EAAE,SAAS,UAAU,CACxD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AACpC,OAAK,MAAM,SAAS,QAAQ;AAC1B,OAAI,aAAa,EACf;AACF,YAAS,KAAK,MAAM;AACpB;;;AAIJ,QAAO,SAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;;AAMnD,SAAS,UAAU,WAA2B;AAC5C,KAAI,aAAa,GACf,QAAO;AACT,KAAI,aAAa,EACf,QAAO;AACT,QAAO;;AAOT,SAASC,eAAa,MAAc,OAAuB;AACzD,KAAI,KAAK,UAAU,MACjB,QAAO;CAGT,MAAM,cAAc;CACpB,IAAI,cAAc;CAClB,IAAI;AAGJ,SAAQ,QAAQ,YAAY,KAAK,KAAK,MAAM,MAAM;EAChD,MAAM,aAAa,MAAM;EACzB,MAAM,WAAW,aAAa
,MAAM,GAAG;AAIvC,MAAI,aAAa,SAAS,WAAW,OAAO;AAC1C,OAAI,YAAY,QAAQ,IAEtB,eAAc;OAId,eAAc;AAEhB;;;CAKJ,MAAM,QAAQ,KAAK,MAAM,GAAG,YAAY;CACxC,MAAM,gBAAgB,MAAM,YAAY,OAAO;AAC/C,KAAI,gBAAgB,cAAc,GAChC,QAAO,GAAG,MAAM,MAAM,GAAG,cAAc,CAAC;AAE1C,QAAO,GAAG,MAAM;;AAMlB,SAAS,mBACP,OACA,MACA,OACA,OACA,YACA,UACe;CACf,MAAM,aAAa,KAAK,IAAI,QAAQ,GAAG,IAAI;CAC3C,IAAI,WAAW;AACf,KAAI,SAEF,YAAW,UAAU,WACjB,aAAa,aACb,cAAc;UAEX,UAAU,SACjB,KAAI,YAAY;EAEd,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,aAAa,QAAQ,KAAK,aAAa,CAAC;OAGnD,YAAW,YAAY,YAAY;UAG9B,YAAY;EAEnB,MAAM,OAAO,IAAI,KAAK,WAAW;AACjC,OAAK,SAAS,KAAK,UAAU,GAAG,EAAE;AAClC,aAAW,cAAc,QAAQ,KAAK,aAAa,CAAC;;CAKtD,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;EACzC;EACA,mBAJQ,QAAQ,MAAM,GAAG,KAAK,eAAe,QAAQ,WAIhC,sCAAsC;EAC3D;EACA;EACD,EAAE;EAAE,UAAU;EAAS,WAAW,KAAK,OAAO;EAAM,CAAC;AAEtD,KAAI,CAAC,OACH,QAAO,EAAE;AAEX,QAAO,OACJ,MAAM,CACN,MAAM,KAAK,CACX,OAAO,QAAQ,CACf,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAgF,CAC5G,QAAO,UAAS,CAAC,UAAU,IAAI,MAAM,KAAK,IAAI,MAAM,aAAa,MAAM,CACvE,QAAO,UAAS,CAAC,aAAa,MAAM,CAAC,CACrC,QAAO,UAAS,CAAC,eAAe,MAAM,CAAC,CACvC,KAAK,EAAE,MAAM,GAAG,UAAU,IAAI,mBAAmB,GAAG,YAAY;EAC/D,MAAM,eAAe;GAAC;GAAS;GAAU;GAAe,CAAC,SAAS,kBAAkB;EACpF,MAAM,YAAY,eAAe,KAAK,MAAM,MAAM,IAAI,MAAM,OAAO,MAAK,MAAK,WAAW,KAAK,EAAE,CAAC;AAChG,SAAO;GACL,GAAG;GACH,MAAM,cAAc,MAAM,OAAO;GACjC,aAAa,EAAE;GACf,OAAO,eAAe,MAAM,WAAW,MAAM,UAAU,IAAI,gBAAgB,YAAY,IAAI;GAC5F;GACD,CACD,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,MAAM;;AAGpB,SAAS,aAAqB;CAC5B,MAAM,oBAAI,IAAI,MAAM;AACpB,GAAE,YAAY,EAAE,aAAa,GAAG,EAAE;AAClC,QAAO,QAAQ,EAAE,aAAa,CAAC;;AAIjC,MAAMC,qBAAmB;AAQzB,SAAS,mBAAmB,OAAe,MAAc,QAAuB,OAAO,IAAU;CAE/F,MAAM,QAAQ,OACX,QAAO,MAAK,EAAE,WAAW,MAAM,EAAE,SAAS,SAAS,EAAE,SAAS,cAAc,EAAE,aAAa,GAAG,CAC9F,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,MAAM,GAAG,KAAK;AAEjB,KAAI,MAAM,WAAW,EACnB;CAQF,MAAM,QAAQ,qFAJI,MAAM,KAAK,OAAO,MAClC,IAAI,EAAE,kBAAkB,MAAM,OAAO,0GACtC,CAAC,KAAK,IAAI,CAEkG;AAE7G,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA;GACA;GACA,SAAS;GACT;GACA,SAAS;GACT;GACA,QAAQ;GACT,EAAE;GAAE,UAAU;GAAS,WAAW,KAAK,OAAO;GAAM,CAAC;AAEtD,MAAI,CAAC,OACH;EAGF,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM;AAC1B,MAAI,CAAC,MACH;AAEF,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,QAAQ,MAAM,IAAI,MAAM,UAAU;AACxC,OAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;GAEF,MAAM,QAAQ,MAAM;GAEpB,MAAM,WAAkD,MACrD,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW,CAACA,mBAAiB,MAAM,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC,CACjE,KAAK,MAAW;IACf,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,kBAAkB;IACtF,MAAM,OAAO,EAAE,QAAQ;IACvB,MAAM,YAAY,EAAE,WAAW,cAAc;IAE7C,MAAM,UAAU,eAAe,IAAI,MAAMF,eAAa,KAAK,GAAG,IAAI,MAAM,IAAI;AAC5E,WAAO;KAAE;KAAM,QAAQ,EAAE,OAAO;KAAO;KAAW;KAAc;KAAQ;KACxE,CACD,MAAM,GAAQ,MAAW,EAAE,SAAS,EAAE,OAAO;AAGhD,SAAM,cAAc,SAAS,MAAM,GAAG,EAAE,CAAC,KAAK,EAAE,QAAQ,GAAG,GAAG,QAAQ,EAAE;AAGxE,OAAI,MAAM,UAAU,SAClB,OAAM,aAAa,sBAAsB,SAAS;;SAIlD;;AASR,SAAS,sBAAsB,UAA8C;CAC3E,MAAM,qBAAqB,SAAS,QAAO,MAAK,EAAE,aAAa;AAE/D,MAAK,MAAM,KAAK,mBAAmB,SAAS,EAAE;EAE5C,MAAM,QAAQ,EAAE,KAAK,MAAM,+FAA+F;AAC1H,MAAI,MACF,QAAO,MAAM;AAEf,MAAI,EAAE,KAAK,SAAS,KAAK;GACvB,MAAM,SAAS,EAAE,KAAK,MAAM,wBAAwB;AACpD,OAAI,OACF,QAAO,OAAO;;;;AAWtB,eAAsB,kBACpB,OACA,MACA,QAAQ,IACR,YACA,UACwB;AACxB,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;CAEX,MAAM,YAAY,KAAK,KAAK,QAAQ,IAAK;CACzC,MAAM,cAAc,QAAQ;AAE5B,KAAI;EAEF,MAAM,OAAO,mBAAmB,OAAO,MAAM,QAAQ,KAAK,IAAI,YAAY,GAAG,IAAI,EAAE,YAAY,SAAS;EACxG,MAAM,SAAS,mBAAmB,OAAO,MAAM,UAAU,KAAK,IAAI,cAAc,GAAG,GAAG,EAAE,YAAY,SAAS;EAE7G,MAAM,WAAW,gBADL,CAAC,GAAG,MAAM,GAAG,OAAO,EACM,MAAM;AAC5C,qBAAmB,OAAO,MAAM,SAAS;AACzC,SAAO;SAEH;AACJ,SAAO,EAAE;;;AAOb,SAAgB,sBAAsB,OAA4B;C
AChE,MAAM,QAAQ,UAAU,MAAM,UAAU;CACxC,MAAM,WAAkE;EACtE,QAAQ,MAAM;EACd,OAAO,MAAM;EACb,MAAM,MAAM;EACZ,OAAO,MAAM;EACb,SAAS,QAAQ,MAAM,UAAU;EACjC,KAAK,MAAM;EACX,WAAW,MAAM;EACjB,UAAU,MAAM;EACjB;AACD,KAAI,MAAM,WACR,UAAS,aAAa,MAAM;AAC9B,KAAI,MAAM,OAAO,SAAS,EACxB,UAAS,SAAS,IAAI,MAAM,OAAO,KAAK,KAAK,CAAC;CAGhD,MAAM,QAAQ;EAFH,iBAAiB,SAAS;EAElB;EAAI,KAAK,MAAM;EAAQ;AAE1C,KAAI,MAAM,MAAM;EACd,MAAM,OAAOC,eAAa,MAAM,MAAM,MAAM;AAC5C,QAAM,KAAK,IAAI,KAAK;;AAGtB,KAAI,MAAM,YAAY,SAAS,GAAG;AAChC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,MAAM,aAAa;GACjC,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,aAAa,EAAE,eAAe,kBAAkB;GACtD,MAAM,cAAcA,eAAa,EAAE,MAAM,IAAI;AAC7C,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,aAAa,UAAU,IAAI,IAAI,YAAY;;;AAIjF,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,mBAAmB,QAA+B;CAChE,MAAM,yBAAS,IAAI,KAA+B;AAClD,MAAK,MAAM,SAAS,OAClB,WAAU,QAAQ,MAAM,YAAY,EAAE,CAAC,CAAC,KAAK,MAAM;CAGrD,MAAM,aAAwC;EAC5C,KAAK;EACL,UAAU;EACV,MAAM;EACN,SAAS;EACT,OAAO;EACR;CAED,MAAM,YAAyB;EAAC;EAAO;EAAY;EAAQ;EAAS;EAAU;CAU9E,MAAM,WAAqB;EARhB;GACT;GACA,UAAU,OAAO;GACjB,SAAS,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAChD,WAAW,OAAO,QAAO,MAAK,EAAE,UAAU,OAAO,CAAC;GAClD;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAkB;EAAG;AAEpE,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,QAAQ,OAAO,IAAI,KAAK;AAC9B,MAAI,CAAC,OAAO,OACV;AACF,WAAS,KAAK,MAAM,WAAW,MAAM,IAAI,MAAM,OAAO,IAAI,GAAG;AAC7D,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,YAAY,MAAM,YAAY,IAAI,MAAM,MAAM,UAAU,KAAK;GACnE,MAAM,QAAQ,MAAM,UAAU,SAAS,KAAK;GAC5C,MAAM,WAAW,MAAM,aAAa,cAAc,MAAM,WAAW,KAAK;GACxE,MAAM,OAAO,QAAQ,MAAM,UAAU;AACrC,YAAS,KAAK,OAAO,MAAM,OAAO,YAAY,MAAM,OAAO,QAAQ,MAAM,QAAQ,YAAY,QAAQ,SAAS,IAAI,KAAK,GAAG;;AAE5H,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;AC/jB5B,MAAa,SAAS,OAAO,OAAO;CAClC,OAAO;CACP,YAAY;CACZ,SAAS;CACT,SAAS,EAAE,cAAc,cAAA;CAC1B,CAAC;AAKF,eAAsB,UAAU,KAAqC;AACnE,QAAO,OAAO,KAAK,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;;AAMhE,eAAsB,UAAU,KAA+B;CAC7D,MAAM,MAAM,MAAM,OAAO,IAAI,KAAK,EAAE,QAAQ,QAAQ,CAAC,CAAC,YAAY,KAAK;AACvE,KAAI,CAAC,IACH,QAAO;AAET,QAAO,EADa,IAAI,QAAQ,IAAI,eAAe,IAAI,IACnC,SAAS,YAAY;;AAM3C,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAEF,SAAgB,iBAAiB,KAAsB;AACrD,KAAI;EACF,MAAM,EAAE,aAAa,IAAI,IAAI,IAAI;AACjC,SAAO,cAAc,IAAI,SAAS;SAE9B;AAAE,SAAO;;;AAMjB,SAAgB,gBAAgB,KAAsB;AACpD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,SAAO,OAAO,aAAa,gBAAgB,OAAO,aAAa;SAE3D;AACJ,SAAO;;;AAOX,SAAgB,eAAe,KAAqD;CAClF,MAAM,QAAQ,IAAI,MAAM,qDAAqD;AAC7E,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EAAE,OAAO,MAAM;EAAK,MAAM,MAAM;EAAK;;AAM9C,SAAgB,iBAAiB,KAAqB;AACpD,QAAO,IACJ,QAAQ,UAAU,GAAG,CACrB,QAAQ,QAAQ,GAAG,CACnB,QAAQ,UAAU,GAAG,CACrB,QAAQ,aAAa,WAAW,CAChC,QAAQ,4BAA4B,qBAAqB,CAEzD,QAAQ,qBAAqB,sBAAsB;;AAOxD,SAAgB,iBAAiB,MAA8C;AAE7E,KAAI,KAAK,WAAW,IAAI,EAAE;EACxB,MAAM,WAAW,KAAK,QAAQ,IAAI;AAClC,MAAI,aAAa,IAAI;GACnB,MAAM,QAAQ,KAAK,QAAQ,KAAK,WAAW,EAAE;AAC7C,OAAI,UAAU,GACZ,QAAO;IAAE,MAAM,KAAK,MAAM,GAAG,MAAM;IAAE,KAAK,KAAK,MAAM,QAAQ,EAAA;IAAI;;AAErE,SAAO,EAAE,MAAM,MAAM;;CAGvB,MAAM,QAAQ,KAAK,QAAQ,IAAI;AAC/B,KAAI,UAAU,GACZ,QAAO;EAAE,MAAM,KAAK,MAAM,GAAG,MAAM;EAAE,KAAK,KAAK,MAAM,QAAQ,EAAA;EAAI;AACnE,QAAO,EAAE,MAAM,MAAM;;AAMvB,SAAgB,kBAAkB,KAAiC;CACjE,MAAM,OAAO,IAAI,QAAQ,IAAI;AAC7B,KAAI,SAAS,GACX,QAAO,KAAA;CACT,MAAM,WAAW,IAAI,MAAM,OAAO,EAAE;AAEpC,KAAI,CAAC,YAAY,aAAa,SAC5B,QAAO,KAAA;AACT,QAAO;;ACzFT,SAAgB,YAAY,SAAgC;CAC1D,MAAM,QAAQ,QAAQ,QAAQ,MAAM,GAAG;CACvC,MAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,KAAI,CAAC,MACH,QAAO;AACT,QAAO;EACL,OAAO,CAAC,MAAM;EACd,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,OAAO,MAAM,KAAK,CAAC,MAAM,KAAK;EAC9B,KAAK;EACN;;AAUH,SAAS,eAAe,KAAa,aAAqC;AACxE,KAAI,aAAa;EAEf,MAAM,UAAU,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,QAAQ,CAAC;AAC3
E,MAAI,QACF,QAAO,QAAQ;EACjB,MAAM,YAAY,IAAI,MAAM,IAAI,OAAO,IAAI,YAAY,YAAY,CAAC,UAAU,CAAC;AAC/E,MAAI,UACF,QAAO,UAAU;;AAGrB,QAAO,IAAI,QAAQ,MAAM,GAAG;;AAG9B,SAAS,YAAY,KAAqB;AACxC,QAAO,IAAI,QAAQ,uBAAuB,OAAO;;AAMnD,SAAS,kBAAkB,KAAa,aAA8B;AAEpE,QAAO,IAAI,WAAW,GAAG,YAAY,GAAG,IAAI,IAAI,WAAW,GAAG,YAAY,IAAI,IAAI,IAAI,WAAW,GAAG,YAAY,GAAG;;AAMrH,SAAgB,aAAa,SAA0B;AACrD,QAAO,oBAAoB,KAAK,QAAQ,QAAQ,MAAM,GAAG,CAAC;;AAG5D,SAAgB,cAAc,GAAW,GAAmB;AAC1D,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,KAAI,EAAE,UAAU,EAAE,MAChB,QAAO,EAAE,QAAQ,EAAE;AACrB,QAAO,EAAE,QAAQ,EAAE;;AAMrB,SAAS,mBAAmB,OAAe,MAA+B;AACxE,KAAI;EACF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GACzC;GACA,SAAS,MAAM,GAAG,KAAK;GACvB;GACA;GACA;GACD,EAAE;GAAE,UAAU;GAAS,SAAS;GAAQ,OAAO;IAAC;IAAU;IAAQ;;GAAW,CAAC;AAC/E,MAAI,CAAC,OACH,QAAO,EAAE;AACX,SAAO,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAC;SAE1E;AACJ,SAAO,EAAE;;;AAOb,eAAe,qBAAqB,OAAe,MAAwC;AAKzF,SAJa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,YACvC,EAAE,QAAQ,YAAY,QAAQ,KAAO,EAAE,CACxC,CAAC,YAAY,KAAK,GACN,YAAY,EAAE;;AAM7B,eAAe,iBAAiB,OAAe,MAAwC;AACrF,KAAI,eAAe,EAAE;EACnB,MAAM,WAAW,mBAAmB,OAAO,KAAK;AAChD,MAAI,SAAS,SAAS,EACpB,QAAO;;AAEX,QAAO,qBAAqB,OAAO,KAAK;;AAS1C,SAAgB,eAAe,UAA2B,aAAsB,kBAA2B,UAAoC;CAE7I,MAAM,kBAAkB,eAAe,SAAS,MAAK,MAAK,kBAAkB,EAAE,KAAK,YAAY,CAAC;CAChG,MAAM,cAAc,mBAAmB,YAAY,iBAAiB,GAAG;CACvE,MAAM,wBAAwB,mBAAmB,aAAa,iBAAiB,GAAG;CAClF,MAAM,SAAS,WAAW,IAAI,KAAK,SAAS,CAAC,SAAS,GAAG;CAoCzD,MAAM,SAlCW,SAAS,QAAQ,MAAM;EACtC,MAAM,MAAM,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC5E,MAAI,CAAC,IACH,QAAO;EAET,MAAM,KAAK,YAAY,IAAI;AAC3B,MAAI,CAAC,GACH,QAAO;AAGT,MAAI,mBAAmB,eAAe,CAAC,kBAAkB,EAAE,KAAK,YAAY,CAC1E,QAAO;AAGT,MAAI,QAAQ;GACV,MAAM,UAAU,EAAE,eAAe,EAAE;AACnC,OAAI,WAAW,IAAI,KAAK,QAAQ,CAAC,SAAS,GAAG,OAC3C,QAAO;;AAIX,MAAI,EAAE,YAAY;AAChB,OAAI,CAAC,yBAAyB,CAAC,YAC7B,QAAO;AACT,UAAO,GAAG,UAAU,YAAY,SAAS,GAAG,UAAU,YAAY;;AAIpE,MAAI,eAAe,cAAc,IAAI,YAAY,GAAG,EAClD,QAAO;AAET,SAAO;GACP,CAGC,MAAM,GAAG,MAAM;EACd,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;EAC7E,MAAM,OAAO,eAAe,EAAE,KAAK,kBAAkB,cAAc,KAAA,EAAU;AAC7E,MAAI,CAAC,QAAQ,CAAC,KACZ,QAAO;AACT,SAAO,cAAc,YAAY,KAAK,EAAG,YAAY,KAAK,CAAE;GAC5D;AAGJ,QAAO,WAAW,SAAS,OAAO,MAAM,GAAG,GAAG;;AAMhD,SAAS,cAAc,SAAwB,aAA8B;CAC3E,MAAM,OAAO,QAAQ,QAAQ,eAAe,QAAQ,UAAU;CAC9D,MAAM,UAAU,eAAe,QAAQ,KAAK,YAAY,IAAI,QAAQ;CAEpE,MAAM,KAAK;EACT;EACA,QAAQ,QAAQ;EAChB,YAAY;EACZ,cAAc;EACf;AACD,KAAI,QAAQ,QAAQ,QAAQ,SAAS,QAAQ,IAC3C,IAAG,KAAK,UAAU,QAAQ,KAAK,QAAQ,MAAM,OAAM,CAAC,GAAG;AACzD,IAAG,KAAK,MAAM;AAEd,QAAO,GAAG,GAAG,KAAK,KAAK,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,IAAI,MAAM,QAAQ;;AAc5E,SAAgB,qBAAqB,gBAAuD,aAA8B;CAExH,MAAM,OAA4B,MAAM,QAAQ,eAAe,GAC3D;EAAE,UAAU;EAAgB;EAAa,GACzC;CAEJ,MAAM,EAAE,UAAU,cAAc,iBAAiB;CACjD,MAAM,MAAM,KAAK;CAUjB,MAAM,QAAkB;EAPb;GACT;GACA,UAHY,SAAS,UAAU,cAAc,UAAU;GAIvD,WAAW,SAAS,IAAI,OAAO;GAC/B;GACD,CAE2B,KAAK,KAAK;EAAE;EAAI;EAAoB;EAAG;AAGnE,KAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,QAAM,KAAK,oBAAoB,GAAG;AAClC,OAAK,MAAM,KAAK,aACd,OAAM,KAAK,MAAM,EAAE,QAAQ,WAAW,EAAE,QAAQ,QAAQ,EAAE,MAAM,IAAI,EAAE,KAAK,GAAG;AAEhF,QAAM,KAAK,GAAG;;AAIhB,KAAI,SAAS,SAAS,GAAG;AACvB,MAAI,gBAAgB,aAAa,SAAS,EACxC,OAAM,KAAK,oBAAoB,GAAG;AACpC,OAAK,MAAM,KAAK,UAAU;GACxB,MAAM,OAAO,QAAQ,EAAE,eAAe,EAAE,UAAU;GAClD,MAAM,WAAW,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GAAG,EAAE,MAAM,IAAI,EAAE;GAE9E,MAAM,KAAK,YADK,eAAe,EAAE,KAAK,IAAI,IAAI,EAAE,IACjB;GAC/B,MAAM,QAAQ,IAAI,UAAU,KAAK,GAAG,UAAU,IAAI,iBAAiB,IAAI,UAAU,IAAI,iBAAiB;AACtG,SAAM,KAAK,MAAM,EAAE,IAAI,MAAM,SAAS,QAAQ,EAAE,QAAQ,EAAE,IAAI,IAAI,KAAK,GAAG,QAAQ;;AAEpF,QAAM,KAAK,GAAG;;AAIhB,KAAI,cAAc;AAChB,QAAM,KAAK,gBAAgB,GAAG;AAC9B,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,GAAG;;AAG
hB,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,cAAc,SAAiC;CAC7D,MAAM,QAAQ,QAAQ,YAAY,IAAI,MAAM;AAC5C,QAAO,KAAK,SAAS,OAAO,iBAAiB,KAAK,KAAK;;AAkBzD,eAAe,eAAe,OAAe,MAAc,KAAa,aAA8C;CACpH,MAAM,QAAkB,EAAE;AAG1B,KAAI,aAAa;EACf,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;EACnD,MAAM,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI;EACjE,MAAM,aAAa,CAAC,GAAG,IAAI,IAAI,CAAC,WAAW,UAAU,CAAC,CAAC;AACvD,OAAK,MAAM,QAAQ,WACjB,OAAM,KAAK,YAAY,KAAK,eAAe;;AAK/C,OAAM,KAAK,gBAAgB,gBAAgB,aAAa;AAExD,MAAK,MAAM,QAAQ,OAAO;EAExB,MAAM,UAAU,MAAM,OADV,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QACvC;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AAClH,MAAI,QACF,QAAO;;AAEX,QAAO;;AAUT,eAAsB,kBACpB,OACA,MACA,kBACA,QACA,aACA,UACA,cACsB;CAEtB,MAAM,WAAW,eADA,MAAM,iBAAiB,OAAO,KAAK,EACV,aAAa,kBAAkB,SAAS;AAElF,KAAI,SAAS,SAAS,GAAG;EAIvB,MAAM,OAFc,SAAS,QAAO,MAAK,CAAC,cAAc,EAAE,CAAC,CAElC,KAAK,MAAM;AAIlC,UAAO;IACL,MAAM,YAJS,EAAE,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,WAAW,IAAI,GACzD,EAAE,MACF,IAAI,EAAE,MAEmB;IAC3B,SAAS,cAAc,GAAG,YAAA;IAC3B;IACD;EAIF,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,gBAAgB,UAAU,SAAS,GAAI,KACM,YAAY;AACrE,MAAI,aAAa,UAAU,SAAS,IAClC,MAAK,KAAK;GAAE,MAAM;GAAyB,SAAS;GAAW,CAAC;AAGlE,SAAO;;CAKT,MAAM,YAAY,MAAM,eAAe,OAAO,MADlC,gBAAgB,UAAU,QACmB,YAAY;AACrE,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,CAAC;EAAE,MAAM;EAAyB,SAAS;EAAW,CAAC;;ACnWhE,SAAS,kBAAkB,SAAkC;AAW3D,QAAO,GAVI;EACT;EACA,YAAY,QAAQ;EACpB,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAM,CAAC;EAC9C,SAAS,QAAQ;EACjB,QAAQ,QAAQ;EAChB;EACA;EACD,CAEY,KAAK,KAAK,CAAC,QAAQ,QAAQ,MAAM,MAAM,QAAQ;;AAM9D,eAAe,cAAc,OAAqD;AAChF,KAAI;EACF,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;GAAE,cAAc;GAAQ,QAAQ,YAAY,QAAQ,IAAA;GAAS,CAAC,CAAC,YAAY,KAAK;AACrH,MAAI,CAAC,KACH,QAAO;EAGT,IAAI,QAAQ;EACZ,MAAM,aAAa,KAAK,MAAM,yBAAyB;AACvD,MAAI,WACF,SAAQ,WAAW,GAAI,MAAM;AAE/B,MAAI,CAAC,OAAO;GACV,MAAM,iBAAiB,KAAK,MAAM,0BAA0B;AAC5D,OAAI,eACF,SAAQ,eAAe,GAAI,MAAM;;EAGrC,MAAM,WAAW,eAAe,KAAK;AACrC,MAAI,CAAC,SACH,QAAO;AAET,SAAO;GACL,SAAS,MAAM;GACf,OAAO,SAAS,MAAM,SAAS,WAAW,MAAM;GAChD,MAAM,MAAM;GACZ;GACA,KAAK,MAAM;GACZ;SAEG;AACJ,SAAO;;;AASX,SAAS,qBAAqB,SAAwB,kBAAyC;CAC7F,MAAM,cAAc,YAAY,iBAAiB;AACjD,KAAI,CAAC,YACH,QAAO;AAET,QAAO,QAAQ,QAAQ,UAAU;EAC/B,MAAM,UAAU,YAAY,MAAM,QAAQ;AAC1C,MAAI,CAAC,QACH,QAAO;AAET,SAAO,cAAc,SAAS,YAAY,IAAI;GAC9C;;AAQJ,eAAsB,kBACpB,aACA,kBACsB;CACtB,MAAM,SAAS,cAAc,YAAY;AACzC,KAAI,CAAC,OACH,QAAO,EAAE;CAEX,MAAM,mBAAmB,qBAAqB,OAAO,UAAU,iBAAiB;AAChF,KAAI,iBAAiB,WAAW,EAC9B,QAAO,EAAE;CAEX,MAAM,WAA8B,EAAE;CAGtC,MAAM,YAAY;AAClB,MAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK,WAAW;EAC3D,MAAM,QAAQ,iBAAiB,MAAM,GAAG,IAAI,UAAU;EACtD,MAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,KAAI,UAAS,cAAc,MAAM,CAAC,CAAC;AAC3E,OAAK,MAAM,UAAU,QACnB,KAAI,OACF,UAAS,KAAK,OAAO;;AAI3B,KAAI,SAAS,WAAW,EACtB,QAAO,EAAE;AAGX,UAAS,MAAM,GAAG,MAAM;EACtB,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;EAC7C,MAAM,OAAO,EAAE,QAAQ,MAAM,IAAI,CAAC,IAAI,OAAO;AAC7C,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,KAAK,QAAQ,KAAK,OAAO,EAAE,KAAK;GAC3D,MAAM,QAAQ,KAAK,MAAM,MAAM,KAAK,MAAM;AAC1C,OAAI,SAAS,EACX,QAAO;;AAEX,SAAO;GACP;AAGF,QAAO,SAAS,KAAI,OAAM;EACxB,MAAM,iBAAiB,EAAE,QAAQ;EACjC,SAAS,kBAAkB,EAAA;EAC5B,EAAE;;ACvIL,MAAM,wBAAwB,IAAI,IAAI;CACpC;CACA;CACA;CACA;CACD,CAAC;AAEF,MAAM,uBAAuB,IAAI,IAAI;CACnC;CACA;CACA;CACD,CAAC;AAwBF,MAAM,mBAAmB;AAGzB,SAAS,aAAa,MAAuB;AAC3C,QAAO,iBAAiB,KAAK,KAAK,IAAI,UAAU,KAAK,KAAK;;AAO5D,SAAS,aAAa,MAAc,OAAuB;AACzD,KAAI,KAAK,UAAU,MACjB,QAAO;CAGT,MAAM,cAAc;CACpB,IAAI,cAAc;CAClB,IAAI;AAGJ,SAAQ,QAAQ,YAAY,KAAK,KAAK,MAAM,MAAM;EAChD,MAAM,aAAa,MAAM;EACzB,MAAM,WAAW,aAAa,MAAM,GAAG;AAEvC,MAAI,aAAa,SAAS,WAAW,OAAO;AAC1C,OAAI,YAAY,QAAQ,IACtB,eAAc;OAGd,eAAc;AAEhB;;;CAKJ,MAAM,QAAQ,KAAK,MAAM,GAAG,YAAY;CACxC,MAAM,gBAAgB,MAAM,YAAY,OAAO;AAC/C,KAAI,gBAA
gB,cAAc,GAChC,QAAO,GAAG,MAAM,MAAM,GAAG,cAAc,CAAC;AAE1C,QAAO,GAAG,MAAM;;AAIlB,MAAM,iBAAiB;AAGvB,MAAM,uBAAuB;AAM7B,SAAS,aAAa,GAAwE;AAC5F,SAAQ,EAAE,eAAe,IAAI,MAAM,aAAa,EAAE,KAAK,GAAG,IAAI,MAAM,IAAI,EAAE;;AAO5E,SAAgB,gBAAgB,GAA6B;AAC3D,KAAI,eAAe,KAAK,EAAE,MAAM,CAC9B,QAAO;CAET,IAAI,QAAQ;AAGZ,KAAI,EAAE,aACJ,UAAS;AAIX,KAAI,aADY;EAAC,EAAE;EAAM,EAAE,UAAU;EAAI,GAAG,EAAE,YAAY,KAAI,MAAK,EAAE,KAAA;EAAM,CAAC,KAAK,KAAK,CAC7D,CACvB,UAAS;AAGX,UAAS,KAAK,IAAI,EAAE,aAAa,EAAE;AAGnC,KAAI,EAAE,QAAQ;AACZ,WAAS;AACT,MAAI,EAAE,OAAO,SAAS,IACpB,UAAS;;AAIb,KAAI,EAAE,YAAY,MAAK,MAAK,EAAE,aAAa,CACzC,UAAS;AAGX,KAAI,EAAE,YAAY,MAAK,MAAK,EAAE,YAAY,EAAE,CAC1C,UAAS;AAEX,QAAO;;AAQT,eAAsB,uBACpB,OACA,MACA,QAAQ,IACR,YACA,UAC6B;AAC7B,KAAI,CAAC,eAAe,CAClB,QAAO,EAAE;AAMX,KAAI,CAAC,YAAY,YAAY;EAC3B,MAAM,SAAS,IAAI,KAAK,WAAW;AACnC,SAAO,SAAS,OAAO,UAAU,GAAG,EAAE;AACtC,MAAI,yBAAS,IAAI,MAAM,CACrB,QAAO,EAAE;;AAGb,KAAI;EAMF,MAAM,EAAE,QAAQ,WAAW,UAAU,MAAM;GAAC;GAAO;GAAW;GAAM,SAFtD,wGAFK,KAAK,IAAI,QAAQ,GAAG,GAAG,CAEuF;GAE3C;GAAM,SAAS;GAAS;GAAM,QAAQ;GAAO,EAAE;GACnI,UAAU;GACV,WAAW,KAAK,OAAO;GACxB,CAAC;AACF,MAAI,CAAC,OACH,QAAO,EAAE;EAGX,MAAM,QADO,KAAK,MAAM,OAAO,EACX,MAAM,YAAY,aAAa;AACnD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,QAAO,EAAE;EAEX,MAAM,SAAS,WAAW,IAAI,KAAK,SAAS,CAAC,SAAS,GAAG;AA6DzD,SA5DoB,MACjB,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW;GAClB,MAAM,OAAO,EAAE,UAAU,QAAQ,IAAI,aAAa;AAClD,UAAO,CAAC,qBAAqB,IAAI,IAAI;IACrC,CACD,QAAQ,MAAW,CAAC,UAAU,IAAI,KAAK,EAAE,UAAU,CAAC,SAAS,IAAI,OAAO,CACxE,KAAK,MAAW;GAEf,IAAI;AACJ,OAAI,EAAE,QAAQ,MAAM;IAClB,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,OAAO,kBAAkB;IAC7F,MAAM,SAAS,EAAE,OAAO,QAAQ;AAEhC,aAAS,GADG,gBAAgB,SAAS,MAAM,OAAO,wBAAwB,KACxD,EAAE,OAAO;;GAI7B,MAAM,YAAiC,EAAE,UAAU,SAAS,EAAE,EAC3D,QAAQ,MAAW,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE,OAAO,MAAM,CAAC,CAC9D,QAAQ,MAAW,CAAC,iBAAiB,MAAM,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC,CACjE,KAAK,MAAW;IACf,MAAM,eAAe;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,kBAAkB;AACtF,WAAO;KACL,MAAM,EAAE,QAAQ;KAChB,QAAQ,EAAE,OAAO;KACjB,WAAW,EAAE,WAAW,cAAc;KACtC;KACD;KACD,CACD,MAAM,GAAsB,MAAyB,aAAa,EAAE,GAAG,aAAa,EAAE,CAAC,CACvF,MAAM,GAAG,EAAE;AAEd,UAAO;IACL,QAAQ,EAAE;IACV,OAAO,EAAE;IACT,MAAM,EAAE,QAAQ;IAChB,UAAU,EAAE,UAAU,QAAQ;IAC9B,WAAW,EAAE;IACb,KAAK,EAAE;IACP,aAAa,EAAE,eAAe;IAC9B,UAAU,EAAE,UAAU,cAAc;IACpC,cAAc;KAAC;KAAS;KAAU;KAAe,CAAC,SAAS,EAAE,kBAAkB;IAC/E;IACA,aAAa;IACd;IACD,CAED,KAAK,OAAyB;GAAE;GAAG,OAAO,gBAAgB,EAAA;GAAI,EAAE,CAChE,QAAQ,EAAE,YAAY,SAAS,qBAAqB,CACpD,MAAM,GAAG,MAAM;GACd,MAAM,QAAQ,sBAAsB,IAAI,EAAE,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;GAC1E,MAAM,QAAQ,sBAAsB,IAAI,EAAE,EAAE,SAAS,aAAa,CAAC,GAAG,IAAI;AAC1E,OAAI,UAAU,MACZ,QAAO,QAAQ;AACjB,UAAO,EAAE,QAAQ,EAAE;IACnB,CACD,MAAM,GAAG,MAAM,CACf,KAAK,EAAE,QAAQ,EAAE;SAIhB;AACJ,SAAO,EAAE;;;AAOb,SAAgB,2BAA2B,GAA6B;CACtE,MAAM,KAAK,iBAAiB;EAC1B,QAAQ,EAAE;EACV,OAAO,EAAE;EACT,UAAU,EAAE;EACZ,SAAS,QAAQ,EAAE,UAAU;EAC7B,KAAK,EAAE;EACP,SAAS,EAAE;EACX,UAAU,EAAE;EACZ,UAAU,CAAC,CAAC,EAAE;EACf,CAAC;CAEF,MAAM,YAAY,EAAE,eAAe,IAAI,OAAO;CAC9C,MAAM,QAAQ;EAAC;EAAI;EAAI,KAAK,EAAE;EAAQ;AAEtC,KAAI,EAAE,KACJ,OAAM,KAAK,IAAI,aAAa,EAAE,MAAM,UAAU,CAAC;AAGjD,KAAI,EAAE,OACJ,OAAM,KAAK,IAAI,OAAO,IAAI,sBAAsB,IAAI,aAAa,EAAE,QAAQ,IAAK,CAAC;UAE1E,EAAE,YAAY,SAAS,GAAG;AAEjC,QAAM,KAAK,IAAI,OAAO,IAAI,kBAAkB;AAC5C,OAAK,MAAM,KAAK,EAAE,aAAa;GAC7B,MAAM,YAAY,EAAE,YAAY,IAAI,MAAM,EAAE,UAAU,KAAK;GAC3D,MAAM,aAAa,EAAE,eAAe,kBAAkB;AACtD,SAAM,KAAK,IAAI,MAAM,EAAE,OAAO,IAAI,aAAa,UAAU,IAAI,IAAI,aAAa,EAAE,MAAM,IAAI,CAAC;;;AAI/F,QAAO,MAAM,KAAK,KAAK;;AAOzB,SAAgB,wBAAwB,aAAyC;CAC/E,MAAM,6BAAa,IAAI,KAAiC;AACxD,MAAK,MAAM,KAAK,YAEd,WAAU,YADE,EAAE,YAAY,uBACO,EAAE,CAAC,CAAC,KAAK,EAAE;CAG9C,MAAM,WAAW,YAAY,Q
AAO,MAAK,EAAE,OAAO,CAAC;CASnD,MAAM,WAAqB;EAPhB;GACT;GACA,UAAU,YAAY;GACtB,aAAa;GACb;GACD,CAE8B,KAAK,KAAK;EAAE;EAAI;EAAuB;EAAG;CAGzE,MAAM,OAAO,CAAC,GAAG,WAAW,MAAM,CAAC,CAAC,MAAM,GAAG,MAAM;AAGjD,UAFc,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACjD,sBAAsB,IAAI,EAAE,aAAa,CAAC,GAAG,IAAI,MACvC,EAAE,cAAc,EAAE;GAC1C;AAEF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,WAAW,IAAI,IAAI;AACjC,WAAS,KAAK,MAAM,IAAI,IAAI,MAAM,OAAO,IAAI,GAAG;AAChD,OAAK,MAAM,KAAK,OAAO;GACrB,MAAM,UAAU,EAAE,cAAc,IAAI,MAAM,EAAE,YAAY,KAAK;GAC7D,MAAM,WAAW,EAAE,SAAS,gBAAgB;GAC5C,MAAM,OAAO,QAAQ,EAAE,UAAU;AACjC,YAAS,KAAK,OAAO,EAAE,OAAO,iBAAiB,EAAE,OAAO,QAAQ,EAAE,QAAQ,UAAU,SAAS,IAAI,KAAK,GAAG;;AAE3G,WAAS,KAAK,GAAG;;AAGnB,QAAO,SAAS,KAAK,KAAK;;ACnU5B,MAAM,YAAY;CAChB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,gBAAgB,MAAM;AAK5B,eAAsB,kBAAkB,YAA0C;AAChF,KAAI,CAAC,WAAW,KAAK,YAAY,eAAe,CAAC,CAC/C,QAAO,EAAE;CAOX,MAAM,QAAQ,MAAM,OAAO,CAAC,sBAAsB,EAAE;EAClD,KAAK;EACL,QAPa,CACb,GAAG,UAAU,KAAI,MAAK,MAAM,EAAE,KAAK,EACnC,GAAG,cACJ;EAKC,UAAU;EACX,CAAC;CAEF,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,YAAY,KAAK;EACtC,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,SAAS,QAAQ;UAEpC;AACJ;;AAGF,MAAI,QAAQ,SAAS,cACnB;AAEF,UAAQ,KAAK;GAAE,MAAM;GAAM;GAAS,MAAM;GAAS,CAAC;;AAGtD,QAAO;;ACtCT,SAAgB,mBAAmB,OAAsC;CACvE,MAAM,UAAU,MAAM,MAAM;AAG5B,KAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAGT,KAAI,QAAQ,WAAW,KAAK,IAAI,QAAQ,WAAW,MAAM,IAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CAI7G,QAAO;EAAE,MAAM;EAAS,WAHN,QAAQ,WAAW,IAAI,GACrC,QAAQ,QAAQ,IAAI,QAAQ,IAAI,QAAQ,MAAM,EAAE,CAAC,GACjD,QAAQ,QAAA;EACuB;AAIrC,KAAI,QAAQ,WAAW,OAAO,EAAE;EAE9B,MAAM,KAAK,eADQ,iBAAiB,QAAQ,CACP;AACrC,MAAI,GACF,QAAO;GAAE,MAAM;GAAU,OAAO,GAAG;GAAO,MAAM,GAAG;GAAM;AAC3D,SAAO;;AAIT,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,UAAU,CACjE,QAAO,YAAY,QAAQ;AAI7B,KAAI,qBAAqB,KAAK,QAAQ,CACpC,QAAO;EAAE,MAAM;EAAU,OAAO,QAAQ,MAAM,IAAI,CAAC;EAAI,MAAM,QAAQ,MAAM,IAAI,CAAC;EAAI;AAItF,QAAO;;AAGT,SAAS,YAAY,KAAoC;AACvD,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAE3B,MAAI,OAAO,aAAa,gBAAgB,OAAO,aAAa,kBAAkB;GAC5E,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AAGT,OAAI,MAAM,OAAO,UAAU,MAAM,UAAU,EAGzC,QAAO;IAAE,MAAM;IAAU;IAAO;IAAM,KAF1B,MAAM;IAEyB,WADzB,MAAM,SAAS,IAAI,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI,GAAG,KAAA;IACV;AAGxD,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,MAAI,OAAO,aAAa,cAAc;GACpC,MAAM,QAAQ,OAAO,SAAS,QAAQ,OAAO,GAAG,CAAC,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;GACjF,MAAM,QAAQ,MAAM;GACpB,MAAM,OAAO,MAAM;AACnB,OAAI,CAAC,SAAS,CAAC,KACb,QAAO;AACT,UAAO;IAAE,MAAM;IAAU;IAAO;IAAM;;AAGxC,SAAO;SAEH;AACJ,SAAO;;;AAOX,SAAgB,0BAA0B,SAA0D;CAClG,MAAM,QAAQ,QAAQ,MAAM,wBAAwB;AACpD,KAAI,CAAC,MACH,QAAO,EAAE;CAEX,MAAM,SAAkD,EAAE;AAC1D,MAAK,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;EACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,MAAI,CAAC,GACH;AACF,MAAI,GAAG,OAAO,OACZ,QAAO,OAAO,GAAG;AACnB,MAAI,GAAG,OAAO,cACZ,QAAO,cAAc,GAAG;;AAE5B,QAAO;;AAST,MAAM,kBAAkB;CAAC;CAAW;CAAc;CAAS;AAK3D,eAAsB,eACpB,QACA,YACwD;AACxD,KAAI,OAAO,SAAS,QAClB,QAAO,iBAAiB,OAAO;AACjC,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,KAAI,OAAO,SAAS,SAClB,QAAO,kBAAkB,QAAQ,WAAW;AAC9C,QAAO,EAAE,QAAQ,EAAE,EAAE;;AAKvB,SAAS,iBAAiB,QAAmD;CAC3E,MAAM,OAAO,OAAO;AACpB,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,SAAwB,EAAE;CAGhC,MAAM,YAAY,QAAQ,MAAM,SAAS;AACzC,KAAI,WAAW,UAAU,CACvB,MAAK,MAAM,SAAS,YAAY,WAAW,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,MAAI,CAAC,MAAM,aAAa,CACtB;EACF,MAAM,QAAQ,eAAe,QAAQ,WAAW,MAAM,KAAK,EAAE,UAAU,MAAM,OAAO;AACpF,MAAI,MACF,QAAO,KAAK,MAAM;;AAKxB,KAAI,OAAO,WAAW,G
AAG;EACvB,MAAM,QAAQ,eAAe,MAAM,GAAG;AACtC,MAAI,MACF,QAAO,KAAK,MAAM;;AAGtB,QAAO,EAAE,QAAQ;;AAGnB,SAAS,eAAe,KAAa,UAAsC;CACzE,MAAM,cAAc,QAAQ,KAAK,WAAW;AAC5C,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;CAET,MAAM,UAAU,aAAa,aAAa,QAAQ;CAClD,MAAM,cAAc,0BAA0B,QAAQ;CACtD,MAAM,UAAU,IAAI,MAAM,IAAI,CAAC,KAAK;CACpC,MAAM,OAAO,YAAY,QAAQ;CAEjC,MAAM,QAAkD,EAAE;AAC1D,MAAK,MAAM,UAAU,iBAAiB;EACpC,MAAM,aAAa,QAAQ,KAAK,OAAO;AACvC,MAAI,CAAC,WAAW,WAAW,CACzB;AACF,OAAK,MAAM,QAAQ,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,EAAE;AACnE,OAAI,CAAC,KAAK,QAAQ,CAChB;AACF,SAAM,KAAK;IACT,MAAM,GAAG,OAAO,GAAG,KAAK;IACxB,SAAS,aAAa,QAAQ,YAAY,KAAK,KAAK,EAAE,QAAA;IACvD,CAAC;;;AAIN,QAAO;EACL;EACA,aAAa,YAAY,eAAe;EACxC,MAAM;EACN;EACA;EACD;;AAKH,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;AAC1B,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OAAO,QAAQ;AAExB,MAAI,QAAQ,QAAQ;GAClB,MAAM,WAAW,MAAM,OACrB,yBAAyB,MAAM,GAAG,KAAK,eACxC,CAAC,YAAY,KAAK;AACnB,OAAI,UAAU,OAAO,OACnB,QAAO,oBAAoB,OAAQ,MAAO,UAAU,UAAU,OAAO,WAAW,WAAW;;AAE/F,SAAO,EAAE,QAAQ,EAAE,EAAE;;AAGvB,QAAO,oBAAoB,OAAQ,MAAO,KAAK,MAAM,OAAO,WAAW,WAAW;;AAGpF,eAAe,oBACb,OACA,MACA,KACA,MACA,WACA,YACwD;CACxD,MAAM,WAAW,KAAK,MAAM,KAAI,MAAK,EAAE,KAAK;CAC5C,MAAM,YAAY,KAAK,MAAM;CAG7B,IAAI;AAEJ,KAAI,WAAW;EAEb,MAAM,aAAa,CACjB,GAAG,UAAU,YAEb,UAAU,SAAS,YAAY,GAAG,YAAY,KAC/C,CAAC,OAAO,QAAQ;AAEjB,iBAAe,SAAS,QAAO,MAAK,WAAW,SAAS,EAAE,CAAC;OAI3D,gBAAe,SAAS,QAAO,MAC7B,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAChD;AAGH,KAAI,aAAa,WAAW,EAC1B,QAAO;EAAE,QAAQ,EAAE;EAAE;EAAW;CAElC,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAO,MAAM,KAAK,OAAO;AAC9D,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OAAK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAO,MAAM,KAAK,SAAS;AACpE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO;EAAE;EAAQ;EAAW;;AAG9B,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI,GAAG,QAC7D,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;AAarB,eAAe,kBACb,QACA,YACwD;CACxD,MAAM,EAAE,OAAO,SAAS;AACxB,KAAI,CAAC,SAAS,CAAC,KACb,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,MAAM,OAAO,OAAO;CAC1B,MAAM,YAAY,mBAAmB,GAAG,MAAM,GAAG,OAAO;AAExD,cAAa,oBAAoB,MAAM,GAAG,KAAK,GAAG,MAAM;CAExD,MAAM,OAAO,MAAM,OACjB,sCAAsC,UAAU,uBAAuB,IAAI,8BAC5E,CAAC,YAAY,KAAK;AAEnB,KAAI,CAAC,MAAM,OACT,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,WAAW,KAAK,QAAO,MAAK,EAAE,SAAS,OAAO,CAAC,KAAI,MAAK,EAAE,KAAK;CAGrE,MAAM,eAAe,OAAO,YACxB,SAAS,QAAO,MAAK,MAAM,GAAG,OAAO,UAAU,WAAW,GAC1D,SAAS,QAAO,MAAK,EAAE,MAAM,6BAA6B,IAAI,MAAM,WAAW;AAEnF,KAAI,aAAa,WAAW,EAC1B,QAAO,EAAE,QAAQ,EAAE,EAAE;CAEvB,MAAM,QAAQ,OAAO,EAAE;CACvB,MAAM,SAAwB,EAAE;AAEhC,cAAa,SAAS,aAAa,OAAO,2BAA2B;AAErE,OAAM,QAAQ,IAAI,aAAa,KAAI,WAAU,MAAM,YAAY;EAC7D,MAAM,WAAW,WAAW,aAAa,KAAK,OAAO,QAAQ,gBAAgB,GAAG;EAChF,MAAM,UAAU,MAAM,eAAe,OAAQ,MAAO,KAAK,OAAO;AAChE,MAAI,CAAC,QACH;EAEF,MAAM,cAAc,0BAA0B,QAAQ;EACtD,MAAM,UAAU,WAAW,SAAS,MAAM,IAAI,CAAC,KAAK,GAAI;EACxD,MAAM,OAAO,YAAY,QAAQ;EAGjC,MAAM,kBAA4D,EAAE;EACpE,MAAM,SAAS,WAAW,GAAG,SAAS,KAAK;AAE3C,OA
AK,MAAM,UAAU,iBAAiB;GACpC,MAAM,eAAe,GAAG,SAAS,OAAO;GACxC,MAAM,WAAW,SAAS,QAAO,MAAK,EAAE,WAAW,aAAa,CAAC;AACjE,QAAK,MAAM,YAAY,UAAU;IAC/B,MAAM,cAAc,MAAM,eAAe,OAAQ,MAAO,KAAK,SAAS;AACtE,QAAI,aAAa;KACf,MAAM,eAAe,SAAS,MAAM,OAAO,OAAO;AAClD,qBAAgB,KAAK;MAAE,MAAM;MAAc,SAAS;MAAa,CAAC;;;;AAKxE,SAAO,KAAK;GACV;GACA,aAAa,YAAY,eAAe;GACxC,MAAM;GACN;GACA,OAAO;GACR,CAAC;GACF,CAAC,CAAC;AAEJ,QAAO,EAAE,QAAQ;;AAGnB,eAAe,eAAe,OAAe,MAAc,KAAa,MAAsC;AAC5G,QAAO,OACL,sBAAsB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,QACpD,EAAE,cAAc,QAAQ,CACzB,CAAC,YAAY,KAAK;;ACnarB,eAAsB,aAAa,SAAyC;CAE1E,MAAM,UAAU,GADD,IAAI,IAAI,QAAQ,CAAC,OACN;AAC1B,KAAI,MAAM,UAAU,QAAQ,CAC1B,QAAO;AACT,QAAO;;AAMT,eAAsB,aAAa,KAA0C;CAC3E,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,KAAI,CAAC,WAAW,QAAQ,SAAS,GAC/B,QAAO;AAET,QAAO;EACL,KAAK;EACL,OAAO,mBAAmB,QAAA;EAC3B;;AAMH,SAAgB,mBAAmB,SAA6B;CAC9D,MAAM,QAAoB,EAAE;CAC5B,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,YAAY;AAClB,MAAK,IAAI,QAAQ,UAAU,KAAK,QAAQ,EAAE,UAAU,MAAM,QAAQ,UAAU,KAAK,QAAQ,EAAE;EACzF,MAAM,MAAM,MAAM;AAClB,MAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClB,QAAK,IAAI,IAAI;AACb,SAAM,KAAK;IAAE,OAAO,MAAM;IAAK;IAAK,CAAC;;;AAIzC,QAAO;;AAOT,SAAS,UAAU,KAAsB;AACvC,KAAI;EACF,MAAM,SAAS,IAAI,IAAI,IAAI;AAC3B,MAAI,OAAO,aAAa,SACtB,QAAO;EACT,MAAM,OAAO,OAAO;AAEpB,MAAI,SAAS,eAAe,SAAS,eAAe,SAAS,MAC3D,QAAO;AACT,MAAI,SAAS,kBACX,QAAO;AACT,MAAI,mDAAmD,KAAK,KAAK,CAC/D,QAAO;AACT,MAAI,KAAK,WAAW,IAAI,CACtB,QAAO;AACT,SAAO;SAEH;AAAE,SAAO;;;AAGjB,eAAsB,iBACpB,aACA,SACA,YACuB;CACvB,MAAM,QAAQ,OAAO,EAAE;CACvB,IAAI,YAAY;AAoBhB,SAlBgB,MAAM,QAAQ,IAC5B,YAAY,MAAM,KAAI,SAAQ,MAAM,YAAY;EAC9C,MAAM,MAAM,KAAK,IAAI,WAAW,OAAO,GACnC,KAAK,MACL,GAAG,QAAQ,QAAQ,OAAO,GAAG,GAAG,KAAK,IAAI,WAAW,IAAI,GAAG,KAAK,MAAM,KAAK;AAE/E,MAAI,CAAC,UAAU,IAAI,CACjB,QAAO;AAET,eAAa,KAAK,KAAK,aAAa,YAAY,MAAM,OAAO;EAE7D,MAAM,UAAU,MAAM,UAAU,IAAI;AACpC,MAAI,WAAW,QAAQ,SAAS,IAC9B,QAAO;GAAE,KAAK,KAAK;GAAK,OAAO,KAAK;GAAO;GAAS;AACtD,SAAO;GACP,CAAC,CACJ,EAEc,QAAQ,MAAuB,MAAM,KAAK;;AAO3D,SAAgB,mBAAmB,SAAiB,SAA0B;CAC5E,IAAI,aAAa;AAGjB,KAAI,SAAS;EAEX,MAAM,UADO,QAAQ,QAAQ,OAAO,GAAG,CAClB,QAAQ,uBAAuB,OAAO;AAC3D,eAAa,WAAW,QACtB,IAAI,OAAO,SAAS,QAAQ,mBAAmB,IAAI,EACnD,cACD;;AAIH,cAAa,WAAW,QAAQ,wBAAwB,eAAe;AAEvE,QAAO;;AAOT,SAAgB,gBAAgB,SAAiB,UAAmC;CAClF,MAAM,WAAqB,EAAE;CAC7B,MAAM,QAAQ,QAAQ,MAAM,UAAU;AAEtC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,WAAW,KAAK,MAAM,kBAAkB;AAC9C,MAAI,CAAC,SACH;EAEF,MAAM,MAAM,SAAS;AACrB,MAAI,SAAS,MAAK,MAAK,IAAI,SAAS,EAAE,CAAC,EAAE;GACvC,MAAM,eAAe,KAAK,QAAQ,MAAM,KAAK,QAAQ,OAAO,CAAC;AAC7D,OAAI,eAAe,GACjB,UAAS,KAAK,KAAK,MAAM,eAAe,EAAE,CAAC;;;AAKjD,KAAI,SAAS,WAAW,EACtB,QAAO;AACT,QAAO,SAAS,KAAK,cAAc;;ACxIrC,MAAa,eAAe;AAG5B,MAAa,oBAAoB,MAAc,IAAI,KAAK,IAAI;AAyB5D,eAAe,eAAe,OAAe,MAAc,KAAgC;AAIzF,SAHa,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,SAAS,MACjD,CAAC,YAAY,KAAK,GACN,OAAO,KAAI,MAAK,EAAE,KAAK,IAAI,EAAE;;AAc5C,eAAe,WAAW,OAAe,MAAc,SAAiB,aAAsB,YAAgD;CAC5I,MAAM,aAAa,CAAC,IAAI,WAAW,QAAQ;AAC3C,KAAI,YACF,YAAW,KAAK,GAAG,YAAY,GAAG,UAAU;AAE9C,MAAK,MAAM,OAAO,YAAY;EAC5B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,IAAI;AACpD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAK;GAAO;;AAI9B,KAAI,aAAa;EACf,MAAM,YAAY,MAAM,qBAAqB,OAAO,MAAM,YAAY;AACtE,MAAI,WAAW;GACb,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,UAAU;AAC1D,OAAI,MAAM,SAAS,EACjB,QAAO;IAAE,KAAK;IAAW;IAAO;;;CAKtC,MAAM,WAAW,aACb,CAAC,YAAY,GAAG,CAAC,QAAQ,SAAS,CAAC,QAAO,MAAK,MAAM,WAAW,CAAC,GACjE,CAAC,QAAQ,SAAS;AACtB,MAAK,MAAM,UAAU,UAAU;EAC7B,MAAM,QAAQ,MAAM,eAAe,OAAO,MAAM,OAAO;AACvD,MAAI,MAAM,SAAS,EACjB,QAAO;GAAE,KAAK;GAAQ;GAAO,UAAU;GAAM;;AAGjD,QAAO;;AAOT,eAAe,qBAAqB,OAAe,MAAc,aAA6C;CAC5G,MAAM,OAAO,MAAM,OACjB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CACnB,MAAM,SAAS,GAAG,YAAY;AAC9B,QAAO,MAAM,UAAU,MAAK,MAAK,EAAE,IAAI,WAAW,OAAO,CAAC,EAAE,OAAO
;;AAMrE,SAAS,eAAe,OAAiB,YAA8B;AACrE,QAAO,MAAM,QAAO,MAAK,EAAE,WAAW,WAAW,IAAI,gBAAgB,KAAK,EAAE,CAAC;;AAI/E,MAAM,iBAAiB;CACrB;CACA;CACA;CACA;CACD;AAGD,MAAM,eAAe,IAAI,IAAI;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAGF,MAAM,gBAAgB,IAAI,IAAI;CAC5B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;AAWF,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,aAAa,IAAI,EAAE,aAAa,CAAC,CAAC;;AAM3D,SAAS,aAAa,MAAsB;AAC1C,QAAO,KAAK,MAAM,IAAI,CAAC,OAAO,QAAQ,CAAC;;AAMzC,SAAS,eAAe,MAAuB;AAE7C,QADc,KAAK,MAAM,IAAI,CAChB,MAAK,MAAK,cAAc,IAAI,EAAE,aAAa,CAAC,CAAC;;AAO5D,SAAS,YAAY,KAAa,WAA2B;CAC3D,MAAM,QAAQ,aAAa,IAAI,IAAI;AAEnC,QAAQ,aADU,eAAe,IAAI,GAAG,MAAM,KACb;;AAQnC,SAAS,iBAAiB,UAA2C;CACnE,MAAM,UAAU,SACb,QAAO,MAAK,gBAAgB,KAAK,EAAE,CAAC,CACpC,QAAO,MAAK,CAAC,eAAe,MAAK,MAAK,EAAE,KAAK,EAAE,CAAC,CAAC,CACjD,QAAO,MAAK,EAAE,SAAS,IAAI,CAAC;CAG/B,MAAM,6BAAa,IAAI,KAAuB;AAE9C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,UAAU,KAAK,YAAY,SAAS;AAC1C,MAAI,YAAY,GACd;AAGF,YAAU,YADK,KAAK,MAAM,GAAG,UAAU,EAAgB,QACnB,EAAE,CAAC,CAAC,KAAK,KAAK;;AAGpD,KAAI,WAAW,OAAO,GAAG;EACvB,MAAM,UAAU,CAAC,GAAG,WAAW,SAAS,CAAC,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;AACpF,MAAI,QAAQ,GAAG,UAAU,GAAG;GAC1B,MAAM,aAAa,QAAQ;GAC3B,MAAM,UAAU,WAAW,YAAY,QAAQ;GAC/C,MAAM,cAAc,UAAU,IAAI,WAAW,MAAM,GAAG,QAAQ,GAAG;AACjE,UAAO;IAAE,OAAO,QAAQ;IAAI,QAAQ;IAAa;;;CAKrD,MAAM,4BAAY,IAAI,KAAuB;AAE7C,MAAK,MAAM,QAAQ,SAAS;AAC1B,MAAI,eAAe,KAAK,CACtB;EAGF,MAAM,YAAY,KAAK,YAAY,IAAI;AACvC,MAAI,cAAc,GAChB;AAGF,YAAU,WADE,KAAK,MAAM,GAAG,YAAY,EAAE,QACR,EAAE,CAAC,CAAC,KAAK,KAAK;;AAGhD,KAAI,UAAU,SAAS,EACrB,QAAO;CAGT,MAAM,SAAS,CAAC,GAAG,UAAU,SAAS,CAAC,CACpC,KAAK,CAAC,KAAK,YAAY;EAAE;EAAK;EAAO,OAAO,YAAY,KAAK,MAAM,OAAA;EAAS,EAAE,CAC9E,QAAO,MAAK,EAAE,MAAM,UAAU,EAAE,CAChC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAEpC,KAAI,OAAO,WAAW,EACpB,QAAO;CAET,MAAM,OAAO,OAAO;AAKpB,QAAO;EAAE,OAAO,KAAK;EAAO,QAAQ,KAAK;EAAK;;AAMhD,eAAe,cAAc,OAAe,MAAc,KAAa,aAAa,SAA4B;AAE9G,QAAO,eADO,MAAM,eAAe,OAAO,MAAM,IAAI,EACvB,WAAW;;AAO1C,eAAsB,aAAa,OAAe,MAAc,SAAiB,aAAsB,SAAiD;CACtJ,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU;EACZ,MAAM,MAAM,SAAS,OAAO;EAC5B,MAAM,WAAW,CAAC,SAAS;EAC3B,MAAM,QAAQ,MAAM,cAAc,SAAS,OAAO,SAAS,MAAM,KAAK,GAAG,SAAS,KAAK,GAAG;AAC1F,MAAI,MAAM,WAAW,EACnB,QAAO;AACT,SAAO;GACL,SAAS,qCAAqC,SAAS,MAAM,GAAG,SAAS,KAAK,GAAG;GACjF;GACA;GACA;GAEA,YAAY,GAAG,SAAS,KAAK,OAAO,UAAU,GAAG,SAAS,KAAK,KAAK,KAAA;GACrE;;CAIH,MAAM,MAAM,MAAM,WAAW,OAAO,MAAM,SAAS,aADhC,UAAU,kBAAkB,QAAQ,GAAG,KAAA,EACiB;AAC3E,KAAI,CAAC,IACH,QAAO;CAET,IAAI,OAAO,eAAe,IAAI,OAAO,QAAQ;CAC7C,IAAI;CACJ,IAAI;AAGJ,KAAI,KAAK,WAAW,GAAG;EACrB,MAAM,aAAa,iBAAiB,IAAI,MAAM;AAC9C,MAAI,YAAY;AACd,UAAO,WAAW;AAClB,gBAAa,WAAW,UAAU,KAAA;AAClC,cAAW,IAAI;;;AAInB,KAAI,KAAK,WAAW,EAClB,QAAO;AAET,QAAO;EACL,SAAS,qCAAqC,MAAM,GAAG,KAAK,GAAG,IAAI;EACnE,KAAK,IAAI;EACT,OAAO;EACP;EACA;EACA,UAAU,IAAI;EACf;;AAMH,SAAS,cAAc,GAAmB;AACxC,QAAO,EAAE,QAAQ,OAAO,GAAG,CAAC,QAAQ,iBAAiB,GAAG;;AAS1D,SAAgB,wBACd,WACA,WAC0C;AAC1C,KAAI,UAAU,WAAW,EACvB,QAAO;EAAE,SAAS;EAAM,YAAY;EAAG;CAGzC,MAAM,SAAS,UAAU,MAAM,GAAG,GAAG;CAGrC,MAAM,kBAAkB,OAAO,KAAK,SAAS;EAC3C,IAAI,OAAO,KAAK;AAEhB,MAAI,KAAK,WAAW,OAAO,CACzB,KAAI;AACF,UAAO,IAAI,IAAI,KAAK,CAAC;UAEjB;AAER,SAAO,cAAc,KAAK;GAC1B;CAGF,MAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,cAAc,CAAC;CAE5D,IAAI,UAAU;AACd,MAAK,MAAM,YAAY,gBAErB,MAAK,MAAM,YAAY,eACrB,KAAI,aAAa,YAAY,SAAS,SAAS,IAAI,WAAW,EAAE;AAC9D;AACA;;CAKN,MAAM,aAAa,UAAU,OAAO;AACpC,QAAO;EAAE,SAAS,cAAc;EAAK;EAAY;;AAOnD,eAAe,cAAc,OAAe,MAAc,aAAuC;CAC/F,MAAM,OAAO,qCAAqC,MAAM,GAAG,KAAK;CAEhE,MAAM,QAAQ;EACZ;EACA,YAHgB,YAAY,QAAQ,UAAU,GAAG,CAG3B;EACtB,YAAY,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ
,KAAK,IAAI,CAAC;EAC7D;AACD,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,MAAM,UAAU,GAAG,KAAK,GAAG,OAAO;AAC/C,MAAI,CAAC,KACH;AACF,MAAI;AAEF,OADY,KAAK,MAAM,KAAK,CACpB,SAAS,YACf,QAAO;UAEL;;AAER,QAAO;;AAGT,eAAsB,iBAAiB,aAA6C;CAElF,MAAM,YAAY,YAAY,QAAQ,UAAU,GAAG;AACnD,MAAK,MAAM,aAAa,CAAC,YAAY,QAAQ,MAAM,GAAG,CAAC,QAAQ,KAAK,IAAI,EAAE,UAAU,EAAE;AAEpF,MAAI,CAAC,UAAU,SAAS,IAAI,EAAE;AAG5B,QADgB,MAAM,OAAO,IAAI,yBAAyB,UAAU,GAAG,YAAY,CAAC,YAAY,KAAK,GACxF,GACX,QAAO,sBAAsB,UAAU,GAAG;AAC5C;;AAGF,OADgB,MAAM,OAAO,IAAI,yBAAyB,YAAY,CAAC,YAAY,KAAK,GAC3E,GACX,QAAO,sBAAsB;;CAIjC,MAAM,aAAa,YAAY,QAAQ,MAAM,GAAG;AAChD,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAU;GAAS;GAAY;GAAU;GAAY;GAAW;GAAI,EAAE;GAC9G,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,QAAQ,KAAK,MAAM,KAAK;EAE9B,MAAM,QAAQ,MAAM,MAAK,MACvB,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAC/D,EAAE,SAAS,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACpE;AACD,MAAI,MACF,QAAO,sBAAsB,MAAM;AAErC,OAAK,MAAM,aAAa,OAAO;GAC7B,MAAM,KAAK,eAAe,sBAAsB,UAAU,WAAW;AACrE,OAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;SAGvC;CAOR,MAAM,OAAO,MAAM,OACjB,gDAFY,mBAAmB,GAAG,WAAW,UAAU,CAED,aACvD,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,MAAM,OAAO,OAChB,QAAO;CAGT,MAAM,QAAQ,KAAK,MAAM,MAAK,MAC5B,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,YAAY,aAAa,GAAG,IAChE,EAAE,UAAU,aAAa,CAAC,SAAS,IAAI,UAAU,aAAa,GAAG,CACrE;AACD,KAAI,MACF,QAAO,sBAAsB,MAAM;AAGrC,MAAK,MAAM,aAAa,KAAK,OAAO;EAClC,MAAM,KAAK,eAAe,sBAAsB,UAAU,YAAY;AACtE,MAAI,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,YAAY,CAC3D,QAAO,sBAAsB,UAAU;;AAG3C,QAAO;;AAOT,eAAsB,oBAAoB,OAAe,MAAc,aAA6D;CAClI,MAAM,WAAW,cAAc,eAAe,YAAY,GAAG,KAAA;AAC7D,KAAI,UAAU,SACZ,QAAO,EAAE,UAAU,SAAS,UAAU;AAGxC,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAM;GAAa,EAAE;GAC9F,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,YAAY;EAC9B,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,SAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;SAElD;CAKR,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,QAAO,MAAM,WAAW,EAAE,UAAU,KAAK,UAAU,GAAG;;AAMxD,eAAsB,YAAY,OAAe,MAAc,QAAiB,KAAsC;CAIpH,MAAM,UAAU,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAJ5B,OAAO,OAIqC,GAAG,OAAO,cACjE,yBAAyB,MAAM,GAAG,KAAK,SAAS,MAAM,QAAQ,QAAQ;AAI1E,MAFgB,MAAM,OAAO,IAAI,QAAQ,CAAC,YAAY,KAAK,GAE9C,GACX,QAAO,UAAU,MAAM,GAAG,OAAO,SAAS,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ;CAKlF,MAAM,WAAW,SAAS,GAAG,OAAO,KAAK;CACzC,MAAM,WAAW,MAAM,CAAC,IAAI,GAAG,CAAC,QAAQ,SAAS;AACjD,MAAK,MAAM,KAAK,SACd,MAAK,MAAM,YAAY;EAAC;EAAa;EAAa;EAAY,EAAE;EAC9D,MAAM,YAAY,qCAAqC,MAAM,GAAG,KAAK,GAAG,EAAE,GAAG,WAAW;AAExF,OADY,MAAM,OAAO,IAAI,UAAU,CAAC,YAAY,KAAK,GAChD,GACP,QAAO;;AAIb,QAAO;;AAkFT,eAAsB,mBAAmB,KAAqC;AAE5E,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,MAAM,WAAW,cAAc,IAAI;AACnC,MAAI,CAACE,WAAa,SAAS,CACzB,QAAO;AACT,SAAOC,aAAe,UAAU,QAAQ;;AAG1C,KAAI,IAAI,WAAW,UAAU,EAAE;EAC7B,IAAI,OAAO,IAAI,QAAQ,WAAW,GAAG;EACrC,IAAI,MAAM;EAGV,MAAM,QAAQ,KAAK,YAAY,IAAI;AACnC,MAAI,UAAU,IAAI;AAChB,SAAM,KAAK,MAAM,QAAQ,EAAE;AAC3B,UAAO,KAAK,MAAM,GAAG,MAAM;;EAG7B,MAAM,QAAQ,KAAK,MAAM,IAAI;EAC7B,MAAM,QAAQ,MAAM;EACpB,MAAM,OAAO,MAAM;EACnB,MAAM,SAAS,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;EAMvC,MAAM,OAAO,MAAM,OAJH,SACZ,yBAAyB,MAAM,GAAG,KAAK,SAAS,IAAI,GAAG,OAAO,cAC9D,yBAAyB,MAAM,GAAG,KAAK,cAAc,OAEtB,EAAE,cAAc,QAAQ,CAAC,CAAC,YAAY,KAAK;AAC9E,MAAI,CAAC,KACH,QAAO;AAET,MAAI;GACF,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,UAAO,KAAK,YAAY,KAAK,MAAM,YAAY;UAE3C;AACJ,UAAO;;;AAIX,QAAO,UAAU,IAAI;;AAOvB,eAAsB,kBACpB,OACA,MACA,YACiC;AACjC,cAAa,yBAAyB;CAGtC,MAAM,UAAU,sBAAsB,MAAM,GAAG;CAC/C,IAAI;CACJ,IAAI;AAEJ,KAAI,eAAe,CACjB,KAAI;EACF,MAAM,EAAE,QAAQ,SAAS,UAAU,MAAM;GAAC;GAAO,SAAS,MAAM,GAAG;GAAQ;GAAQ;GAAm
D,EAAE;GACtI,UAAU;GACV,SAAS;GACV,CAAC;AACF,MAAI,MAAM;GACR,MAAM,OAAO,KAAK,MAAM,KAAK;AAC7B,cAAW,KAAK,YAAY,KAAA;AAC5B,iBAAc,KAAK,eAAe,KAAA;;SAGhC;AAGR,KAAI,CAAC,YAAY,CAAC,aAAa;EAC7B,MAAM,OAAO,MAAM,OACjB,gCAAgC,MAAM,GAAG,OAC1C,CAAC,YAAY,KAAK;AACnB,aAAW,MAAM,YAAY,KAAA;AAC7B,gBAAc,MAAM,eAAe,KAAA;;AAIrC,cAAa,0BAA0B;CACvC,MAAM,eAAe,MAAM,OACzB,yBAAyB,MAAM,GAAG,KAAK,WACxC,CAAC,YAAY,KAAK;CAEnB,IAAI,UAAU;CACd,IAAI;CACJ,MAAM,gBAAgB,cAAc,WAAW;AAC/C,KAAI,eAAe;AAEjB,YAAU,cAAc,IAAI,QAAQ,MAAM,GAAG;AAC7C,eAAa,cAAc;;AAI7B,cAAa,iBAAiB;CAC9B,MAAM,UAAU,MAAM,aAAa,OAAO,MAAM,QAAQ;CACxD,MAAM,aAAa,UAAU,GAAG,QAAQ,QAAQ,QAAQ,IAAI,SAAS,KAAA;CACrE,MAAM,SAAS,SAAS;AAGxB,cAAa,kBAAkB;CAC/B,MAAM,YAAY,MAAM,YAAY,OAAO,KAAK;CAGhD,IAAI;AACJ,KAAI,UAAU;AACZ,eAAa,oBAAoB;AACjC,YAAU,MAAM,aAAa,SAAS,CAAC,YAAY,KAAK,IAAI,KAAA;;AAI9D,KAAI,CAAC,cAAc,CAAC,aAAa,CAAC,QAChC,QAAO;AAET,QAAO;EACL,MAAM;EACN,SAAS,gBAAgB,UAAU,KAAA;EACnC;EACA;EACA;EACA,SAAS;EACT;EACA;EACA,iBAAiB,SAAS;EAC1B,WAAW,aAAa,KAAA;EACxB;EACD;;AC/tBH,eAAsB,kBAAkB,OAAe,OAAO,GAA4E;CACxI,MAAM,OAAO,MAAM,OAEhB,+CAA+C,mBAAmB,MAAM,CAAC,QAAQ,OAAO,CAAC,YAAY,KAAK;AAE7G,KAAI,CAAC,MAAM,SAAS,OAClB,QAAO,EAAE;AAEX,QAAO,KAAK,QAAQ,KAAI,OAAM;EAC5B,MAAM,EAAE,QAAQ;EAChB,aAAa,EAAE,QAAQ;EACvB,SAAS,EAAE,QAAQ;EACpB,EAAE;;AAML,eAAsB,gBAAgB,aAAqD;CAEzF,MAAM,OAAO,MAAM,OAAuB,qBAAqB,YAAY,eAAe,CAAC,YAAY,KAAK;AAC5G,KAAI,KACF,QAAO;AAGT,QAAO,OAAuB,8BAA8B,YAAY,SAAS,CAAC,YAAY,KAAK;;AAgBrG,eAAsB,qBAAqB,aAAqB,SAA2C;CACzG,MAAM,OAAO,MAAM,OAGhB,8BAA8B,cAAc,CAAC,YAAY,KAAK;AAEjE,KAAI,CAAC,KACH,QAAO,EAAE;CAGX,MAAM,WAAoD,KAAK,eAC3D,OAAO,YACL,OAAO,QAAQ,KAAK,aAAa,CAAC,KAAK,CAAC,KAAK,SAAS,CACpD,KACA;EAAE,SAAS;EAAK,YAAY,KAAK,OAAO;EAAM,CAC/C,CAAC,CACH,GACD,KAAA;AAEJ,QAAO;EACL,YAAY,KAAK,OAAO,YAAY,KAAA;EACpC;EACD;;AAkBH,eAAe,cACb,IACA,eACA,KACA,QACA,UACA,YACA,MAC+B;CAC/B,IAAI;AAGJ,KAAI,eAAe;AACjB,eAAa,cAAc;EAC3B,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,eAAe,IAAI,MAAM,MAAM,WAAW;AAChG,MAAI,SAAS;AACX,UAAO,aAAa,QAAQ;AAC5B,UAAO,SAAS,QAAQ;AACxB,UAAO,kBAAkB,QAAQ;AACjC,cAAW,QAAQ;AACnB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,QAAQ;IACb,QAAQ;IACR,SAAS,QAAQ,WACb,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ,IAAI,gBAAgB,cAAc,KACnF,SAAS,QAAQ,MAAM,OAAO,WAAW,QAAQ;IACtD,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,OAAO,QAAQ,SAAS,cAAc;GAC9C,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,CAAC,OAAO,SAAS;AACnB,eAAa,cAAc;EAC3B,MAAM,WAAW,MAAM,oBAAoB,GAAG,OAAO,GAAG,MAAM,IAAI,KAAK;AACvE,MAAI,UAAU,YAAY,CAAC,iBAAiB,SAAS,SAAS,EAAE;AAC9D,UAAO,UAAU,SAAS;AAC1B,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK,OAAO;IACZ,QAAQ;IACR,SAAS,mBAAmB,SAAS;IACtC,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,OAAO;GACZ,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,cAAa,SAAS;CACtB,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,MAAM,QAAQ,OAAO,OAAO;AACnF,KAAI,WAAW;AACb,SAAO,YAAY;AACnB,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK;GACL,QAAQ;GACT,CAAC;OAGF,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,GAAG,OAAO,QAAQ;EACvB,QAAQ;EACR,SAAS;EACV,CAAC;AAGJ,QAAO;;AAMT,eAAsB,mBAAmB,aAAqB,UAA0B,EAAE,EAAmC;AAE3H,SADe,MAAM,+BAA+B,aAAa,QAAQ,EAC3D;;AAMhB,eAAsB,+BAA+B,aAAqB,UAA0B,EAAE,EAA0B;CAC9H,MAAM,WAA6B,EAAE;CACrC,MAAM,EAAE,eAAe;AAEvB,cAAa,MAAM;CACnB,MAAM,MAAM,MAAM,gBAAgB,YAAY;AAC9C,KAAI,CAAC,KAAK;AACR,WAAS,KAAK;GACZ,QAAQ;GACR,KAAK,8BAA8B,YAAY;GAC/C,QAAQ;GACR,SAAS;GACV,CAAC;AACF,SAAO;GAAE,SAAS;GAAM;GAAU;;AAGpC,UAAS,KAAK;EACZ,QAAQ;EACR,KAAK,8BAA8B,YAAY;EAC/C,QAAQ;EACR,SAAS,SAAS,IAAI,KAAK,GAAG,IAAI;EACnC,CAAC;CAGF,MAAM,eAAe,IAAI,UACrB,MAAM,qBAAqB,aAAa,IAAI,QAAQ,GACpD,EAAE;CAEN,MAAM,SAA0B;EAC9B,MAAM,IAAI;EACV,SAAS,IAAI;EACb,YAAY,aAAa;EACzB,aAAa,IAAI;EACjB,cAAc,IAAI;EAClB,UAAU,aAAa;EACxB;CAGD,IAAI;CAGJ,IAAI;CACJ,IAAI;AACJ,KAAI,OAAO,IAAI,eAAe,YAAY,IAAI,YAAY,KAAK;AAC7D,eAAa,IAAI,WAAW;EAC5B,MAAM,aAAa,iBAAiB,WAA
W;AAE/C,MAAI,CAAC,WAAW,SAAS,MAAM,IAAI,WAAW,SAAS,IAAI,IAAI,CAAC,WAAW,SAAS,IAAI,CACtF,QAAO,UAAU,sBAAsB;MAEvC,QAAO,UAAU;AACnB,WAAS,IAAI,WAAW;YAEjB,OAAO,IAAI,eAAe,SACjC,KAAI,IAAI,WAAW,SAAS,MAAM,EAAE;EAElC,MAAM,KAAK,eAAe,IAAI,WAAW;AACzC,MAAI,GACF,QAAO,UAAU,sBAAsB,GAAG,MAAM,GAAG,GAAG;QAErD;EAEH,MAAM,OAAO,IAAI,WAAW,QAAQ,YAAY,GAAG;AACnD,MAAI,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,SAAS,IAAI,CAC3C,QAAO,UAAU,sBAAsB;;AAK7C,KAAI,IAAI,YAAY,CAAC,gBAAgB,IAAI,SAAS,IAAI,CAAC,iBAAiB,IAAI,SAAS,CACnF,QAAO,UAAU,IAAI;AAIvB,KAAI,OAAO,SAAS,SAAS,aAAa,EAAE;EAC1C,MAAM,KAAK,eAAe,OAAO,QAAQ;AACzC,MAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,YAAY;GAAE;GAAY;GAAQ,CAAC;YAG9G,CAAC,OAAO,SAAS;AAExB,eAAa,gBAAgB;EAC7B,MAAM,cAAc,MAAM,iBAAiB,IAAI,KAAK;AACpD,MAAI,aAAa;AACf,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS,4BAA4B;IACtC,CAAC;GAEF,MAAM,KAAK,eAAe,YAAY;AACtC,OAAI,GAEF,mBAAkB,MAAM,cAAc,IADhB,QAAQ,WAAW,IAAI,SACY,KAAK,QAAQ,UAAU,WAAW;QAI7F,UAAS,KAAK;GACZ,QAAQ;GACR,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,SAAS;AAClB,eAAa,WAAW;EACxB,MAAM,UAAU,MAAM,aAAa,OAAO,QAAQ;AAClD,MAAI,SAAS;AACX,UAAO,UAAU;AACjB,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACT,CAAC;QAGF,UAAS,KAAK;GACZ,QAAQ;GACR,KAAK,GAAG,IAAI,IAAI,OAAO,QAAQ,CAAC,OAAO;GACvC,QAAQ;GACR,SAAS;GACV,CAAC;;AAKN,KAAI,OAAO,cAAc,OAAO,WAAW,iBAAiB;EAC1D,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ;AACtD,MAAI,eAAe,YAAY,MAAM,SAAS,GAAG;GAC/C,MAAM,aAAa,wBAAwB,YAAY,OAAO,gBAAgB;AAC9E,OAAI,CAAC,WAAW,SAAS;AACvB,aAAS,KAAK;KACZ,QAAQ;KACR,KAAK,OAAO;KACZ,QAAQ;KACR,SAAS,kDAAkD,KAAK,MAAM,WAAW,aAAa,IAAI,CAAC;KACpG,CAAC;AACF,WAAO,aAAa,KAAA;AACpB,WAAO,SAAS,KAAA;;;;AAMtB,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,cAAc,QAAQ,KAAK;AAChG,eAAa,QAAQ;EACrB,MAAM,SAAS,KAAK,QAAQ,KAAK,gBAAgB,YAAY;EAE7D,MAAM,aAAa,WAAW,OAAO,IAAI,YAAY,OAAO,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC/F,MAAI,YAAY;GACd,MAAM,aAAa,KAAK,QAAQ,WAAW;AAC3C,UAAO,YAAY,cAAc,WAAW,CAAC;AAC7C,YAAS,KAAK;IACZ,QAAQ;IACR,KAAK;IACL,QAAQ;IACR,SAAS;IACV,CAAC;;;AAKN,KAAI,CAAC,OAAO,WAAW,CAAC,OAAO,WAAW,CAAC,OAAO,aAAa,CAAC,OAAO,WACrE,QAAO;EAAE,SAAS;EAAM;EAAU;AAGpC,QAAO;EAAE,SAAS;EAAQ;EAAU;;AAMtC,SAAgB,sBACd,MACA,SACA,KACwB;AAExB,KAAI,QAAQ,WAAW,QAAQ,EAAE;EAE/B,MAAM,gBAAgB,KADL,QAAQ,KAAK,QAAQ,MAAM,EAAE,CAAC,EACV,eAAe;AACpD,MAAI,WAAW,cAAc,EAAE;GAC7B,MAAM,YAAY,KAAK,MAAM,aAAa,eAAe,QAAQ,CAAC;AAClE,UAAO;IACL,MAAM,UAAU,QAAQ;IACxB,SAAS,UAAU,WAAW;IAC/B;;AAEH,SAAO;;AAIT,KAAI,QAAQ,WAAW,OAAO,EAAE;EAC9B,MAAM,YAAY,QAAQ,MAAM,EAAE;EAClC,MAAM,UAAU,UAAU,WAAW,IAAI,GACrC,UAAU,QAAQ,KAAK,EAAE,GACzB,UAAU,QAAQ,IAAI;EAC1B,MAAM,WAAW,UAAU,IAAI,UAAU,MAAM,GAAG,QAAQ,GAAG;AAC7D,SAAO;GAAE,MAAM;GAAU,SAAS,wBAAwB,UAAU,IAAI,IAAI;GAAK;;AAInF,KAAI,QAAQ,WAAW,QAAQ,IAAI,QAAQ,WAAW,OAAO,IAAI,QAAQ,WAAW,OAAO,CACzF,QAAO;CAKT,MAAM,YAAY,wBAAwB,MAAM,IAAI;AACpD,KAAI,UACF,QAAO;EAAE;EAAM,SAAS;EAAW;AAGrC,KAAI,cAAc,KAAK,QAAQ,CAC7B,QAAO;EAAE;EAAM,SAAS,QAAQ,QAAQ,aAAa,GAAA;EAAK;AAI5D,KAAI,QAAQ,WAAW,WAAW,IAAI,QAAQ,WAAW,aAAa,CACpE,QAAO;EAAE;EAAM,SAAS;EAAK;AAE/B,QAAO;;AAOT,SAAgB,wBAAwB,MAAc,KAA4B;AAChF,KAAI;EACF,MAAM,WAAW,gBAAgB,GAAG,KAAK,gBAAgB,EAAE,KAAK,KAAK,CAAC;AAEtE,SADY,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC,CAC5C,WAAW;SAElB;AAGJ,MAAI;GAEF,IAAI,MAAM,QADI,gBAAgB,MAAM,EAAE,KAAK,KAAK,CAAC,CACzB;AACxB,UAAO,OAAO,SAAS,IAAI,KAAK,gBAAgB;IAC9C,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,QAAI,WAAW,QAAQ,CAErB,QADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;AAExB,UAAM,QAAQ,IAAI;;UAGhB;AACN,SAAO;;;AAOX,eAAsB,sBAAsB,KAAyC;CACnF,MAAM,UAAU,KAAK,KAAK,eAAe;AACzC,KAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,6CAA6C;CAG/D,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CACtD,MAAM,OAA+B;EACnC,GAAG,IAAI;EACP,GAAG,IAAI;EACR;CAED,MAAM,UAA6B,EAAE;AAEr
C,MAAK,MAAM,CAAC,MAAM,YAAY,OAAO,QAAQ,KAAK,EAAE;EAClD,MAAM,SAAS,sBAAsB,MAAM,SAAS,IAAI;AACxD,MAAI,OACF,SAAQ,KAAK,OAAO;;AAIxB,QAAO;;AAcT,SAAgB,qBAAqB,WAA4C;CAC/E,MAAM,UAAU,KAAK,WAAW,eAAe;AAC/C,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;CAET,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;CAEtD,IAAI;AACJ,KAAI,IAAI,YAAY,IAClB,WAAU,iBAAiB,IAAI,WAAW,IAAI;UAEvC,OAAO,IAAI,eAAe,SACjC,WAAU,iBAAiB,IAAI,WAAW;AAG5C,QAAO;EACL,MAAM,IAAI;EACV,SAAS,IAAI,WAAW;EACxB,aAAa,IAAI;EACjB;EACA;EACD;;AAMH,eAAsB,wBAAwB,WAAoD;CAChG,MAAM,OAAO,qBAAqB,UAAU;AAC5C,KAAI,CAAC,KACH,QAAO;CAET,MAAM,SAA0B;EAC9B,MAAM,KAAK;EACX,SAAS,KAAK;EACd,aAAa,KAAK;EAClB,SAAS,KAAK;EACf;AAGD,KAAI,KAAK,SAAS,SAAS,aAAa,EAAE;EACxC,MAAM,KAAK,eAAe,KAAK,QAAQ;AACvC,MAAI,IAAI;GAEN,MAAM,UAAU,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,KAAK,SAAS,KAAK,KAAK;AAC9E,OAAI,SAAS;AACX,WAAO,aAAa,QAAQ;AAC5B,WAAO,SAAS,QAAQ;AACxB,WAAO,kBAAkB,QAAQ;;GAInC,MAAM,YAAY,MAAM,YAAY,GAAG,OAAO,GAAG,MAAM,KAAA,GAAW,OAAO,OAAO;AAChF,OAAI,UACF,QAAO,YAAY;;;AAMzB,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,YAAY;EAC3C,MAAM,aAAa,YAAY,UAAU,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AAC5E,MAAI,WACF,QAAO,YAAY,cAAc,KAAK,WAAW,WAAW,CAAC,CAAC;;AAIlE,KAAI,CAAC,OAAO,aAAa,CAAC,OAAO,WAC/B,QAAO;AAGT,QAAO;;AAUT,eAAsB,aAAa,MAAc,SAAyC;CACxF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,SAAS,KAAK,UAAU,MAAM;AAGpC,KAAI,WAAW,KAAK,QAAQ,eAAe,CAAC,CAC1C,QAAO;CAGT,MAAM,OAAO,MAAM,OACjB,8BAA8B,KAAK,GAAG,UACvC,CAAC,YAAY,KAAK;AACnB,KAAI,CAAC,KACH,QAAO;CACT,MAAM,aAAa,KAAK,MAAM;AAC9B,KAAI,CAAC,WACH,QAAO;CAGT,MAAM,aAAa,MAAM,MAAM,YAAY,EACzC,SAAS,EAAE,cAAc,cAAc,EACxC,CAAC,CAAC,YAAY,KAAK;AAEpB,KAAI,CAAC,YAAY,MAAM,CAAC,WAAW,KACjC,QAAO;AAET,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;CAEtC,MAAM,aAAa,KAAK,UAAU,WAAW;CAC7C,MAAM,aAAa,kBAAkB,WAAW;CAGhD,MAAM,SAAS,WAAW,KAAK,WAAW;AAC1C,OAAM,IAAI,SAAe,KAAK,WAAW;EACvC,MAAM,WAAW,IAAI,SAAS,EAC5B,MAAM,OAAO,WAAW,UAAU;AAChC,cAAW,MAAM,OAAO,SAAS;KAEpC,CAAC;AACF,WAAS,GAAG,gBAAgB;AAC1B,cAAW,KAAK;AAChB,QAAK;IACL;AACF,WAAS,GAAG,SAAS,OAAO;EAE5B,SAAS,OAAO;AACd,UAAO,MAAM,CAAC,MAAM,EAAE,MAAM,YAAY;AACtC,QAAI,MAAM;AACR,cAAS,KAAK;AACd;;AAEF,aAAS,MAAM,aAAa,MAAM,CAAC;KACnC,CAAC,MAAM,OAAO;;AAElB,QAAM;GACN;CAGF,MAAM,EAAE,WAAW,UAAU,OAAO;EAAC;EAAO;EAAY;EAAwB;EAAM;EAAO,EAAE,EAAE,OAAO,UAAU,CAAC;AACnH,KAAI,WAAW,GAAG;AAChB,SAAO,QAAQ;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAChD,SAAO,YAAY,EAAE,OAAO,MAAM,CAAC;AACnC,SAAO;;AAGT,YAAW,WAAW;AACtB,QAAO;;AAMT,eAAsB,mBAAmB,aAA6C;AAIpF,SAHa,MAAM,OACjB,qBAAqB,YAAY,eAClC,CAAC,YAAY,KAAK,GACN,WAAW;;AAM1B,SAAgB,yBAAyB,UAAiC;CACxE,MAAM,YAAY,KAAK,UAAU,WAAW;AAC5C,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;AAIT,QAFgB,aAAa,WAAW,QAAQ,CAC1B,MAAM,6BAA6B,GAC1C,MAAM"}
|
package/dist/_chunks/utils.d.mts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"utils.d.mts","names":[],"sources":["../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/types.ts","../../src/sources/github.ts","../../src/sources/issues.ts","../../src/sources/llms.ts","../../src/sources/npm.ts","../../src/sources/package-registry.ts","../../src/sources/releases.ts","../../src/sources/utils.ts"],"mappings":";;;UAmBU,WAAA;EACR,IAAA;EACA,OAAA;AAAA;;;;;;iBAkFoB,iBAAA,CACpB,WAAA,UACA,gBAAA,WACC,OAAA,CAAQ,WAAA;;;;;;AAzFV;;UCQgB,iBAAA;EACf,IAAA;EACA,MAAA;EACA,SAAA;EACA,YAAA;AAAA;AAAA,UAGe,gBAAA;EACf,MAAA;EACA,KAAA;EACA,IAAA;EACA,QAAA;EACA,SAAA;EACA,GAAA;EACA,WAAA;EACA,QAAA;EACA,MAAA;EACA,WAAA,EAAa,iBAAA;AAAA
|
|
1
|
+
{"version":3,"file":"utils.d.mts","names":[],"sources":["../../src/sources/blog-releases.ts","../../src/sources/discussions.ts","../../src/sources/entries.ts","../../src/sources/git-skills.ts","../../src/sources/types.ts","../../src/sources/github.ts","../../src/sources/issues.ts","../../src/sources/llms.ts","../../src/sources/npm.ts","../../src/sources/package-registry.ts","../../src/sources/releases.ts","../../src/sources/utils.ts"],"mappings":";;;UAmBU,WAAA;EACR,IAAA;EACA,OAAA;AAAA;;;;;;iBAkFoB,iBAAA,CACpB,WAAA,UACA,gBAAA,WACC,OAAA,CAAQ,WAAA;;;;;;AAzFV;;UCQgB,iBAAA;EACf,IAAA;EACA,MAAA;EACA,SAAA;EACA,YAAA;AAAA;AAAA,UAGe,gBAAA;EACf,MAAA;EACA,KAAA;EACA,IAAA;EACA,QAAA;EACA,SAAA;EACA,GAAA;EACA,WAAA;EACA,QAAA;EACA,YAAA;EACA,MAAA;EACA,WAAA,EAAa,iBAAA;AAAA;;;AAXf;;;iBAuHsB,sBAAA,CACpB,KAAA,UACA,IAAA,UACA,KAAA,WACA,UAAA,WACA,QAAA,YACC,OAAA,CAAQ,gBAAA;;;;iBAwGK,0BAAA,CAA2B,CAAA,EAAG,gBAAA;;;;;iBAuC9B,uBAAA,CAAwB,WAAA,EAAa,gBAAA;;;UCpSpC,SAAA;EACf,IAAA;EACA,OAAA;EACA,IAAA;AAAA;;;;iBAoCoB,iBAAA,CAAkB,UAAA,WAAqB,OAAA,CAAQ,SAAA;;;;;;AF9BpE;;;UGJgB,cAAA;EACf,IAAA;EACA,KAAA;EACA,IAAA;;EAEA,SAAA;EHsFA;EGpFA,GAAA;EHsFC;EGpFD,SAAA;AAAA;AAAA,UAGe,WAAA;;EAEf,IAAA;;EAEA,WAAA;EFJgC;EEMhC,IAAA;EFNgC;EEQhC,OAAA;EFNA;EEQA,KAAA,EAAO,KAAA;IAAQ,IAAA;IAAc,OAAA;EAAA;AAAA;;;;;iBAOf,kBAAA,CAAmB,KAAA,WAAgB,cAAA;;;;iBA8EnC,yBAAA,CAA0B,OAAA;EAAoB,IAAA;EAAe,WAAA;AAAA;;;;iBA6BvD,cAAA,CACpB,MAAA,EAAQ,cAAA,EACR,UAAA,IAAc,GAAA,oBACb,OAAA;EAAU,MAAA,EAAQ,WAAA;EAAe,SAAA;AAAA;;;;;;UCpJnB,cAAA;EACf,IAAA;EACA,OAAA;EACA,WAAA;EACA,QAAA;EACA,UAAA;IACE,IAAA;IACA,GAAA;IACA,SAAA;EAAA;EAEF,MAAA;EACA,YAAA,GAAe,MAAA;EACf,eAAA,GAAkB,MAAA;EAClB,gBAAA,GAAmB,MAAA;AAAA;AAAA,UAGJ,eAAA;EACf,IAAA;EACA,OAAA;EHGe;EGDf,UAAA;EACA,WAAA;EHAgC;EGEhC,YAAA,GAAe,MAAA;EHAf;EGEA,QAAA,GAAW,MAAA;IAAiB,OAAA;IAAiB,UAAA;EAAA;EAC7C,OAAA;EACA,OAAA;EACA,SAAA;EACA,OAAA;EHAA;EGEA,UAAA;EHAA;EGEA,MAAA;EHAA;EGEA,eAAA;AAAA;AAAA,UAGe,eAAA;EACf,IAAA;EACA,OAAA;AAAA;AAAA,UAGe,WAAA;EACf,GAAA;EHL8B;EGO9B,KAAA,EAAO,QAAA;AAAA;AAAA,UAGQ,QAAA;EACf,KAAA;EACA,GAAA;AAAA;AAAA,UAGe,UAAA;EACf,GAAA;EACA,KAAA;EACA,OAAA;AAAA;AAAA,UAGe,cAAA;EACf,MAAA;EACA,GAAA;EACA,MAAA;EACA,OAAA;AAAA;AAAA,UAGe,aAAA;EACf,OAAA,EAAS,eAAA;EACT,QAAA,EAAU,cAAA;AAAA;;;;cC1DC,YAAA;;cAGA,gBAAA,GAAoB,CAAA;AAAA,UAEhB,aAAA;ELmFsB;EKjFrC,OAAA;ELoFQ;EKlFR,GAAA;ELiFA;EK/EA,KAAA;ELgFS;EK9ET,UAAA;EL8EkB;EK5ElB,QAAA;;EAEA,QAAA;AAAA;;;;;iBAoPoB,YAAA,CAAa,KAAA,UAAe,IAAA,UAAc,OAAA,UAAiB,WAAA,WAAsB,OAAA,YAAmB,OAAA,CAAQ,aAAA;;;;;AJpPlI;;iBImTgB,uBAAA,CACd,SAAA,EAAW,QAAA,IACX,SAAA;EACG,OAAA;EAAkB,UAAA;AAAA;;;;;iBA6ID,mBAAA,CAAoB,KAAA,UAAe,IAAA,UAAc,WAAA,YAAuB,OAAA;EAAU,QAAA;AAAA;;;;iBA+BlF,WAAA,CAAY,KAAA,UAAe,IAAA,UAAc,MAAA,WAAiB,GAAA,YAAe,OAAA;;;;iBA6GzE,kBAAA,CAAmB,GAAA,WAAc,OAAA;;;;;iBAiDjC,iBAAA,CACpB,KAAA,UACA,IAAA,UACA,UAAA,IAAc,GAAA,oBACb,OAAA,CAAQ,eAAA;;;;;;ALnpBV;;KMNW,SAAA;AAAA,UAEK,YAAA;EACf,IAAA;EACA,MAAA;EACA,SAAA;EACA,YAAA;AAAA;AAAA,UAGe,WAAA;EACf,MAAA;EACA,KAAA;EACA,KAAA;EACA,MAAA;EACA,IAAA;EACA,SAAA;EACA,GAAA;EACA,SAAA;EACA,QAAA;EACA,IAAA,EAAM,SAAA;EACN,WAAA,EAAa,YAAA;ELNmB;EKQhC,KAAA;ELNA;EKQA,UAAA;AAAA;;;ALHF;iBKWgB,aAAA,CAAA;;;;;;iBAyaM,iBAAA,CACpB,KAAA,UACA,IAAA,UACA,KAAA,WACA,UAAA,WACA,QAAA,YACC,OAAA,CAAQ,WAAA;;;;iBAwBK,qBAAA,CAAsB,KAAA,EAAO,WAAA;;;;;iBA0C7B,kBAAA,CAAmB,MAAA,EAAQ,WAAA;;;;;;iBCjhBrB,YAAA,CAAa,OAAA,WAAkB,OAAA;AP4FrD;;;AAAA,iBOjFsB,YAAA,CAAa,GAAA,WAAc,OAAA,CAAQ,WAAA;;;;iBAczC,kBAAA,CAAmB,OAAA,WAAkB,QAAA;AAAA,iBAuC/B,gBAAA,CACpB,WAAA,EAAa,WAAA,EACb,OAAA,UACA,UAAA,IAAc,GAAA,UAAa,KAAA,UAAe,KAAA,oBACzC,OAAA,CAAQ,UAAA;;;;;iBA6BK,kBAAA,CAAmB,OAAA,UAAiB,OAAA;;;;;iBAuBpC,eAAA,CAAgB,OAAA,UAAiB,QAAA;;;;;;;iBC/
G3B,iBAAA,CAAkB,KAAA,UAAe,IAAA,YAAW,OAAA,CAAQ,KAAA;EAAQ,IAAA;EAAc,WAAA;EAAsB,OAAA;AAAA;;;;iBAkBhG,eAAA,CAAgB,WAAA,WAAsB,OAAA,CAAQ,cAAA;AAAA,UAUnD,WAAA;EACf,OAAA;EACA,UAAA;AAAA;AAAA,UAGe,eAAA;EACf,UAAA;EACA,QAAA,GAAW,MAAA,SAAe,WAAA;AAAA;;;;iBAMN,oBAAA,CAAqB,WAAA,UAAqB,OAAA,WAAkB,OAAA,CAAQ,eAAA;AAAA,KAyB9E,WAAA;AAAA,UAEK,cAAA;EPxDA;EO0Df,OAAA;;EAEA,GAAA;EP3DA;EO6DA,UAAA,IAAc,IAAA,EAAM,WAAA;AAAA;;;;iBA+FA,kBAAA,CAAmB,WAAA,UAAqB,OAAA,GAAS,cAAA,GAAsB,OAAA,CAAQ,eAAA;;;;iBAQ/E,8BAAA,CAA+B,WAAA,UAAqB,OAAA,GAAS,cAAA,GAAsB,OAAA,CAAQ,aAAA;;;;iBAkLjG,qBAAA,CACd,IAAA,UACA,OAAA,UACA,GAAA,WACC,eAAA;;;;;iBAoDa,uBAAA,CAAwB,IAAA,UAAc,GAAA;;;;iBA6BhC,qBAAA,CAAsB,GAAA,WAAc,OAAA,CAAQ,eAAA;AAAA,UAwBjD,gBAAA;EACf,IAAA;EACA,OAAA;EACA,WAAA;EACA,OAAA;EACA,SAAA;AAAA;;AP7LF;;iBOmMgB,oBAAA,CAAqB,SAAA,WAAoB,gBAAA;;;;iBA2BnC,uBAAA,CAAwB,SAAA,WAAoB,OAAA,CAAQ,eAAA;;ANlgB1E;;;;;;iBMwjBsB,YAAA,CAAa,IAAA,UAAc,OAAA,WAAkB,OAAA;;;ANjhBnE;iBMylBsB,kBAAA,CAAmB,WAAA,WAAsB,OAAA;;;;iBAU/C,wBAAA,CAAyB,QAAA;;;;;;ARjoBxC;;USXgB,WAAA;EACf,OAAA;EACA,GAAA;EACA,IAAA;EACA,KAAA;AAAA;AAAA,UAGe,YAAA;EACf,YAAA;EACA,OAAA;ET2FC;ESzFD,KAAA;AAAA;AAAA,UAGe,SAAA;EACf,KAAA;EACA,IAAA;;EAEA,QAAA;ERCgC;EQChC,QAAA;ERDgC;EQGhC,OAAA;ERDA;EQGA,QAAA;ERDA;EQGA,sBAAA;ERHY;EQKZ,QAAA,EAAU,MAAA,SAAe,YAAA;ERFM;EQI/B,YAAA,GAAe,WAAA;AAAA;AAAA,UAIA,WAAA;EACf,KAAA;EACA,IAAA;EACA,IAAA;EACA,GAAA;EACA,QAAA;AAAA;AAAA,UAGe,UAAA;EACf,WAAA;EACA,QAAA,EAAU,WAAA;AAAA;AAAA,iBAyWI,cAAA,CAAe,WAAA,WAAsB,WAAA;AAAA,iBAiBrC,aAAA,CAAc,WAAA,WAAsB,UAAA;AAAA,iBAcpC,eAAA,CAAgB,WAAA;AAAA,iBAShB,YAAA,CAAa,OAAA,WAAkB,SAAA;AAAA,iBAI/B,oBAAA,CAAqB,WAAA;AAAA,iBAWrB,yBAAA,CAA0B,WAAA;AAAA,iBAO1B,kBAAA,CAAmB,WAAA;;;;;;UChdlB,aAAA;EACf,EAAA;EACA,GAAA;EACA,IAAA;EACA,UAAA;EACA,SAAA;EACA,WAAA;EACA,QAAA;AAAA;AAAA,UAOQ,SAAA;EACR,IAAA;EACA,OAAA;AAAA;AAAA,UAGe,MAAA;EACf,KAAA;EACA,KAAA;EACA,KAAA;EACA,GAAA;AAAA;AAAA,iBAGc,WAAA,CAAY,OAAA,WAAkB,MAAA;;;;iBAiD9B,YAAA,CAAa,OAAA;AAAA,iBAIb,aAAA,CAAc,CAAA,EAAG,MAAA,EAAQ,CAAA,EAAG,MAAA;AAAA,UAoI3B,mBAAA;EACf,QAAA,EAAU,aAAA;EACV,WAAA;EACA,YAAA,GAAe,KAAA;IAAQ,OAAA;IAAiB,KAAA;IAAe,IAAA;EAAA;EACvD,YAAA;AAAA;;;;;iBAOc,oBAAA,CAAqB,cAAA,EAAgB,aAAA,KAAkB,mBAAA,EAAqB,WAAA;;;AT8B5F;;;;;iBS+EsB,iBAAA,CACpB,KAAA,UACA,IAAA,UACA,gBAAA,UACA,MAAA,WACA,WAAA,WACA,QAAA,WACA,YAAA,YACC,OAAA,CAAQ,SAAA;;;;;;cCtVE,MAAA,EAKX,QAAA,CALiB,MAAA;;;;iBAUG,SAAA,CAAU,GAAA,WAAc,OAAA;AXuF9C;;;AAAA,iBWhFsB,SAAA,CAAU,GAAA,WAAc,OAAA;;;;iBAkC9B,eAAA,CAAgB,GAAA;;;;iBAahB,cAAA,CAAe,GAAA;EAAgB,KAAA;EAAe,IAAA;AAAA;;;;iBAU9C,gBAAA,CAAiB,GAAA;;;;AVhDjC;iBU+DgB,gBAAA,CAAiB,IAAA;EAAiB,IAAA;EAAc,GAAA;AAAA;;;;iBAqBhD,iBAAA,CAAkB,GAAA"}
|
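The "mappings" values elided above are Base64-VLQ: each comma-separated segment packs small signed deltas (generated column, source index, source line, source column), which is why the raw strings are not meaningfully reviewable by eye. A minimal sketch of a segment decoder, assuming standard source-map VLQ encoding (decodeVlqSegment and B64 are illustrative names, not part of skilld):

// Minimal Base64-VLQ segment decoder (illustrative sketch, not from the package).
const B64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

function decodeVlqSegment(segment: string): number[] {
  const fields: number[] = [];
  let value = 0;
  let shift = 0;
  for (const ch of segment) {
    const digit = B64.indexOf(ch);
    if (digit < 0) throw new Error(`not a base64 digit: ${ch}`);
    value += (digit & 0b11111) << shift; // low 5 bits of each digit carry data
    if (digit & 0b100000) {
      shift += 5;                        // continuation bit set: more digits follow
    } else {
      fields.push(value & 1 ? -(value >>> 1) : value >>> 1); // lowest bit is the sign
      value = 0;
      shift = 0;
    }
  }
  return fields;
}

// "AAOzB", one of the segments from the elided mappings data, decodes to
// [0, 0, 7, -25]: deltas for generated column, source index, source line, source column.
console.log(decodeVlqSegment("AAOzB"));

Decoding whole strings only reconstructs line/column tables for the files listed in the sources arrays; the maps contain no executable logic to review.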