skilld 0.15.3 → 1.0.0

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +7 -5
  2. package/dist/_chunks/{detect-imports.mjs → agent.mjs} +48 -15
  3. package/dist/_chunks/agent.mjs.map +1 -0
  4. package/dist/_chunks/{storage.mjs → cache.mjs} +81 -1
  5. package/dist/_chunks/cache.mjs.map +1 -0
  6. package/dist/_chunks/cache2.mjs +71 -0
  7. package/dist/_chunks/cache2.mjs.map +1 -0
  8. package/dist/_chunks/config.mjs +23 -0
  9. package/dist/_chunks/config.mjs.map +1 -1
  10. package/dist/_chunks/{embedding-cache2.mjs → embedding-cache.mjs} +1 -1
  11. package/dist/_chunks/embedding-cache.mjs.map +1 -0
  12. package/dist/_chunks/formatting.mjs +634 -0
  13. package/dist/_chunks/formatting.mjs.map +1 -0
  14. package/dist/_chunks/{version.d.mts → index.d.mts} +1 -1
  15. package/dist/_chunks/index.d.mts.map +1 -0
  16. package/dist/_chunks/{utils.d.mts → index2.d.mts} +1 -1
  17. package/dist/_chunks/index2.d.mts.map +1 -0
  18. package/dist/_chunks/install.mjs +539 -0
  19. package/dist/_chunks/install.mjs.map +1 -0
  20. package/dist/_chunks/list.mjs +70 -0
  21. package/dist/_chunks/list.mjs.map +1 -0
  22. package/dist/_chunks/markdown.mjs +7 -0
  23. package/dist/_chunks/markdown.mjs.map +1 -1
  24. package/dist/_chunks/pool.mjs +174 -0
  25. package/dist/_chunks/pool.mjs.map +1 -0
  26. package/dist/_chunks/pool2.mjs +1 -6
  27. package/dist/_chunks/pool2.mjs.map +1 -1
  28. package/dist/_chunks/prompts.mjs +234 -2
  29. package/dist/_chunks/prompts.mjs.map +1 -1
  30. package/dist/_chunks/sanitize.mjs +71 -0
  31. package/dist/_chunks/sanitize.mjs.map +1 -1
  32. package/dist/_chunks/search-interactive.mjs +245 -0
  33. package/dist/_chunks/search-interactive.mjs.map +1 -0
  34. package/dist/_chunks/search.mjs +12 -0
  35. package/dist/_chunks/shared.mjs +4 -0
  36. package/dist/_chunks/shared.mjs.map +1 -1
  37. package/dist/_chunks/{npm.mjs → sources.mjs} +401 -4
  38. package/dist/_chunks/sources.mjs.map +1 -0
  39. package/dist/_chunks/sync.mjs +1937 -0
  40. package/dist/_chunks/sync.mjs.map +1 -0
  41. package/dist/_chunks/sync2.mjs +13 -0
  42. package/dist/_chunks/uninstall.mjs +207 -0
  43. package/dist/_chunks/uninstall.mjs.map +1 -0
  44. package/dist/_chunks/validate.mjs +3 -0
  45. package/dist/_chunks/validate.mjs.map +1 -1
  46. package/dist/_chunks/yaml.mjs +19 -0
  47. package/dist/_chunks/yaml.mjs.map +1 -1
  48. package/dist/agent/index.d.mts +1 -1
  49. package/dist/agent/index.mjs +4 -3
  50. package/dist/cache/index.d.mts +2 -2
  51. package/dist/cache/index.mjs +2 -1
  52. package/dist/cli.mjs +146 -3823
  53. package/dist/cli.mjs.map +1 -1
  54. package/dist/index.d.mts +2 -3
  55. package/dist/index.mjs +4 -4
  56. package/dist/retriv/index.mjs +14 -2
  57. package/dist/retriv/index.mjs.map +1 -1
  58. package/dist/retriv/worker.mjs +3 -3
  59. package/dist/sources/index.d.mts +2 -2
  60. package/dist/sources/index.mjs +2 -1
  61. package/dist/types.d.mts +2 -3
  62. package/package.json +9 -9
  63. package/dist/_chunks/detect-imports.mjs.map +0 -1
  64. package/dist/_chunks/embedding-cache2.mjs.map +0 -1
  65. package/dist/_chunks/npm.mjs.map +0 -1
  66. package/dist/_chunks/storage.mjs.map +0 -1
  67. package/dist/_chunks/utils.d.mts.map +0 -1
  68. package/dist/_chunks/version.d.mts.map +0 -1
@@ -8,12 +8,15 @@ import { htmlToMarkdown } from "mdream";
8
8
  import { spawnSync } from "node:child_process";
9
9
  import { ofetch } from "ofetch";
10
10
  import { crawlAndGenerate } from "@mdream/crawl";
11
- import { globby } from "globby";
11
+ import { glob } from "tinyglobby";
12
12
  import { downloadTemplate } from "giget";
13
13
  import { fileURLToPath, pathToFileURL } from "node:url";
14
14
  import pLimit from "p-limit";
15
15
  import { Writable } from "node:stream";
16
16
  import { resolvePathSync } from "mlly";
17
+ /**
18
+ * Shared constants and helpers for GitHub source modules (issues, discussions, releases)
19
+ */
17
20
  const BOT_USERS = new Set([
18
21
  "renovate[bot]",
19
22
  "dependabot[bot]",
@@ -21,19 +24,30 @@ const BOT_USERS = new Set([
21
24
  "dependabot",
22
25
  "github-actions[bot]"
23
26
  ]);
27
+ /** Extract YYYY-MM-DD date from an ISO timestamp */
24
28
  const isoDate = (iso) => iso.split("T")[0];
29
+ /** Build YAML frontmatter from a key-value object, auto-quoting strings with special chars */
25
30
  function buildFrontmatter(fields) {
26
31
  const lines = ["---"];
27
32
  for (const [k, v] of Object.entries(fields)) if (v !== void 0) lines.push(`${k}: ${typeof v === "string" && /[:"[\]]/.test(v) ? `"${v.replace(/"/g, "\\\"")}"` : v}`);
28
33
  lines.push("---");
29
34
  return lines.join("\n");
30
35
  }
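// Usage sketch (illustrative, not part of the packaged code): strings containing
// ':', '"', '[' or ']' are quoted with inner quotes escaped; undefined fields are skipped.
buildFrontmatter({ title: 'Error: "undefined" in build', reactions: 12, milestone: undefined });
// ---
// title: "Error: \"undefined\" in build"
// reactions: 12
// ---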
36
+ /**
37
+ * GitHub issues fetching via gh CLI Search API
38
+ * Freshness-weighted scoring, type quotas, comment quality filtering
39
+ * Categorized by labels, noise filtered out, non-technical issues detected
40
+ */
31
41
  let _ghAvailable;
42
+ /**
43
+ * Check if gh CLI is installed and authenticated (cached)
44
+ */
32
45
  function isGhAvailable() {
33
46
  if (_ghAvailable !== void 0) return _ghAvailable;
34
47
  const { status } = spawnSync("gh", ["auth", "status"], { stdio: "ignore" });
35
48
  return _ghAvailable = status === 0;
36
49
  }
50
+ /** Labels that indicate noise — filter these out entirely */
37
51
  const NOISE_LABELS = new Set([
38
52
  "duplicate",
39
53
  "stale",
@@ -45,6 +59,7 @@ const NOISE_LABELS = new Set([
45
59
  "needs triage",
46
60
  "triage"
47
61
  ]);
62
+ /** Labels that indicate feature requests — deprioritize */
48
63
  const FEATURE_LABELS = new Set([
49
64
  "enhancement",
50
65
  "feature",
@@ -80,10 +95,17 @@ const DOCS_LABELS = new Set([
80
95
  "doc",
81
96
  "typo"
82
97
  ]);
98
+ /**
99
+ * Check if a label contains any keyword from a set.
100
+ * Handles emoji-prefixed labels like ":sparkles: feature request" or ":lady_beetle: bug".
101
+ */
83
102
  function labelMatchesAny(label, keywords) {
84
103
  for (const keyword of keywords) if (label === keyword || label.includes(keyword)) return true;
85
104
  return false;
86
105
  }
106
+ /**
107
+ * Classify an issue by its labels into a type useful for skill generation
108
+ */
87
109
  function classifyIssue(labels) {
88
110
  const lower = labels.map((l) => l.toLowerCase());
89
111
  if (lower.some((l) => labelMatchesAny(l, BUG_LABELS))) return "bug";
@@ -92,23 +114,41 @@ function classifyIssue(labels) {
92
114
  if (lower.some((l) => labelMatchesAny(l, FEATURE_LABELS))) return "feature";
93
115
  return "other";
94
116
  }
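// Usage sketch (illustrative, not part of the packaged code): substring matching
// lets emoji-prefixed labels classify correctly.
labelMatchesAny(":lady_beetle: bug", new Set(["bug", "regression"])); // true (label contains "bug")
classifyIssue([":sparkles: feature request"]);                        // "feature"
classifyIssue([]);                                                    // "other"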
117
+ /**
118
+ * Check if an issue should be filtered out entirely
119
+ */
95
120
  function isNoiseIssue(issue) {
96
121
  if (issue.labels.map((l) => l.toLowerCase()).some((l) => labelMatchesAny(l, NOISE_LABELS))) return true;
97
122
  if (issue.title.startsWith("☂️") || issue.title.startsWith("[META]") || issue.title.startsWith("[Tracking]")) return true;
98
123
  return false;
99
124
  }
125
+ /** Check if body contains a code block */
100
126
  function hasCodeBlock$1(text) {
101
127
  return /```[\s\S]*?```/.test(text) || /`[^`]+`/.test(text);
102
128
  }
129
+ /**
130
+ * Detect non-technical issues: fan mail, showcases, sentiment.
131
+ * Short body + no code + high reactions = likely non-technical.
132
+ * Note: roadmap/tracking issues are NOT filtered — they get score-boosted instead.
133
+ */
103
134
  function isNonTechnical(issue) {
104
135
  const body = (issue.body || "").trim();
105
136
  if (body.length < 200 && !hasCodeBlock$1(body) && issue.reactions > 50) return true;
106
137
  if (/\b(?:love|thank|awesome|great work)\b/i.test(issue.title) && !hasCodeBlock$1(body)) return true;
107
138
  return false;
108
139
  }
140
+ /**
141
+ * Freshness-weighted score: reactions * decay(age_in_years)
142
+ * Steep decay so recent issues dominate over old high-reaction ones.
143
+ * At 0.6: 1yr=0.63x, 2yr=0.45x, 4yr=0.29x, 6yr=0.22x
144
+ */
109
145
  function freshnessScore(reactions, createdAt) {
110
146
  return reactions * (1 / (1 + (Date.now() - new Date(createdAt).getTime()) / (365.25 * 24 * 60 * 60 * 1e3) * .6));
111
147
  }
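// Worked numbers (illustrative, not part of the packaged code): the decay factor is
// 1 / (1 + 0.6 * ageInYears), which matches the multipliers quoted above.
const decay = (years) => 1 / (1 + 0.6 * years);
[1, 2, 4, 6].map(decay); // [0.625, 0.4545..., 0.2941..., 0.2173...]
// e.g. 100 reactions from one year ago scores ~62.5; a 4-year-old issue needs ~213 reactions to match.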
148
+ /**
149
+ * Type quotas — guarantee a mix of issue types.
150
+ * Bugs and questions get priority; feature requests are hard-capped.
151
+ */
112
152
  function applyTypeQuotas(issues, limit) {
113
153
  const byType = /* @__PURE__ */ new Map();
114
154
  for (const issue of issues) mapInsert(byType, issue.type, () => []).push(issue);
@@ -142,11 +182,18 @@ function applyTypeQuotas(issues, limit) {
142
182
  }
143
183
  return selected.sort((a, b) => b.score - a.score);
144
184
  }
185
+ /**
186
+ * Body truncation limit based on reactions — high-reaction issues deserve more space
187
+ */
145
188
  function bodyLimit(reactions) {
146
189
  if (reactions >= 10) return 2e3;
147
190
  if (reactions >= 5) return 1500;
148
191
  return 800;
149
192
  }
193
+ /**
194
+ * Smart body truncation — preserves code blocks and error messages.
195
+ * Instead of slicing at a char limit, finds a safe break point.
196
+ */
150
197
  function truncateBody$1(body, limit) {
151
198
  if (body.length <= limit) return body;
152
199
  const codeBlockRe = /```[\s\S]*?```/g;
@@ -166,6 +213,9 @@ function truncateBody$1(body, limit) {
166
213
  if (lastParagraph > lastSafeEnd * .6) return `${slice.slice(0, lastParagraph)}\n\n...`;
167
214
  return `${slice}...`;
168
215
  }
216
+ /**
217
+ * Fetch issues for a state using GitHub Search API sorted by reactions
218
+ */
169
219
  function fetchIssuesByState(owner, repo, state, count, releasedAt, fromDate) {
170
220
  const fetchCount = Math.min(count * 3, 100);
171
221
  let datePart = "";
@@ -210,7 +260,14 @@ function oneYearAgo() {
210
260
  d.setFullYear(d.getFullYear() - 1);
211
261
  return isoDate(d.toISOString());
212
262
  }
263
+ /** Noise patterns in comments — filter these out */
213
264
  const COMMENT_NOISE_RE$1 = /^(?:\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue)[\s!?.]*$/i;
265
+ /**
266
+ * Batch-fetch top comments for issues via GraphQL.
267
+ * Enriches the top N highest-score issues with their best comments.
268
+ * Prioritizes: comments with code blocks, from maintainers, with high reactions.
269
+ * Filters out "+1", "any updates?", "same here" noise.
270
+ */
214
271
  function enrichWithComments(owner, repo, issues, topN = 15) {
215
272
  const worth = issues.filter((i) => i.comments > 0 && (i.type === "bug" || i.type === "question" || i.reactions >= 3)).sort((a, b) => b.score - a.score).slice(0, topN);
216
273
  if (worth.length === 0) return;
@@ -258,6 +315,10 @@ function enrichWithComments(owner, repo, issues, topN = 15) {
258
315
  }
259
316
  } catch {}
260
317
  }
318
+ /**
319
+ * Try to detect which version fixed a closed issue from maintainer comments.
320
+ * Looks for version patterns in maintainer/collaborator comments.
321
+ */
261
322
  function detectResolvedVersion(comments) {
262
323
  const maintainerComments = comments.filter((c) => c.isMaintainer);
263
324
  for (const c of maintainerComments.reverse()) {
@@ -269,6 +330,11 @@ function detectResolvedVersion(comments) {
269
330
  }
270
331
  }
271
332
  }
333
+ /**
334
+ * Fetch issues from a GitHub repo with freshness-weighted scoring and type quotas.
335
+ * Returns a balanced mix: bugs > questions > docs > other > features.
336
+ * Filters noise, non-technical content, and enriches with quality comments.
337
+ */
272
338
  async function fetchGitHubIssues(owner, repo, limit = 30, releasedAt, fromDate) {
273
339
  if (!isGhAvailable()) return [];
274
340
  const openCount = Math.ceil(limit * .75);
@@ -283,6 +349,9 @@ async function fetchGitHubIssues(owner, repo, limit = 30, releasedAt, fromDate)
283
349
  return [];
284
350
  }
285
351
  }
352
+ /**
353
+ * Format a single issue as markdown with YAML frontmatter
354
+ */
286
355
  function formatIssueAsMarkdown(issue) {
287
356
  const limit = bodyLimit(issue.reactions);
288
357
  const fmFields = {
@@ -317,6 +386,10 @@ function formatIssueAsMarkdown(issue) {
317
386
  }
318
387
  return lines.join("\n");
319
388
  }
389
+ /**
390
+ * Generate a summary index of all issues for quick LLM scanning.
391
+ * Groups by type so the LLM can quickly find bugs vs questions.
392
+ */
320
393
  function generateIssueIndex(issues) {
321
394
  const byType = /* @__PURE__ */ new Map();
322
395
  for (const issue of issues) mapInsert(byType, issue.type, () => []).push(issue);
@@ -361,20 +434,32 @@ function generateIssueIndex(issues) {
361
434
  }
362
435
  return sections.join("\n");
363
436
  }
437
+ /**
438
+ * Shared utilities for doc resolution
439
+ */
364
440
  const $fetch = ofetch.create({
365
441
  retry: 3,
366
442
  retryDelay: 500,
367
443
  timeout: 15e3,
368
444
  headers: { "User-Agent": "skilld/1.0" }
369
445
  });
446
+ /**
447
+ * Fetch text content from URL
448
+ */
370
449
  async function fetchText(url) {
371
450
  return $fetch(url, { responseType: "text" }).catch(() => null);
372
451
  }
452
+ /**
453
+ * Verify URL exists and is not HTML (likely 404 page)
454
+ */
373
455
  async function verifyUrl(url) {
374
456
  const res = await $fetch.raw(url, { method: "HEAD" }).catch(() => null);
375
457
  if (!res) return false;
376
458
  return !(res.headers.get("content-type") || "").includes("text/html");
377
459
  }
460
+ /**
461
+ * Check if URL points to a social media or package registry site (not real docs)
462
+ */
378
463
  const USELESS_HOSTS = new Set([
379
464
  "twitter.com",
380
465
  "x.com",
@@ -394,6 +479,9 @@ function isUselessDocsUrl(url) {
394
479
  return false;
395
480
  }
396
481
  }
482
+ /**
483
+ * Check if URL is a GitHub repo URL (not a docs site)
484
+ */
397
485
  function isGitHubRepoUrl(url) {
398
486
  try {
399
487
  const parsed = new URL(url);
@@ -402,6 +490,9 @@ function isGitHubRepoUrl(url) {
402
490
  return false;
403
491
  }
404
492
  }
493
+ /**
494
+ * Parse owner/repo from GitHub URL
495
+ */
405
496
  function parseGitHubUrl(url) {
406
497
  const match = url.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:[/#]|$)/);
407
498
  if (!match) return null;
@@ -410,9 +501,16 @@ function parseGitHubUrl(url) {
410
501
  repo: match[2]
411
502
  };
412
503
  }
504
+ /**
505
+ * Normalize git repo URL to https
506
+ */
413
507
  function normalizeRepoUrl(url) {
414
508
  return url.replace(/^git\+/, "").replace(/#.*$/, "").replace(/\.git$/, "").replace(/^git:\/\//, "https://").replace(/^ssh:\/\/git@github\.com/, "https://github.com").replace(/^git@github\.com:/, "https://github.com/");
415
509
  }
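// Usage sketch (illustrative, not part of the packaged code):
normalizeRepoUrl("git+ssh://git@github.com/vuejs/core.git"); // "https://github.com/vuejs/core"
normalizeRepoUrl("git@github.com:vuejs/core.git");           // "https://github.com/vuejs/core"
parseGitHubUrl("https://github.com/vuejs/core#main");        // { owner: "vuejs", repo: "core" }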
510
+ /**
511
+ * Parse package spec with optional dist-tag or version: "vue@beta" → { name: "vue", tag: "beta" }
512
+ * Handles scoped packages: "@vue/reactivity@beta" → { name: "@vue/reactivity", tag: "beta" }
513
+ */
416
514
  function parsePackageSpec(spec) {
417
515
  if (spec.startsWith("@")) {
418
516
  const slashIdx = spec.indexOf("/");
@@ -432,6 +530,9 @@ function parsePackageSpec(spec) {
432
530
  };
433
531
  return { name: spec };
434
532
  }
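// Usage sketch (expected results per the JSDoc above; illustrative, not part of the packaged code):
parsePackageSpec("vue@beta");             // { name: "vue", tag: "beta" }
parsePackageSpec("@vue/reactivity@beta"); // { name: "@vue/reactivity", tag: "beta" }
parsePackageSpec("vue");                  // { name: "vue" }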
533
+ /**
534
+ * Extract branch hint from URL fragment (e.g. "git+https://...#main" → "main")
535
+ */
435
536
  function extractBranchHint(url) {
436
537
  const hash = url.indexOf("#");
437
538
  if (hash === -1) return void 0;
@@ -439,6 +540,9 @@ function extractBranchHint(url) {
439
540
  if (!fragment || fragment === "readme") return void 0;
440
541
  return fragment;
441
542
  }
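// Usage sketch (illustrative, not part of the packaged code):
extractBranchHint("git+https://github.com/vuejs/core.git#main");   // "main"
extractBranchHint("git+https://github.com/vuejs/core.git#readme"); // undefined ("readme" is ignored)
extractBranchHint("https://github.com/vuejs/core.git");            // undefined (no fragment)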
543
+ /**
544
+ * GitHub release notes fetching via gh CLI (preferred) with ungh.cc fallback
545
+ */
442
546
  function parseSemver(version) {
443
547
  const clean = version.replace(/^v/, "");
444
548
  const match = clean.match(/^(\d+)(?:\.(\d+))?(?:\.(\d+))?/);
@@ -450,6 +554,13 @@ function parseSemver(version) {
450
554
  raw: clean
451
555
  };
452
556
  }
557
+ /**
558
+ * Extract version from a release tag, handling monorepo formats:
559
+ * - `pkg@1.2.3` → `1.2.3`
560
+ * - `pkg-v1.2.3` → `1.2.3`
561
+ * - `v1.2.3` → `1.2.3`
562
+ * - `1.2.3` → `1.2.3`
563
+ */
453
564
  function extractVersion(tag, packageName) {
454
565
  if (packageName) {
455
566
  const atMatch = tag.match(new RegExp(`^${escapeRegex(packageName)}@(.+)$`));
@@ -462,9 +573,15 @@ function extractVersion(tag, packageName) {
462
573
  function escapeRegex(str) {
463
574
  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
464
575
  }
576
+ /**
577
+ * Check if a release tag belongs to a specific package
578
+ */
465
579
  function tagMatchesPackage(tag, packageName) {
466
580
  return tag.startsWith(`${packageName}@`) || tag.startsWith(`${packageName}-v`) || tag.startsWith(`${packageName}-`);
467
581
  }
582
+ /**
583
+ * Check if a version string contains a prerelease suffix (e.g. 6.0.0-beta, 1.2.3-rc.1)
584
+ */
468
585
  function isPrerelease(version) {
469
586
  return /^\d+\.\d+\.\d+-.+/.test(version.replace(/^v/, ""));
470
587
  }
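// Usage sketch (expected results per the JSDoc above; illustrative, not part of the packaged code):
extractVersion("pkg@1.2.3", "pkg");     // "1.2.3" (monorepo tag)
extractVersion("v1.2.3");               // "1.2.3"
tagMatchesPackage("pkg-v1.2.3", "pkg"); // true
isPrerelease("6.0.0-beta.3");           // true
isPrerelease("1.2.3");                  // false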
@@ -473,6 +590,9 @@ function compareSemver(a, b) {
473
590
  if (a.minor !== b.minor) return a.minor - b.minor;
474
591
  return a.patch - b.patch;
475
592
  }
593
+ /**
594
+ * Fetch releases via gh CLI (fast, authenticated, paginated)
595
+ */
476
596
  function fetchReleasesViaGh(owner, repo) {
477
597
  try {
478
598
  const { stdout: ndjson } = spawnSync("gh", [
@@ -496,9 +616,15 @@ function fetchReleasesViaGh(owner, repo) {
496
616
  return [];
497
617
  }
498
618
  }
619
+ /**
620
+ * Fetch all releases from a GitHub repo via ungh.cc (fallback)
621
+ */
499
622
  async function fetchReleasesViaUngh(owner, repo) {
500
623
  return (await $fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`, { signal: AbortSignal.timeout(15e3) }).catch(() => null))?.releases ?? [];
501
624
  }
625
+ /**
626
+ * Fetch all releases — gh CLI first, ungh.cc fallback
627
+ */
502
628
  async function fetchAllReleases(owner, repo) {
503
629
  if (isGhAvailable()) {
504
630
  const releases = fetchReleasesViaGh(owner, repo);
@@ -506,6 +632,12 @@ async function fetchAllReleases(owner, repo) {
506
632
  }
507
633
  return fetchReleasesViaUngh(owner, repo);
508
634
  }
635
+ /**
636
+ * Select last 20 stable releases for a package, sorted newest first.
637
+ * For monorepos, filters to package-specific tags (pkg@version).
638
+ * Falls back to generic tags (v1.2.3) only if no package-specific found.
639
+ * If installedVersion is provided, filters out releases newer than it.
640
+ */
509
641
  function selectReleases(releases, packageName, installedVersion, fromDate) {
510
642
  const hasMonorepoTags = packageName && releases.some((r) => tagMatchesPackage(r.tag, packageName));
511
643
  const installedSv = installedVersion ? parseSemver(installedVersion) : null;
@@ -535,6 +667,9 @@ function selectReleases(releases, packageName, installedVersion, fromDate) {
535
667
  });
536
668
  return fromDate ? sorted : sorted.slice(0, 20);
537
669
  }
670
+ /**
671
+ * Format a release as markdown with YAML frontmatter
672
+ */
538
673
  function formatRelease(release, packageName) {
539
674
  const date = isoDate(release.publishedAt || release.createdAt);
540
675
  const version = extractVersion(release.tag, packageName) || release.tag;
@@ -548,6 +683,10 @@ function formatRelease(release, packageName) {
548
683
  fm.push("---");
549
684
  return `${fm.join("\n")}\n\n# ${release.name || release.tag}\n\n${release.markdown}`;
550
685
  }
686
+ /**
687
+ * Generate a unified summary index of all releases for quick LLM scanning.
688
+ * Includes GitHub releases, blog release posts, and CHANGELOG link.
689
+ */
551
690
  function generateReleaseIndex(releasesOrOpts, packageName) {
552
691
  const opts = Array.isArray(releasesOrOpts) ? {
553
692
  releases: releasesOrOpts,
@@ -589,10 +728,18 @@ function generateReleaseIndex(releasesOrOpts, packageName) {
589
728
  }
590
729
  return lines.join("\n");
591
730
  }
731
+ /**
732
+ * Check if a single release is a stub redirecting to CHANGELOG.md.
733
+ * Short body (<500 chars) that mentions CHANGELOG indicates no real content.
734
+ */
592
735
  function isStubRelease(release) {
593
736
  const body = (release.markdown || "").trim();
594
737
  return body.length < 500 && /changelog\.md/i.test(body);
595
738
  }
739
+ /**
740
+ * Fetch CHANGELOG.md from a GitHub repo at a specific ref as fallback.
741
+ * For monorepos, also checks packages/{shortName}/CHANGELOG.md.
742
+ */
596
743
  async function fetchChangelog(owner, repo, ref, packageName) {
597
744
  const paths = [];
598
745
  if (packageName) {
@@ -611,6 +758,13 @@ async function fetchChangelog(owner, repo, ref, packageName) {
611
758
  }
612
759
  return null;
613
760
  }
761
+ /**
762
+ * Fetch release notes for a package. Returns CachedDoc[] with releases/{tag}.md files.
763
+ *
764
+ * Strategy:
765
+ * 1. Fetch GitHub releases, filter to package-specific tags for monorepos
766
+ * 2. If no releases found, try CHANGELOG.md as fallback
767
+ */
614
768
  async function fetchReleaseNotes(owner, repo, installedVersion, gitRef, packageName, fromDate, changelogRef) {
615
769
  const selected = selectReleases(await fetchAllReleases(owner, repo), packageName, installedVersion, fromDate);
616
770
  if (selected.length > 0) {
@@ -634,6 +788,9 @@ async function fetchReleaseNotes(owner, repo, installedVersion, gitRef, packageN
634
788
  content: changelog
635
789
  }];
636
790
  }
791
+ /**
792
+ * Format a blog release as markdown with YAML frontmatter
793
+ */
637
794
  function formatBlogRelease(release) {
638
795
  return `${[
639
796
  "---",
@@ -645,6 +802,9 @@ function formatBlogRelease(release) {
645
802
  "---"
646
803
  ].join("\n")}\n\n# ${release.title}\n\n${release.markdown}`;
647
804
  }
805
+ /**
806
+ * Fetch and parse a single blog post using preset metadata for version/date
807
+ */
648
808
  async function fetchBlogPost(entry) {
649
809
  try {
650
810
  const html = await $fetch(entry.url, {
@@ -672,6 +832,11 @@ async function fetchBlogPost(entry) {
672
832
  return null;
673
833
  }
674
834
  }
835
+ /**
836
+ * Filter blog releases by installed version
837
+ * Only includes releases where version <= installedVersion
838
+ * Returns all releases if version parsing fails (fail-safe)
839
+ */
675
840
  function filterBlogsByVersion(entries, installedVersion) {
676
841
  const installedSv = parseSemver(installedVersion);
677
842
  if (!installedSv) return entries;
@@ -681,6 +846,11 @@ function filterBlogsByVersion(entries, installedVersion) {
681
846
  return compareSemver(entrySv, installedSv) <= 0;
682
847
  });
683
848
  }
849
+ /**
850
+ * Fetch blog release notes from package presets
851
+ * Filters to only releases matching or older than the installed version
852
+ * Returns CachedDoc[] with releases/blog-{version}.md files
853
+ */
684
854
  async function fetchBlogReleases(packageName, installedVersion) {
685
855
  const preset = getBlogPreset(packageName);
686
856
  if (!preset) return [];
@@ -708,6 +878,17 @@ async function fetchBlogReleases(packageName, installedVersion) {
708
878
  content: formatBlogRelease(r)
709
879
  }));
710
880
  }
881
+ /**
882
+ * Website crawl doc source — fetches docs by crawling a URL pattern
883
+ */
884
+ /**
885
+ * Crawl a URL pattern and return docs as cached doc format.
886
+ * Uses HTTP crawler (no browser needed) with sitemap discovery + glob filtering.
887
+ *
888
+ * @param url - URL with optional glob pattern (e.g. 'https://example.com/docs/**')
889
+ * @param onProgress - Optional progress callback
890
+ * @param maxPages - Max pages to crawl (default 200)
891
+ */
711
892
  async function fetchCrawledDocs(url, onProgress, maxPages = 200) {
712
893
  const outputDir = join(tmpdir(), "skilld-crawl", Date.now().toString());
713
894
  onProgress?.(`Crawling ${url}`);
@@ -745,9 +926,16 @@ async function fetchCrawledDocs(url, onProgress, maxPages = 200) {
745
926
  onProgress?.(`Crawled ${docs.length} pages`);
746
927
  return docs;
747
928
  }
929
+ /** Append glob pattern to a docs URL for crawling */
748
930
  function toCrawlPattern(docsUrl) {
749
931
  return `${docsUrl.replace(/\/+$/, "")}/**`;
750
932
  }
933
+ /**
934
+ * GitHub discussions fetching via gh CLI GraphQL
935
+ * Prioritizes Q&A and Help categories, includes accepted answers
936
+ * Comment quality filtering, smart truncation, noise removal
937
+ */
938
+ /** Categories most useful for skill generation (in priority order) */
751
939
  const HIGH_VALUE_CATEGORIES = new Set([
752
940
  "q&a",
753
941
  "help",
@@ -759,10 +947,16 @@ const LOW_VALUE_CATEGORIES = new Set([
759
947
  "ideas",
760
948
  "polls"
761
949
  ]);
950
+ /** Noise patterns in comments — filter these out */
762
951
  const COMMENT_NOISE_RE = /^(?:\+1|👍|same here|any update|bump|following|is there any progress|when will this|me too|i have the same|same issue|thanks|thank you)[\s!?.]*$/i;
952
+ /** Check if body contains a code block */
763
953
  function hasCodeBlock(text) {
764
954
  return /```[\s\S]*?```/.test(text) || /`[^`]+`/.test(text);
765
955
  }
956
+ /**
957
+ * Smart body truncation — preserves code blocks and error messages.
958
+ * Instead of slicing at a char limit, finds a safe break point.
959
+ */
766
960
  function truncateBody(body, limit) {
767
961
  if (body.length <= limit) return body;
768
962
  const codeBlockRe = /```[\s\S]*?```/g;
@@ -782,11 +976,21 @@ function truncateBody(body, limit) {
782
976
  if (lastParagraph > lastSafeEnd * .6) return `${slice.slice(0, lastParagraph)}\n\n...`;
783
977
  return `${slice}...`;
784
978
  }
979
+ /** Off-topic or spam title patterns — instant reject */
785
980
  const TITLE_NOISE_RE = /looking .*(?:developer|engineer|freelanc)|hiring|job post|guide me to (?:complete|finish|build)|help me (?:complete|finish|build)|seeking .* tutorial|recommend.* course/i;
981
+ /** Minimum score for a discussion to be included */
786
982
  const MIN_DISCUSSION_SCORE = 3;
983
+ /**
984
+ * Score a comment for quality. Higher = more useful for skill generation.
985
+ * Maintainers 3x, code blocks 2x, reactions linear.
986
+ */
787
987
  function scoreComment(c) {
788
988
  return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions);
789
989
  }
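// Usage sketch (illustrative, not part of the packaged code):
scoreComment({ isMaintainer: true, reactions: 2, body: "Fixed in `v2.1.0`" }); // 3 * 2 * (1 + 2) = 18
scoreComment({ isMaintainer: false, reactions: 0, body: "same here" });        // 1 * 1 * 1 = 1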
990
+ /**
991
+ * Score a discussion for overall quality. Used for filtering and sorting.
992
+ * Returns -1 for instant-reject (spam/off-topic).
993
+ */
790
994
  function scoreDiscussion(d) {
791
995
  if (TITLE_NOISE_RE.test(d.title)) return -1;
792
996
  let score = 0;
@@ -805,6 +1009,11 @@ function scoreDiscussion(d) {
805
1009
  if (d.topComments.some((c) => c.reactions > 0)) score += 1;
806
1010
  return score;
807
1011
  }
1012
+ /**
1013
+ * Fetch discussions from a GitHub repo using gh CLI GraphQL.
1014
+ * Prioritizes Q&A and Help categories. Includes accepted answer body for answered discussions.
1015
+ * Fetches extra comments and scores them for quality.
1016
+ */
808
1017
  async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromDate) {
809
1018
  if (!isGhAvailable()) return [];
810
1019
  if (!fromDate && releasedAt) {
@@ -887,6 +1096,9 @@ async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromD
887
1096
  return [];
888
1097
  }
889
1098
  }
1099
+ /**
1100
+ * Format a single discussion as markdown with YAML frontmatter
1101
+ */
890
1102
  function formatDiscussionAsMarkdown(d) {
891
1103
  const fm = buildFrontmatter({
892
1104
  number: d.number,
@@ -916,6 +1128,10 @@ function formatDiscussionAsMarkdown(d) {
916
1128
  }
917
1129
  return lines.join("\n");
918
1130
  }
1131
+ /**
1132
+ * Generate a summary index of all discussions for quick LLM scanning.
1133
+ * Groups by category so the LLM can quickly find Q&A vs general discussions.
1134
+ */
919
1135
  function generateDiscussionIndex(discussions) {
920
1136
  const byCategory = /* @__PURE__ */ new Map();
921
1137
  for (const d of discussions) mapInsert(byCategory, d.category || "Uncategorized", () => []).push(d);
@@ -947,6 +1163,14 @@ function generateDiscussionIndex(discussions) {
947
1163
  }
948
1164
  return sections.join("\n");
949
1165
  }
1166
+ /**
1167
+ * Docs index generation — creates _INDEX.md for docs directory
1168
+ */
1169
+ /**
1170
+ * Generate a _INDEX.md for a docs/ directory.
1171
+ * Input: array of cached docs with paths like `docs/api/reactivity.md`.
1172
+ * Output: markdown index grouped by directory with title + description per page.
1173
+ */
950
1174
  function generateDocsIndex(docs) {
951
1175
  const docFiles = docs.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md") && !d.path.endsWith("_INDEX.md")).sort((a, b) => a.path.localeCompare(b.path));
952
1176
  if (docFiles.length === 0) return "";
@@ -991,6 +1215,10 @@ function generateDocsIndex(docs) {
991
1215
  }
992
1216
  return sections.join("\n");
993
1217
  }
1218
+ /**
1219
+ * Globs .d.ts type definition files from a package for search indexing.
1220
+ * Only types — source code is too verbose.
1221
+ */
994
1222
  const SKIP_DIRS = [
995
1223
  "node_modules",
996
1224
  "_vendor",
@@ -1018,12 +1246,16 @@ const SKIP_PATTERNS = [
1018
1246
  "README*"
1019
1247
  ];
1020
1248
  const MAX_FILE_SIZE = 500 * 1024;
1249
+ /**
1250
+ * Glob .d.ts type definition files from a package directory, skipping junk.
1251
+ */
1021
1252
  async function resolveEntryFiles(packageDir) {
1022
1253
  if (!existsSync(join(packageDir, "package.json"))) return [];
1023
- const files = await globby(["**/*.d.{ts,mts,cts}"], {
1254
+ const files = await glob(["**/*.d.{ts,mts,cts}"], {
1024
1255
  cwd: packageDir,
1025
1256
  ignore: [...SKIP_DIRS.map((d) => `**/${d}/**`), ...SKIP_PATTERNS],
1026
- absolute: false
1257
+ absolute: false,
1258
+ expandDirectories: false
1027
1259
  });
1028
1260
  const entries = [];
1029
1261
  for (const file of files) {
@@ -1043,6 +1275,16 @@ async function resolveEntryFiles(packageDir) {
1043
1275
  }
1044
1276
  return entries;
1045
1277
  }
1278
+ /**
1279
+ * Git repo skill source — parse inputs + fetch pre-authored skills from repos
1280
+ *
1281
+ * Supports GitHub shorthand (owner/repo), full URLs, SSH, GitLab, and local paths.
1282
+ * Skills are pre-authored SKILL.md files — no doc resolution or LLM generation needed.
1283
+ */
1284
+ /**
1285
+ * Detect whether an input string is a git skill source.
1286
+ * Returns null for npm package names (including scoped @scope/pkg).
1287
+ */
1046
1288
  function parseGitSkillInput(input) {
1047
1289
  const trimmed = input.trim();
1048
1290
  if (trimmed.startsWith("@")) return null;
@@ -1104,6 +1346,9 @@ function parseGitUrl(url) {
1104
1346
  return null;
1105
1347
  }
1106
1348
  }
1349
+ /**
1350
+ * Parse name and description from SKILL.md frontmatter.
1351
+ */
1107
1352
  function parseSkillFrontmatterName(content) {
1108
1353
  const fm = parseFrontmatter(content);
1109
1354
  return {
@@ -1111,6 +1356,7 @@ function parseSkillFrontmatterName(content) {
1111
1356
  description: fm.description
1112
1357
  };
1113
1358
  }
1359
+ /** Recursively collect all files in a directory, returning relative paths */
1114
1360
  function collectFiles(dir, prefix = "") {
1115
1361
  const files = [];
1116
1362
  if (!existsSync(dir)) return files;
@@ -1125,6 +1371,9 @@ function collectFiles(dir, prefix = "") {
1125
1371
  }
1126
1372
  return files;
1127
1373
  }
1374
+ /**
1375
+ * Fetch skills from a git source. Returns list of discovered skills.
1376
+ */
1128
1377
  async function fetchGitSkills(source, onProgress) {
1129
1378
  if (source.type === "local") return fetchLocalSkills(source);
1130
1379
  if (source.type === "github") return fetchGitHubSkills(source, onProgress);
@@ -1272,11 +1521,17 @@ async function fetchGitLabSkills(source, onProgress) {
1272
1521
  });
1273
1522
  }
1274
1523
  }
1524
+ /**
1525
+ * Check for llms.txt at a docs URL, returns the llms.txt URL if found
1526
+ */
1275
1527
  async function fetchLlmsUrl(docsUrl) {
1276
1528
  const llmsUrl = `${new URL(docsUrl).origin}/llms.txt`;
1277
1529
  if (await verifyUrl(llmsUrl)) return llmsUrl;
1278
1530
  return null;
1279
1531
  }
1532
+ /**
1533
+ * Fetch and parse llms.txt content
1534
+ */
1280
1535
  async function fetchLlmsTxt(url) {
1281
1536
  const content = await fetchText(url);
1282
1537
  if (!content || content.length < 50) return null;
@@ -1285,9 +1540,16 @@ async function fetchLlmsTxt(url) {
1285
1540
  links: parseMarkdownLinks(content)
1286
1541
  };
1287
1542
  }
1543
+ /**
1544
+ * Parse markdown links from llms.txt to get .md file paths
1545
+ */
1288
1546
  function parseMarkdownLinks(content) {
1289
1547
  return extractLinks(content).filter((l) => l.url.endsWith(".md"));
1290
1548
  }
1549
+ /**
1550
+ * Download all .md files referenced in llms.txt
1551
+ */
1552
+ /** Reject non-https URLs and private/link-local IPs */
1291
1553
  function isSafeUrl(url) {
1292
1554
  try {
1293
1555
  const parsed = new URL(url);
@@ -1318,6 +1580,10 @@ async function downloadLlmsDocs(llmsContent, baseUrl, onProgress) {
1318
1580
  return null;
1319
1581
  })))).filter((d) => d !== null);
1320
1582
  }
1583
+ /**
1584
+ * Normalize llms.txt links to relative paths for local access
1585
+ * Handles: absolute URLs, root-relative paths, and relative paths
1586
+ */
1321
1587
  function normalizeLlmsLinks(content, baseUrl) {
1322
1588
  let normalized = content;
1323
1589
  if (baseUrl) {
@@ -1327,6 +1593,10 @@ function normalizeLlmsLinks(content, baseUrl) {
1327
1593
  normalized = normalized.replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)");
1328
1594
  return normalized;
1329
1595
  }
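// Illustrative sketch (not part of the packaged code): the root-relative rewrite shown above.
"[Guide](/guide/index.md)".replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)"); // "[Guide](./docs/guide/index.md)"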
1596
+ /**
1597
+ * Extract sections from llms-full.txt by URL patterns
1598
+ * Format: ---\nurl: /path.md\n---\n<content>\n\n---\nurl: ...
1599
+ */
1330
1600
  function extractSections(content, patterns) {
1331
1601
  const sections = [];
1332
1602
  const parts = content.split(/\n---\n/);
@@ -1342,11 +1612,20 @@ function extractSections(content, patterns) {
1342
1612
  if (sections.length === 0) return null;
1343
1613
  return sections.join("\n\n---\n\n");
1344
1614
  }
1615
+ /** Minimum git-doc file count to prefer over llms.txt */
1345
1616
  const MIN_GIT_DOCS = 5;
1617
+ /** True when git-docs exist but are too few to be useful (< MIN_GIT_DOCS) */
1346
1618
  const isShallowGitDocs = (n) => n > 0 && n < MIN_GIT_DOCS;
1619
+ /**
1620
+ * List files at a git ref using ungh (no rate limits)
1621
+ */
1347
1622
  async function listFilesAtRef(owner, repo, ref) {
1348
1623
  return (await $fetch(`https://ungh.cc/repos/${owner}/${repo}/files/${ref}`).catch(() => null))?.files?.map((f) => f.path) ?? [];
1349
1624
  }
1625
+ /**
1626
+ * Find git tag for a version by checking if ungh can list files at that ref.
1627
+ * Tries v{version}, {version}, and optionally {packageName}@{version} (changeset convention).
1628
+ */
1350
1629
  async function findGitTag(owner, repo, version, packageName, branchHint) {
1351
1630
  const candidates = [`v${version}`, version];
1352
1631
  if (packageName) candidates.push(`${packageName}@${version}`);
@@ -1378,11 +1657,18 @@ async function findGitTag(owner, repo, version, packageName, branchHint) {
1378
1657
  }
1379
1658
  return null;
1380
1659
  }
1660
+ /**
1661
+ * Find the latest release tag matching `{packageName}@*` via ungh releases API.
1662
+ * Handles monorepos where npm version doesn't match git tag version.
1663
+ */
1381
1664
  async function findLatestReleaseTag(owner, repo, packageName) {
1382
1665
  const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`).catch(() => null);
1383
1666
  const prefix = `${packageName}@`;
1384
1667
  return data?.releases?.find((r) => r.tag.startsWith(prefix))?.tag ?? null;
1385
1668
  }
1669
+ /**
1670
+ * Filter file paths by prefix and md/mdx extension
1671
+ */
1386
1672
  function filterDocFiles(files, pathPrefix) {
1387
1673
  return files.filter((f) => f.startsWith(pathPrefix) && /\.(?:md|mdx)$/.test(f));
1388
1674
  }
@@ -1396,6 +1682,12 @@ const FRAMEWORK_NAMES = new Set([
1396
1682
  "lit",
1397
1683
  "qwik"
1398
1684
  ]);
1685
+ /**
1686
+ * Filter out docs for other frameworks when the package targets a specific one.
1687
+ * e.g. @tanstack/vue-query → keep vue + shared docs, exclude react/solid/angular
1688
+ * Uses word-boundary matching to catch all path conventions:
1689
+ * framework/react/, 0.react/, api/ai-react.md, react-native.mdx, etc.
1690
+ */
1399
1691
  function filterFrameworkDocs(files, packageName) {
1400
1692
  if (!packageName) return files;
1401
1693
  const shortName = packageName.replace(/^@.*\//, "");
@@ -1405,12 +1697,14 @@ function filterFrameworkDocs(files, packageName) {
1405
1697
  const excludePattern = new RegExp(`\\b(?:${otherFrameworks.join("|")})\\b`);
1406
1698
  return files.filter((f) => !excludePattern.test(f));
1407
1699
  }
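// Usage sketch (expected result per the JSDoc above; illustrative, not part of the packaged code):
filterFrameworkDocs(
  ["framework/vue/overview.md", "framework/react/overview.md", "typescript.md"],
  "@tanstack/vue-query"
);
// ["framework/vue/overview.md", "typescript.md"] (react-specific paths excluded)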
1700
+ /** Known noise paths to exclude from doc discovery */
1408
1701
  const NOISE_PATTERNS = [
1409
1702
  /^\.changeset\//,
1410
1703
  /CHANGELOG\.md$/i,
1411
1704
  /CONTRIBUTING\.md$/i,
1412
1705
  /^\.github\//
1413
1706
  ];
1707
+ /** Directories to exclude from "best directory" heuristic */
1414
1708
  const EXCLUDE_DIRS = new Set([
1415
1709
  "test",
1416
1710
  "tests",
@@ -1429,6 +1723,7 @@ const EXCLUDE_DIRS = new Set([
1429
1723
  "mocks",
1430
1724
  "__mocks__"
1431
1725
  ]);
1726
+ /** Directory names that suggest documentation */
1432
1727
  const DOC_DIR_BONUS = new Set([
1433
1728
  "docs",
1434
1729
  "documentation",
@@ -1441,19 +1736,38 @@ const DOC_DIR_BONUS = new Set([
1441
1736
  "manual",
1442
1737
  "api"
1443
1738
  ]);
1739
+ /**
1740
+ * Check if a path contains any excluded directory
1741
+ */
1444
1742
  function hasExcludedDir(path) {
1445
1743
  return path.split("/").some((p) => EXCLUDE_DIRS.has(p.toLowerCase()));
1446
1744
  }
1745
+ /**
1746
+ * Get the depth of a path (number of directory levels)
1747
+ */
1447
1748
  function getPathDepth(path) {
1448
1749
  return path.split("/").filter(Boolean).length;
1449
1750
  }
1751
+ /**
1752
+ * Check if path contains a doc-related directory name
1753
+ */
1450
1754
  function hasDocDirBonus(path) {
1451
1755
  return path.split("/").some((p) => DOC_DIR_BONUS.has(p.toLowerCase()));
1452
1756
  }
1757
+ /**
1758
+ * Score a directory for doc likelihood.
1759
+ * Higher = better. Formula: count * nameBonus / depth
1760
+ */
1453
1761
  function scoreDocDir(dir, fileCount) {
1454
1762
  const depth = getPathDepth(dir) || 1;
1455
1763
  return fileCount * (hasDocDirBonus(dir) ? 1.5 : 1) / depth;
1456
1764
  }
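// Usage sketch (illustrative, not part of the packaged code): doc-named directories get a
// 1.5x bonus and deeper paths are penalized.
scoreDocDir("docs/guide", 10);        // 10 * 1.5 / 2 = 7.5
scoreDocDir("packages/core/src", 10); // 10 * 1 / 3 ≈ 3.33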
1765
+ /**
1766
+ * Discover doc files in non-standard locations.
1767
+ * First tries to scope to sub-package dir in monorepos.
1768
+ * Then looks for clusters of md/mdx files in paths containing /docs/.
1769
+ * Falls back to finding the directory with the most markdown files (≥5).
1770
+ */
1457
1771
  function discoverDocFiles(allFiles, packageName) {
1458
1772
  const mdFiles = allFiles.filter((f) => /\.(?:md|mdx)$/.test(f)).filter((f) => !NOISE_PATTERNS.some((p) => p.test(f))).filter((f) => f.includes("/"));
1459
1773
  if (packageName?.includes("/")) {
@@ -1502,9 +1816,16 @@ function discoverDocFiles(allFiles, packageName) {
1502
1816
  prefix: best.dir
1503
1817
  };
1504
1818
  }
1819
+ /**
1820
+ * List markdown files in a folder at a specific git ref
1821
+ */
1505
1822
  async function listDocsAtRef(owner, repo, ref, pathPrefix = "docs/") {
1506
1823
  return filterDocFiles(await listFilesAtRef(owner, repo, ref), pathPrefix);
1507
1824
  }
1825
+ /**
1826
+ * Fetch versioned docs from GitHub repo's docs/ folder.
1827
+ * Pass packageName to check doc overrides (e.g. vue -> vuejs/docs).
1828
+ */
1508
1829
  async function fetchGitDocs(owner, repo, version, packageName, repoUrl) {
1509
1830
  const override = packageName ? getDocOverride(packageName) : void 0;
1510
1831
  if (override) {
@@ -1544,9 +1865,18 @@ async function fetchGitDocs(owner, repo, version, packageName, repoUrl) {
1544
1865
  fallback: tag.fallback
1545
1866
  };
1546
1867
  }
1868
+ /**
1869
+ * Strip file extension (.md, .mdx) and leading slash from a path
1870
+ */
1547
1871
  function normalizePath(p) {
1548
1872
  return p.replace(/^\//, "").replace(/\.(?:md|mdx)$/, "");
1549
1873
  }
1874
+ /**
1875
+ * Validate that discovered git docs are relevant by cross-referencing llms.txt links
1876
+ * against the repo file tree. Uses extensionless suffix matching to handle monorepo nesting.
1877
+ *
1878
+ * Returns { isValid, matchRatio } where isValid = matchRatio >= 0.3
1879
+ */
1550
1880
  function validateGitDocsWithLlms(llmsLinks, repoFiles) {
1551
1881
  if (llmsLinks.length === 0) return {
1552
1882
  isValid: true,
@@ -1572,6 +1902,10 @@ function validateGitDocsWithLlms(llmsLinks, repoFiles) {
1572
1902
  matchRatio
1573
1903
  };
1574
1904
  }
1905
+ /**
1906
+ * Verify a GitHub repo is the source for an npm package by checking package.json name field.
1907
+ * Checks root first, then common monorepo paths (packages/{shortName}, packages/{name}).
1908
+ */
1575
1909
  async function verifyNpmRepo(owner, repo, packageName) {
1576
1910
  const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`;
1577
1911
  const paths = [
@@ -1630,6 +1964,10 @@ async function searchGitHubRepo(packageName) {
1630
1964
  }
1631
1965
  return null;
1632
1966
  }
1967
+ /**
1968
+ * Fetch GitHub repo metadata to get website URL.
1969
+ * Pass packageName to check doc overrides first (avoids API call).
1970
+ */
1633
1971
  async function fetchGitHubRepoMeta(owner, repo, packageName) {
1634
1972
  const override = packageName ? getDocOverride(packageName) : void 0;
1635
1973
  if (override?.homepage) return { homepage: override.homepage };
@@ -1650,6 +1988,9 @@ async function fetchGitHubRepoMeta(owner, repo, packageName) {
1650
1988
  const data = await $fetch(`https://api.github.com/repos/${owner}/${repo}`).catch(() => null);
1651
1989
  return data?.homepage ? { homepage: data.homepage } : null;
1652
1990
  }
1991
+ /**
1992
+ * Resolve README URL for a GitHub repo, returns ungh:// pseudo-URL or raw URL
1993
+ */
1653
1994
  async function fetchReadme(owner, repo, subdir, ref) {
1654
1995
  const unghUrl = subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${ref || "main"}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ""}`;
1655
1996
  if ((await $fetch.raw(unghUrl).catch(() => null))?.ok) return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ""}${ref ? `@${ref}` : ""}`;
@@ -1665,6 +2006,9 @@ async function fetchReadme(owner, repo, subdir, ref) {
1665
2006
  }
1666
2007
  return null;
1667
2008
  }
2009
+ /**
2010
+ * Fetch README content from ungh:// pseudo-URL, file:// URL, or regular URL
2011
+ */
1668
2012
  async function fetchReadmeContent(url) {
1669
2013
  if (url.startsWith("file://")) {
1670
2014
  const filePath = fileURLToPath(url);
@@ -1694,6 +2038,10 @@ async function fetchReadmeContent(url) {
1694
2038
  }
1695
2039
  return fetchText(url);
1696
2040
  }
2041
+ /**
2042
+ * Resolve a GitHub repo into a ResolvedPackage (no npm registry needed).
2043
+ * Fetches repo meta, latest release version, git docs, README, and llms.txt.
2044
+ */
1697
2045
  async function resolveGitHubRepo(owner, repo, onProgress) {
1698
2046
  onProgress?.("Fetching repo metadata");
1699
2047
  const repoUrl = `https://github.com/${owner}/${repo}`;
@@ -1755,6 +2103,10 @@ async function resolveGitHubRepo(owner, repo, onProgress) {
1755
2103
  llmsUrl
1756
2104
  };
1757
2105
  }
2106
+ /**
2107
+ * Search npm registry for packages matching a query.
2108
+ * Used as a fallback when direct package lookup fails.
2109
+ */
1758
2110
  async function searchNpmPackages(query, size = 5) {
1759
2111
  const data = await $fetch(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null);
1760
2112
  if (!data?.objects?.length) return [];
@@ -1764,11 +2116,17 @@ async function searchNpmPackages(query, size = 5) {
1764
2116
  version: o.package.version
1765
2117
  }));
1766
2118
  }
2119
+ /**
2120
+ * Fetch package info from npm registry
2121
+ */
1767
2122
  async function fetchNpmPackage(packageName) {
1768
2123
  const data = await $fetch(`https://unpkg.com/${packageName}/package.json`).catch(() => null);
1769
2124
  if (data) return data;
1770
2125
  return $fetch(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null);
1771
2126
  }
2127
+ /**
2128
+ * Fetch release date and dist-tags from npm registry
2129
+ */
1772
2130
  async function fetchNpmRegistryMeta(packageName, version) {
1773
2131
  const { name: barePackageName } = parsePackageSpec(packageName);
1774
2132
  const data = await $fetch(`https://registry.npmjs.org/${barePackageName}`).catch(() => null);
@@ -1782,6 +2140,10 @@ async function fetchNpmRegistryMeta(packageName, version) {
1782
2140
  distTags
1783
2141
  };
1784
2142
  }
2143
+ /**
2144
+ * Shared GitHub resolution cascade: git docs → repo meta (homepage) → README.
2145
+ * Used for both "repo URL found in package.json" and "repo URL found via search" paths.
2146
+ */
1785
2147
  async function resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, opts) {
1786
2148
  let allFiles;
1787
2149
  if (targetVersion) {
@@ -1840,9 +2202,15 @@ async function resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgres
1840
2202
  });
1841
2203
  return allFiles;
1842
2204
  }
2205
+ /**
2206
+ * Resolve documentation URL for a package (legacy - returns null on failure)
2207
+ */
1843
2208
  async function resolvePackageDocs(packageName, options = {}) {
1844
2209
  return (await resolvePackageDocsWithAttempts(packageName, options)).package;
1845
2210
  }
2211
+ /**
2212
+ * Resolve documentation URL for a package with attempt tracking
2213
+ */
1846
2214
  async function resolvePackageDocsWithAttempts(packageName, options = {}) {
1847
2215
  const attempts = [];
1848
2216
  const { onProgress } = options;
@@ -1976,6 +2344,9 @@ async function resolvePackageDocsWithAttempts(packageName, options = {}) {
1976
2344
  attempts
1977
2345
  };
1978
2346
  }
2347
+ /**
2348
+ * Parse version specifier, handling protocols like link:, workspace:, npm:, file:
2349
+ */
1979
2350
  function parseVersionSpecifier(name, version, cwd) {
1980
2351
  if (version.startsWith("link:")) {
1981
2352
  const linkedPkgPath = join(resolve(cwd, version.slice(5)), "package.json");
@@ -2013,6 +2384,10 @@ function parseVersionSpecifier(name, version, cwd) {
2013
2384
  };
2014
2385
  return null;
2015
2386
  }
2387
+ /**
2388
+ * Resolve the actual installed version of a package by finding its package.json
2389
+ * via mlly's resolvePathSync. Works regardless of package manager or version protocol.
2390
+ */
2016
2391
  function resolveInstalledVersion(name, cwd) {
2017
2392
  try {
2018
2393
  const resolved = resolvePathSync(`${name}/package.json`, { url: cwd });
@@ -2029,6 +2404,9 @@ function resolveInstalledVersion(name, cwd) {
2029
2404
  return null;
2030
2405
  }
2031
2406
  }
2407
+ /**
2408
+ * Read package.json dependencies with versions
2409
+ */
2032
2410
  async function readLocalDependencies(cwd) {
2033
2411
  const pkgPath = join(cwd, "package.json");
2034
2412
  if (!existsSync(pkgPath)) throw new Error("No package.json found in current directory");
@@ -2044,6 +2422,9 @@ async function readLocalDependencies(cwd) {
2044
2422
  }
2045
2423
  return results;
2046
2424
  }
2425
+ /**
2426
+ * Read package info from a local path (for link: deps)
2427
+ */
2047
2428
  function readLocalPackageInfo(localPath) {
2048
2429
  const pkgPath = join(localPath, "package.json");
2049
2430
  if (!existsSync(pkgPath)) return null;
@@ -2059,6 +2440,9 @@ function readLocalPackageInfo(localPath) {
2059
2440
  localPath
2060
2441
  };
2061
2442
  }
2443
+ /**
2444
+ * Resolve docs for a local package (link: dependency)
2445
+ */
2062
2446
  async function resolveLocalPackageDocs(localPath) {
2063
2447
  const info = readLocalPackageInfo(localPath);
2064
2448
  if (!info) return null;
@@ -2088,6 +2472,13 @@ async function resolveLocalPackageDocs(localPath) {
2088
2472
  if (!result.readmeUrl && !result.gitDocsUrl) return null;
2089
2473
  return result;
2090
2474
  }
2475
+ /**
2476
+ * Download and extract npm package tarball to cache directory.
2477
+ * Used when the package isn't available in node_modules.
2478
+ *
2479
+ * Extracts to: ~/.skilld/references/<pkg>@<version>/pkg/
2480
+ * Returns the extracted directory path, or null on failure.
2481
+ */
2091
2482
  async function fetchPkgDist(name, version) {
2092
2483
  const cacheDir = getCacheDir(name, version);
2093
2484
  const pkgDir = join(cacheDir, "pkg");
@@ -2140,9 +2531,15 @@ async function fetchPkgDist(name, version) {
2140
2531
  unlinkSync(tmpTarball);
2141
2532
  return pkgDir;
2142
2533
  }
2534
+ /**
2535
+ * Fetch just the latest version string from npm (lightweight)
2536
+ */
2143
2537
  async function fetchLatestVersion(packageName) {
2144
2538
  return (await $fetch(`https://unpkg.com/${packageName}/package.json`).catch(() => null))?.version || null;
2145
2539
  }
2540
+ /**
2541
+ * Get installed skill version from SKILL.md
2542
+ */
2146
2543
  function getInstalledSkillVersion(skillDir) {
2147
2544
  const skillPath = join(skillDir, "SKILL.md");
2148
2545
  if (!existsSync(skillPath)) return null;
@@ -2150,4 +2547,4 @@ function getInstalledSkillVersion(skillDir) {
2150
2547
  }
2151
2548
  export { fetchGitHubIssues as $, parseGitSkillInput as A, compareSemver as B, downloadLlmsDocs as C, normalizeLlmsLinks as D, fetchLlmsUrl as E, formatDiscussionAsMarkdown as F, $fetch as G, generateReleaseIndex as H, generateDiscussionIndex as I, isGitHubRepoUrl as J, extractBranchHint as K, fetchCrawledDocs as L, resolveEntryFiles as M, generateDocsIndex as N, parseMarkdownLinks as O, fetchGitHubDiscussions as P, verifyUrl as Q, toCrawlPattern as R, validateGitDocsWithLlms as S, fetchLlmsTxt as T, isPrerelease as U, fetchReleaseNotes as V, parseSemver as W, parseGitHubUrl as X, normalizeRepoUrl as Y, parsePackageSpec as Z, fetchReadme as _, getInstalledSkillVersion as a, isShallowGitDocs as b, readLocalPackageInfo as c, resolvePackageDocs as d, formatIssueAsMarkdown as et, resolvePackageDocsWithAttempts as f, fetchGitHubRepoMeta as g, fetchGitDocs as h, fetchPkgDist as i, parseSkillFrontmatterName as j, fetchGitSkills as k, resolveInstalledVersion as l, MIN_GIT_DOCS as m, fetchNpmPackage as n, isGhAvailable as nt, parseVersionSpecifier as o, searchNpmPackages as p, fetchText as q, fetchNpmRegistryMeta as r, readLocalDependencies as s, fetchLatestVersion as t, generateIssueIndex as tt, resolveLocalPackageDocs as u, fetchReadmeContent as v, extractSections as w, resolveGitHubRepo as x, filterFrameworkDocs as y, fetchBlogReleases as z };
2152
2549
 
2153
- //# sourceMappingURL=npm.mjs.map
2550
+ //# sourceMappingURL=sources.mjs.map