skilld 1.6.2 → 1.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -20
- package/dist/_chunks/agent.mjs +14 -4
- package/dist/_chunks/agent.mjs.map +1 -1
- package/dist/_chunks/assemble.mjs +1 -1
- package/dist/_chunks/author-group.mjs +17 -0
- package/dist/_chunks/author-group.mjs.map +1 -0
- package/dist/_chunks/author.mjs +11 -12
- package/dist/_chunks/author.mjs.map +1 -1
- package/dist/_chunks/cache.mjs +12 -2
- package/dist/_chunks/cache.mjs.map +1 -1
- package/dist/_chunks/cache2.mjs +1 -1
- package/dist/_chunks/cli-helpers.mjs +4 -120
- package/dist/_chunks/cli-helpers.mjs.map +1 -1
- package/dist/_chunks/config.mjs +119 -27
- package/dist/_chunks/config.mjs.map +1 -1
- package/dist/_chunks/core.mjs +2 -2
- package/dist/_chunks/embedding-cache2.mjs +1 -1
- package/dist/_chunks/index.d.mts +4 -1
- package/dist/_chunks/index.d.mts.map +1 -1
- package/dist/_chunks/index3.d.mts +81 -78
- package/dist/_chunks/index3.d.mts.map +1 -1
- package/dist/_chunks/install.mjs +22 -34
- package/dist/_chunks/install.mjs.map +1 -1
- package/dist/_chunks/list.mjs +1 -1
- package/dist/_chunks/lockfile.mjs +5 -1
- package/dist/_chunks/lockfile.mjs.map +1 -1
- package/dist/_chunks/prefix.mjs +108 -0
- package/dist/_chunks/prefix.mjs.map +1 -0
- package/dist/_chunks/prepare.mjs +6 -2
- package/dist/_chunks/prepare.mjs.map +1 -1
- package/dist/_chunks/prepare2.mjs +4 -4
- package/dist/_chunks/prepare2.mjs.map +1 -1
- package/dist/_chunks/prompts.mjs +4 -237
- package/dist/_chunks/prompts.mjs.map +1 -1
- package/dist/_chunks/search-helpers.mjs +99 -0
- package/dist/_chunks/search-helpers.mjs.map +1 -0
- package/dist/_chunks/search-interactive.mjs +2 -2
- package/dist/_chunks/search-interactive.mjs.map +1 -1
- package/dist/_chunks/search.mjs +219 -1
- package/dist/_chunks/search.mjs.map +1 -0
- package/dist/_chunks/skill.mjs +329 -0
- package/dist/_chunks/skill.mjs.map +1 -0
- package/dist/_chunks/skills.mjs +2 -2
- package/dist/_chunks/sources.mjs +1180 -987
- package/dist/_chunks/sources.mjs.map +1 -1
- package/dist/_chunks/sync-registry.mjs +59 -0
- package/dist/_chunks/sync-registry.mjs.map +1 -0
- package/dist/_chunks/sync-shared2.mjs +14 -12
- package/dist/_chunks/sync-shared2.mjs.map +1 -1
- package/dist/_chunks/sync.mjs +253 -158
- package/dist/_chunks/sync.mjs.map +1 -1
- package/dist/_chunks/sync2.mjs +1 -1
- package/dist/_chunks/uninstall.mjs +5 -4
- package/dist/_chunks/uninstall.mjs.map +1 -1
- package/dist/_chunks/upload.mjs +152 -0
- package/dist/_chunks/upload.mjs.map +1 -0
- package/dist/_chunks/validate.mjs +1 -1
- package/dist/_chunks/version.mjs +30 -0
- package/dist/_chunks/version.mjs.map +1 -0
- package/dist/_chunks/wizard.mjs +3 -2
- package/dist/_chunks/wizard.mjs.map +1 -1
- package/dist/agent/index.d.mts +3 -1
- package/dist/agent/index.d.mts.map +1 -1
- package/dist/agent/index.mjs +4 -3
- package/dist/cache/index.d.mts +2 -2
- package/dist/cache/index.mjs +3 -3
- package/dist/cli.mjs +48 -21
- package/dist/cli.mjs.map +1 -1
- package/dist/index.d.mts +2 -2
- package/dist/index.mjs +3 -3
- package/dist/prepare.mjs +1 -1
- package/dist/sources/index.d.mts +2 -2
- package/dist/sources/index.mjs +2 -2
- package/dist/types.d.mts +2 -2
- package/package.json +12 -12
- package/dist/THIRD-PARTY-LICENSES.md +0 -38
- package/dist/_chunks/formatting.mjs +0 -82
- package/dist/_chunks/formatting.mjs.map +0 -1
- package/dist/_chunks/libs/@sinclair/typebox.mjs +0 -2304
- package/dist/_chunks/libs/@sinclair/typebox.mjs.map +0 -1
- package/dist/_chunks/rolldown-runtime.mjs +0 -11
- package/dist/_chunks/search2.mjs +0 -310
- package/dist/_chunks/search2.mjs.map +0 -1
package/dist/_chunks/sources.mjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { t as getCacheDir } from "./version.mjs";
|
|
2
2
|
import { i as readPackageJsonSafe } from "./package-json.mjs";
|
|
3
3
|
import { t as yamlEscape } from "./yaml.mjs";
|
|
4
4
|
import { i as parseFrontmatter, n as extractLinks, r as extractTitle, t as extractDescription } from "./markdown.mjs";
|
|
@@ -10,10 +10,10 @@ import { htmlToMarkdown } from "mdream";
|
|
|
10
10
|
import pLimit from "p-limit";
|
|
11
11
|
import { spawnSync } from "node:child_process";
|
|
12
12
|
import { ofetch } from "ofetch";
|
|
13
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
13
14
|
import { crawlAndGenerate } from "@mdream/crawl";
|
|
14
15
|
import { glob } from "tinyglobby";
|
|
15
16
|
import { downloadTemplate } from "giget";
|
|
16
|
-
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
17
17
|
import { Writable } from "node:stream";
|
|
18
18
|
import { resolvePathSync } from "mlly";
|
|
19
19
|
const BOT_USERS = new Set([
|
|
@@ -110,12 +110,36 @@ async function ghApiPaginated(endpoint) {
|
|
|
110
110
|
}
|
|
111
111
|
return results;
|
|
112
112
|
}
|
|
113
|
+
const SKILLD_USER_AGENT = "skilld/1.0 (+https://github.com/harlan-zw/skilld)";
|
|
113
114
|
const $fetch = ofetch.create({
|
|
114
115
|
retry: 3,
|
|
115
|
-
retryDelay:
|
|
116
|
+
retryDelay: 1e3,
|
|
117
|
+
retryStatusCodes: [
|
|
118
|
+
408,
|
|
119
|
+
429,
|
|
120
|
+
500,
|
|
121
|
+
502,
|
|
122
|
+
503,
|
|
123
|
+
504
|
|
124
|
+
],
|
|
116
125
|
timeout: 15e3,
|
|
117
|
-
headers: { "User-Agent":
|
|
126
|
+
headers: { "User-Agent": SKILLD_USER_AGENT }
|
|
118
127
|
});
|
|
128
|
+
function createRateLimitedRunner(intervalMs) {
|
|
129
|
+
let queue = Promise.resolve();
|
|
130
|
+
let lastRunAt = 0;
|
|
131
|
+
return async function runRateLimited(task) {
|
|
132
|
+
const run = async () => {
|
|
133
|
+
const waitMs = intervalMs - (Date.now() - lastRunAt);
|
|
134
|
+
if (waitMs > 0) await new Promise((resolve) => setTimeout(resolve, waitMs));
|
|
135
|
+
lastRunAt = Date.now();
|
|
136
|
+
return task();
|
|
137
|
+
};
|
|
138
|
+
const request = queue.then(run, run);
|
|
139
|
+
queue = request.then(() => void 0, () => void 0);
|
|
140
|
+
return request;
|
|
141
|
+
};
|
|
142
|
+
}
|
|
119
143
|
async function fetchText(url) {
|
|
120
144
|
return $fetch(url, { responseType: "text" }).catch(() => null);
|
|
121
145
|
}
|
|
@@ -175,6 +199,20 @@ function isGitHubRepoUrl(url) {
|
|
|
175
199
|
return false;
|
|
176
200
|
}
|
|
177
201
|
}
|
|
202
|
+
function isLikelyCodeHostUrl(url) {
|
|
203
|
+
if (!url) return false;
|
|
204
|
+
try {
|
|
205
|
+
const parsed = new URL(url);
|
|
206
|
+
return [
|
|
207
|
+
"github.com",
|
|
208
|
+
"www.github.com",
|
|
209
|
+
"gitlab.com",
|
|
210
|
+
"www.gitlab.com"
|
|
211
|
+
].includes(parsed.hostname);
|
|
212
|
+
} catch {
|
|
213
|
+
return false;
|
|
214
|
+
}
|
|
215
|
+
}
|
|
178
216
|
function parseGitHubUrl(url) {
|
|
179
217
|
const match = url.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:[/#]|$)/);
|
|
180
218
|
if (!match) return null;
|
|
@@ -183,6 +221,11 @@ function parseGitHubUrl(url) {
|
|
|
183
221
|
repo: match[2]
|
|
184
222
|
};
|
|
185
223
|
}
|
|
224
|
+
function parseGitHubRepoSlug(url) {
|
|
225
|
+
if (!url) return void 0;
|
|
226
|
+
const parsed = parseGitHubUrl(url);
|
|
227
|
+
return parsed ? `${parsed.owner}/${parsed.repo}` : void 0;
|
|
228
|
+
}
|
|
186
229
|
function normalizeRepoUrl(url) {
|
|
187
230
|
return url.replace(/^git\+/, "").replace(/#.*$/, "").replace(/\.git$/, "").replace(/^git:\/\//, "https://").replace(/^ssh:\/\/git@github\.com/, "https://github.com").replace(/^git@github\.com:/, "https://github.com/");
|
|
188
231
|
}
|
|
@@ -456,99 +499,6 @@ async function fetchBlogReleases(packageName, installedVersion) {
|
|
|
456
499
|
content: formatBlogRelease(r)
|
|
457
500
|
}));
|
|
458
501
|
}
|
|
459
|
-
async function fetchCrawledDocs(url, onProgress, maxPages = 200) {
|
|
460
|
-
const outputDir = join(tmpdir(), "skilld-crawl", Date.now().toString());
|
|
461
|
-
onProgress?.(`Crawling ${url}`);
|
|
462
|
-
const userLang = getUserLang();
|
|
463
|
-
const foreignUrls = /* @__PURE__ */ new Set();
|
|
464
|
-
const doCrawl = () => crawlAndGenerate({
|
|
465
|
-
urls: [url],
|
|
466
|
-
outputDir,
|
|
467
|
-
driver: "http",
|
|
468
|
-
generateLlmsTxt: false,
|
|
469
|
-
generateIndividualMd: true,
|
|
470
|
-
maxRequestsPerCrawl: maxPages,
|
|
471
|
-
onPage: (page) => {
|
|
472
|
-
const lang = extractHtmlLang(page.html);
|
|
473
|
-
if (lang && !lang.startsWith("en") && !lang.startsWith(userLang)) foreignUrls.add(page.url);
|
|
474
|
-
}
|
|
475
|
-
}, (progress) => {
|
|
476
|
-
if (progress.crawling.status === "processing" && progress.crawling.total > 0) onProgress?.(`Crawling ${progress.crawling.processed}/${progress.crawling.total} pages`);
|
|
477
|
-
});
|
|
478
|
-
let results = await doCrawl().catch((err) => {
|
|
479
|
-
onProgress?.(`Crawl failed: ${err?.message || err}`);
|
|
480
|
-
return [];
|
|
481
|
-
});
|
|
482
|
-
if (results.length === 0) {
|
|
483
|
-
onProgress?.("Retrying crawl");
|
|
484
|
-
results = await doCrawl().catch(() => []);
|
|
485
|
-
}
|
|
486
|
-
rmSync(outputDir, {
|
|
487
|
-
recursive: true,
|
|
488
|
-
force: true
|
|
489
|
-
});
|
|
490
|
-
const docs = [];
|
|
491
|
-
let localeFiltered = 0;
|
|
492
|
-
for (const result of results) {
|
|
493
|
-
if (!result.success || !result.content) continue;
|
|
494
|
-
if (foreignUrls.has(result.url)) {
|
|
495
|
-
localeFiltered++;
|
|
496
|
-
continue;
|
|
497
|
-
}
|
|
498
|
-
const segments = (new URL(result.url).pathname.replace(/\/$/, "") || "/index").split("/").filter(Boolean);
|
|
499
|
-
if (isForeignPathPrefix(segments[0], userLang)) {
|
|
500
|
-
localeFiltered++;
|
|
501
|
-
continue;
|
|
502
|
-
}
|
|
503
|
-
const path = `docs/${segments.join("/")}.md`;
|
|
504
|
-
docs.push({
|
|
505
|
-
path,
|
|
506
|
-
content: result.content
|
|
507
|
-
});
|
|
508
|
-
}
|
|
509
|
-
if (localeFiltered > 0) onProgress?.(`Filtered ${localeFiltered} foreign locale pages`);
|
|
510
|
-
onProgress?.(`Crawled ${docs.length} pages`);
|
|
511
|
-
return docs;
|
|
512
|
-
}
|
|
513
|
-
const HTML_LANG_RE = /<html[^>]*\slang=["']([^"']+)["']/i;
|
|
514
|
-
function extractHtmlLang(html) {
|
|
515
|
-
return HTML_LANG_RE.exec(html)?.[1]?.toLowerCase();
|
|
516
|
-
}
|
|
517
|
-
const LOCALE_CODES = new Set([
|
|
518
|
-
"ar",
|
|
519
|
-
"de",
|
|
520
|
-
"es",
|
|
521
|
-
"fr",
|
|
522
|
-
"id",
|
|
523
|
-
"it",
|
|
524
|
-
"ja",
|
|
525
|
-
"ko",
|
|
526
|
-
"nl",
|
|
527
|
-
"pl",
|
|
528
|
-
"pt",
|
|
529
|
-
"pt-br",
|
|
530
|
-
"ru",
|
|
531
|
-
"th",
|
|
532
|
-
"tr",
|
|
533
|
-
"uk",
|
|
534
|
-
"vi",
|
|
535
|
-
"zh",
|
|
536
|
-
"zh-cn",
|
|
537
|
-
"zh-tw"
|
|
538
|
-
]);
|
|
539
|
-
function isForeignPathPrefix(segment, userLang) {
|
|
540
|
-
if (!segment) return false;
|
|
541
|
-
const lower = segment.toLowerCase();
|
|
542
|
-
if (lower === "en" || lower.startsWith(userLang)) return false;
|
|
543
|
-
return LOCALE_CODES.has(lower);
|
|
544
|
-
}
|
|
545
|
-
function getUserLang() {
|
|
546
|
-
const code = (process.env.LC_ALL || process.env.LANG || process.env.LANGUAGE || "").split(/[_.:-]/)[0]?.toLowerCase() || "";
|
|
547
|
-
return code.length >= 2 ? code.slice(0, 2) : "en";
|
|
548
|
-
}
|
|
549
|
-
function toCrawlPattern(docsUrl) {
|
|
550
|
-
return `${docsUrl.replace(/\/+$/, "")}/**`;
|
|
551
|
-
}
|
|
552
502
|
let _ghAvailable;
|
|
553
503
|
function isGhAvailable() {
|
|
554
504
|
if (_ghAvailable !== void 0) return _ghAvailable;
|
|
@@ -870,992 +820,1235 @@ function generateIssueIndex(issues) {
|
|
|
870
820
|
}
|
|
871
821
|
return sections.join("\n");
|
|
872
822
|
}
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
"support"
|
|
878
|
-
]);
|
|
879
|
-
const LOW_VALUE_CATEGORIES = new Set([
|
|
880
|
-
"show and tell",
|
|
881
|
-
"ideas",
|
|
882
|
-
"polls"
|
|
883
|
-
]);
|
|
884
|
-
const TITLE_NOISE_RE = /looking .*(?:developer|engineer|freelanc)|hiring|job post|guide me to (?:complete|finish|build)|help me (?:complete|finish|build)|seeking .* tutorial|recommend.* course/i;
|
|
885
|
-
const MIN_DISCUSSION_SCORE = 3;
|
|
886
|
-
function scoreComment(c) {
|
|
887
|
-
return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions);
|
|
823
|
+
async function fetchLlmsUrl(docsUrl) {
|
|
824
|
+
const llmsUrl = `${new URL(docsUrl).origin}/llms.txt`;
|
|
825
|
+
if (await verifyUrl(llmsUrl)) return llmsUrl;
|
|
826
|
+
return null;
|
|
888
827
|
}
|
|
889
|
-
function
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
|
|
896
|
-
...d.topComments.map((c) => c.body)
|
|
897
|
-
].join("\n"))) score += 3;
|
|
898
|
-
score += Math.min(d.upvoteCount, 5);
|
|
899
|
-
if (d.answer) {
|
|
900
|
-
score += 2;
|
|
901
|
-
if (d.answer.length > 100) score += 1;
|
|
902
|
-
}
|
|
903
|
-
if (d.topComments.some((c) => c.isMaintainer)) score += 2;
|
|
904
|
-
if (d.topComments.some((c) => c.reactions > 0)) score += 1;
|
|
905
|
-
return score;
|
|
828
|
+
async function fetchLlmsTxt(url) {
|
|
829
|
+
const content = await fetchText(url);
|
|
830
|
+
if (!content || content.length < 50) return null;
|
|
831
|
+
return {
|
|
832
|
+
raw: content,
|
|
833
|
+
links: parseMarkdownLinks(content)
|
|
834
|
+
};
|
|
906
835
|
}
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
cutoff.setMonth(cutoff.getMonth() + 6);
|
|
912
|
-
if (cutoff < /* @__PURE__ */ new Date()) return [];
|
|
913
|
-
}
|
|
836
|
+
function parseMarkdownLinks(content) {
|
|
837
|
+
return extractLinks(content).filter((l) => l.url.endsWith(".md"));
|
|
838
|
+
}
|
|
839
|
+
function isSafeUrl(url) {
|
|
914
840
|
try {
|
|
915
|
-
const
|
|
916
|
-
|
|
917
|
-
|
|
918
|
-
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
|
|
922
|
-
"-f",
|
|
923
|
-
`repo=${repo}`
|
|
924
|
-
], {
|
|
925
|
-
encoding: "utf-8",
|
|
926
|
-
maxBuffer: 10 * 1024 * 1024
|
|
927
|
-
});
|
|
928
|
-
if (!result) return [];
|
|
929
|
-
const nodes = JSON.parse(result)?.data?.repository?.discussions?.nodes;
|
|
930
|
-
if (!Array.isArray(nodes)) return [];
|
|
931
|
-
const fromTs = fromDate ? new Date(fromDate).getTime() : null;
|
|
932
|
-
return nodes.filter((d) => d.author && !BOT_USERS.has(d.author.login)).filter((d) => {
|
|
933
|
-
const cat = (d.category?.name || "").toLowerCase();
|
|
934
|
-
return !LOW_VALUE_CATEGORIES.has(cat);
|
|
935
|
-
}).filter((d) => !fromTs || new Date(d.createdAt).getTime() >= fromTs).map((d) => {
|
|
936
|
-
let answer;
|
|
937
|
-
if (d.answer?.body) {
|
|
938
|
-
const isMaintainer = [
|
|
939
|
-
"OWNER",
|
|
940
|
-
"MEMBER",
|
|
941
|
-
"COLLABORATOR"
|
|
942
|
-
].includes(d.answer.authorAssociation);
|
|
943
|
-
const author = d.answer.author?.login;
|
|
944
|
-
answer = `${isMaintainer && author ? `**@${author}** [maintainer]:\n\n` : ""}${d.answer.body}`;
|
|
945
|
-
}
|
|
946
|
-
const comments = (d.comments?.nodes || []).filter((c) => c.author && !BOT_USERS.has(c.author.login)).filter((c) => !COMMENT_NOISE_RE.test((c.body || "").trim())).map((c) => {
|
|
947
|
-
const isMaintainer = [
|
|
948
|
-
"OWNER",
|
|
949
|
-
"MEMBER",
|
|
950
|
-
"COLLABORATOR"
|
|
951
|
-
].includes(c.authorAssociation);
|
|
952
|
-
return {
|
|
953
|
-
body: c.body || "",
|
|
954
|
-
author: c.author.login,
|
|
955
|
-
reactions: c.reactions?.totalCount || 0,
|
|
956
|
-
isMaintainer
|
|
957
|
-
};
|
|
958
|
-
}).sort((a, b) => scoreComment(b) - scoreComment(a)).slice(0, 3);
|
|
959
|
-
return {
|
|
960
|
-
number: d.number,
|
|
961
|
-
title: d.title,
|
|
962
|
-
body: d.body || "",
|
|
963
|
-
category: d.category?.name || "",
|
|
964
|
-
createdAt: d.createdAt,
|
|
965
|
-
url: d.url,
|
|
966
|
-
upvoteCount: d.upvoteCount || 0,
|
|
967
|
-
comments: d.comments?.totalCount || 0,
|
|
968
|
-
isMaintainer: [
|
|
969
|
-
"OWNER",
|
|
970
|
-
"MEMBER",
|
|
971
|
-
"COLLABORATOR"
|
|
972
|
-
].includes(d.authorAssociation),
|
|
973
|
-
answer,
|
|
974
|
-
topComments: comments
|
|
975
|
-
};
|
|
976
|
-
}).map((d) => ({
|
|
977
|
-
d,
|
|
978
|
-
score: scoreDiscussion(d)
|
|
979
|
-
})).filter(({ score }) => score >= MIN_DISCUSSION_SCORE).sort((a, b) => {
|
|
980
|
-
const aHigh = HIGH_VALUE_CATEGORIES.has(a.d.category.toLowerCase()) ? 1 : 0;
|
|
981
|
-
const bHigh = HIGH_VALUE_CATEGORIES.has(b.d.category.toLowerCase()) ? 1 : 0;
|
|
982
|
-
if (aHigh !== bHigh) return bHigh - aHigh;
|
|
983
|
-
return b.score - a.score;
|
|
984
|
-
}).slice(0, limit).map(({ d }) => d);
|
|
841
|
+
const parsed = new URL(url);
|
|
842
|
+
if (parsed.protocol !== "https:") return false;
|
|
843
|
+
const host = parsed.hostname;
|
|
844
|
+
if (host === "localhost" || host === "0.0.0.0" || host === "[::1]") return false;
|
|
845
|
+
if (/^(?:127\.|10\.|172\.(?:1[6-9]|2\d|3[01])\.|192\.168\.|169\.254\.)/.test(host)) return false;
|
|
846
|
+
if (/^\[(?:f[cd]|fe[89ab]|::ffff:)/i.test(host)) return false;
|
|
847
|
+
return true;
|
|
985
848
|
} catch {
|
|
986
|
-
return
|
|
849
|
+
return false;
|
|
987
850
|
}
|
|
988
851
|
}
|
|
989
|
-
function
|
|
990
|
-
const
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
`# ${d.title}`
|
|
1005
|
-
];
|
|
1006
|
-
if (d.body) lines.push("", truncateBody(d.body, bodyLimit));
|
|
1007
|
-
if (d.answer) lines.push("", "---", "", "## Accepted Answer", "", truncateBody(d.answer, 1e3));
|
|
1008
|
-
else if (d.topComments.length > 0) {
|
|
1009
|
-
lines.push("", "---", "", "## Top Comments");
|
|
1010
|
-
for (const c of d.topComments) {
|
|
1011
|
-
const reactions = c.reactions > 0 ? ` (+${c.reactions})` : "";
|
|
1012
|
-
const maintainer = c.isMaintainer ? " [maintainer]" : "";
|
|
1013
|
-
lines.push("", `**@${c.author}**${maintainer}${reactions}:`, "", truncateBody(c.body, 600));
|
|
1014
|
-
}
|
|
1015
|
-
}
|
|
1016
|
-
return lines.join("\n");
|
|
852
|
+
async function downloadLlmsDocs(llmsContent, baseUrl, onProgress) {
|
|
853
|
+
const limit = pLimit(5);
|
|
854
|
+
let completed = 0;
|
|
855
|
+
return (await Promise.all(llmsContent.links.map((link) => limit(async () => {
|
|
856
|
+
const url = link.url.startsWith("http") ? link.url : `${baseUrl.replace(/\/$/, "")}${link.url.startsWith("/") ? "" : "/"}${link.url}`;
|
|
857
|
+
if (!isSafeUrl(url)) return null;
|
|
858
|
+
const content = await fetchText(url);
|
|
859
|
+
onProgress?.(link.url, ++completed, llmsContent.links.length);
|
|
860
|
+
if (content && content.length > 100) return {
|
|
861
|
+
url: link.url.startsWith("http") ? new URL(link.url).pathname : link.url,
|
|
862
|
+
title: link.title,
|
|
863
|
+
content
|
|
864
|
+
};
|
|
865
|
+
return null;
|
|
866
|
+
})))).filter((d) => d !== null);
|
|
1017
867
|
}
|
|
1018
|
-
function
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
[
|
|
1024
|
-
"---",
|
|
1025
|
-
`total: ${discussions.length}`,
|
|
1026
|
-
`answered: ${answered}`,
|
|
1027
|
-
"---"
|
|
1028
|
-
].join("\n"),
|
|
1029
|
-
"",
|
|
1030
|
-
"# Discussions Index",
|
|
1031
|
-
""
|
|
1032
|
-
];
|
|
1033
|
-
const cats = [...byCategory.keys()].sort((a, b) => {
|
|
1034
|
-
return (HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1) - (HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1) || a.localeCompare(b);
|
|
1035
|
-
});
|
|
1036
|
-
for (const cat of cats) {
|
|
1037
|
-
const group = byCategory.get(cat);
|
|
1038
|
-
sections.push(`## ${cat} (${group.length})`, "");
|
|
1039
|
-
for (const d of group) {
|
|
1040
|
-
const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : "";
|
|
1041
|
-
const answered = d.answer ? " [answered]" : "";
|
|
1042
|
-
const date = isoDate(d.createdAt);
|
|
1043
|
-
sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`);
|
|
1044
|
-
}
|
|
1045
|
-
sections.push("");
|
|
868
|
+
function normalizeLlmsLinks(content, baseUrl) {
|
|
869
|
+
let normalized = content;
|
|
870
|
+
if (baseUrl) {
|
|
871
|
+
const escaped = baseUrl.replace(/\/$/, "").replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
872
|
+
normalized = normalized.replace(new RegExp(`\\]\\(${escaped}(/[^)]+\\.md)\\)`, "g"), "](./docs$1)");
|
|
1046
873
|
}
|
|
1047
|
-
|
|
874
|
+
normalized = normalized.replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)");
|
|
875
|
+
return normalized;
|
|
1048
876
|
}
|
|
1049
|
-
function
|
|
1050
|
-
const
|
|
1051
|
-
|
|
1052
|
-
const
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
const
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
const list = byDir.get(dir);
|
|
1060
|
-
if (list) list.push(doc);
|
|
1061
|
-
else byDir.set(dir, [doc]);
|
|
877
|
+
function extractSections(content, patterns) {
|
|
878
|
+
const sections = [];
|
|
879
|
+
const parts = content.split(/\n---\n/);
|
|
880
|
+
for (const part of parts) {
|
|
881
|
+
const urlMatch = part.match(/^url: *(\S.*)$/m);
|
|
882
|
+
if (!urlMatch) continue;
|
|
883
|
+
const url = urlMatch[1];
|
|
884
|
+
if (patterns.some((p) => url.includes(p))) {
|
|
885
|
+
const contentStart = part.indexOf("\n", part.indexOf("url:"));
|
|
886
|
+
if (contentStart > -1) sections.push(part.slice(contentStart + 1));
|
|
1062
887
|
}
|
|
1063
888
|
}
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
const rel = file.path.slice(5);
|
|
1074
|
-
const title = extractTitle(file.content) || rel.replace(/\.md$/, "");
|
|
1075
|
-
const desc = extractDescription(file.content);
|
|
1076
|
-
const descPart = desc ? `: ${desc}` : "";
|
|
1077
|
-
sections.push(`- [${title}](./${rel})${descPart}`);
|
|
889
|
+
if (sections.length === 0) return null;
|
|
890
|
+
return sections.join("\n\n---\n\n");
|
|
891
|
+
}
|
|
892
|
+
const MIN_GIT_DOCS = 5;
|
|
893
|
+
const isShallowGitDocs = (n) => n > 0 && n < 5;
|
|
894
|
+
async function listFilesAtRef(owner, repo, ref) {
|
|
895
|
+
if (!isKnownPrivateRepo(owner, repo)) {
|
|
896
|
+
const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/files/${ref}`).catch(() => null);
|
|
897
|
+
if (data?.files?.length) return data.files.map((f) => f.path);
|
|
1078
898
|
}
|
|
1079
|
-
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
const rel = file.path.slice(5);
|
|
1084
|
-
const title = extractTitle(file.content) || rel.replace(/\.md$/, "").split("/").pop();
|
|
1085
|
-
const desc = extractDescription(file.content);
|
|
1086
|
-
const descPart = desc ? `: ${desc}` : "";
|
|
1087
|
-
sections.push(`- [${title}](./${rel})${descPart}`);
|
|
1088
|
-
}
|
|
1089
|
-
sections.push("");
|
|
899
|
+
const tree = await ghApi(`repos/${owner}/${repo}/git/trees/${ref}?recursive=1`);
|
|
900
|
+
if (tree?.tree?.length) {
|
|
901
|
+
markRepoPrivate(owner, repo);
|
|
902
|
+
return tree.tree.map((f) => f.path);
|
|
1090
903
|
}
|
|
1091
|
-
return
|
|
904
|
+
return [];
|
|
1092
905
|
}
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
|
|
1096
|
-
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
|
|
1100
|
-
|
|
1101
|
-
|
|
1102
|
-
|
|
1103
|
-
|
|
1104
|
-
|
|
1105
|
-
|
|
1106
|
-
|
|
1107
|
-
|
|
1108
|
-
|
|
1109
|
-
|
|
1110
|
-
|
|
1111
|
-
"*.global.*",
|
|
1112
|
-
"*.browser.*",
|
|
1113
|
-
"*.map",
|
|
1114
|
-
"*.map.js",
|
|
1115
|
-
"CHANGELOG*",
|
|
1116
|
-
"LICENSE*",
|
|
1117
|
-
"README*"
|
|
1118
|
-
];
|
|
1119
|
-
const MAX_FILE_SIZE = 500 * 1024;
|
|
1120
|
-
async function resolveEntryFiles(packageDir) {
|
|
1121
|
-
if (!existsSync(join(packageDir, "package.json"))) return [];
|
|
1122
|
-
const files = await glob(["**/*.d.{ts,mts,cts}"], {
|
|
1123
|
-
cwd: packageDir,
|
|
1124
|
-
ignore: [...SKIP_DIRS.map((d) => `**/${d}/**`), ...SKIP_PATTERNS],
|
|
1125
|
-
absolute: false,
|
|
1126
|
-
expandDirectories: false
|
|
1127
|
-
});
|
|
1128
|
-
const entries = [];
|
|
1129
|
-
for (const file of files) {
|
|
1130
|
-
const absPath = join(packageDir, file);
|
|
1131
|
-
let content;
|
|
1132
|
-
try {
|
|
1133
|
-
content = readFileSync(absPath, "utf-8");
|
|
1134
|
-
} catch {
|
|
1135
|
-
continue;
|
|
906
|
+
async function findGitTag(owner, repo, version, packageName, branchHint) {
|
|
907
|
+
const candidates = [`v${version}`, version];
|
|
908
|
+
if (packageName) candidates.push(`${packageName}@${version}`);
|
|
909
|
+
for (const tag of candidates) {
|
|
910
|
+
const files = await listFilesAtRef(owner, repo, tag);
|
|
911
|
+
if (files.length > 0) return {
|
|
912
|
+
ref: tag,
|
|
913
|
+
files
|
|
914
|
+
};
|
|
915
|
+
}
|
|
916
|
+
if (packageName) {
|
|
917
|
+
const latestTag = await findLatestReleaseTag(owner, repo, packageName);
|
|
918
|
+
if (latestTag) {
|
|
919
|
+
const files = await listFilesAtRef(owner, repo, latestTag);
|
|
920
|
+
if (files.length > 0) return {
|
|
921
|
+
ref: latestTag,
|
|
922
|
+
files
|
|
923
|
+
};
|
|
1136
924
|
}
|
|
1137
|
-
if (content.length > MAX_FILE_SIZE) continue;
|
|
1138
|
-
entries.push({
|
|
1139
|
-
path: file,
|
|
1140
|
-
content,
|
|
1141
|
-
type: "types"
|
|
1142
|
-
});
|
|
1143
925
|
}
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
1149
|
-
|
|
1150
|
-
|
|
1151
|
-
localPath: trimmed.startsWith("~") ? resolve(process.env.HOME || "", trimmed.slice(1)) : resolve(trimmed)
|
|
1152
|
-
};
|
|
1153
|
-
if (trimmed.startsWith("git@")) {
|
|
1154
|
-
const gh = parseGitHubUrl(normalizeRepoUrl(trimmed));
|
|
1155
|
-
if (gh) return {
|
|
1156
|
-
type: "github",
|
|
1157
|
-
owner: gh.owner,
|
|
1158
|
-
repo: gh.repo
|
|
926
|
+
const branches = branchHint ? [branchHint, ...["main", "master"].filter((b) => b !== branchHint)] : ["main", "master"];
|
|
927
|
+
for (const branch of branches) {
|
|
928
|
+
const files = await listFilesAtRef(owner, repo, branch);
|
|
929
|
+
if (files.length > 0) return {
|
|
930
|
+
ref: branch,
|
|
931
|
+
files,
|
|
932
|
+
fallback: true
|
|
1159
933
|
};
|
|
1160
|
-
return null;
|
|
1161
934
|
}
|
|
1162
|
-
if (trimmed.startsWith("https://") || trimmed.startsWith("http://")) return parseGitUrl(trimmed);
|
|
1163
|
-
if (/^[\w.-]+\/[\w.-]+$/.test(trimmed)) return {
|
|
1164
|
-
type: "github",
|
|
1165
|
-
owner: trimmed.split("/")[0],
|
|
1166
|
-
repo: trimmed.split("/")[1]
|
|
1167
|
-
};
|
|
1168
935
|
return null;
|
|
1169
936
|
}
|
|
1170
|
-
function
|
|
1171
|
-
|
|
1172
|
-
const
|
|
1173
|
-
if (
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
ref: parts[3],
|
|
1183
|
-
skillPath: parts.length > 4 ? parts.slice(4).join("/") : void 0
|
|
1184
|
-
};
|
|
1185
|
-
return {
|
|
1186
|
-
type: "github",
|
|
1187
|
-
owner,
|
|
1188
|
-
repo
|
|
1189
|
-
};
|
|
1190
|
-
}
|
|
1191
|
-
if (parsed.hostname === "gitlab.com") {
|
|
1192
|
-
const parts = parsed.pathname.replace(/^\//, "").replace(/\.git$/, "").split("/");
|
|
1193
|
-
const owner = parts[0];
|
|
1194
|
-
const repo = parts[1];
|
|
1195
|
-
if (!owner || !repo) return null;
|
|
1196
|
-
return {
|
|
1197
|
-
type: "gitlab",
|
|
1198
|
-
owner,
|
|
1199
|
-
repo
|
|
1200
|
-
};
|
|
1201
|
-
}
|
|
1202
|
-
return null;
|
|
1203
|
-
} catch {
|
|
1204
|
-
return null;
|
|
937
|
+
async function fetchUnghReleases(owner, repo) {
|
|
938
|
+
if (!isKnownPrivateRepo(owner, repo)) {
|
|
939
|
+
const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`).catch(() => null);
|
|
940
|
+
if (data?.releases?.length) return data.releases;
|
|
941
|
+
}
|
|
942
|
+
const raw = await ghApiPaginated(`repos/${owner}/${repo}/releases`);
|
|
943
|
+
if (raw.length > 0) {
|
|
944
|
+
markRepoPrivate(owner, repo);
|
|
945
|
+
return raw.map((r) => ({
|
|
946
|
+
tag: r.tag_name,
|
|
947
|
+
publishedAt: r.published_at
|
|
948
|
+
}));
|
|
1205
949
|
}
|
|
950
|
+
return [];
|
|
1206
951
|
}
|
|
1207
|
-
function
|
|
1208
|
-
const
|
|
1209
|
-
return
|
|
1210
|
-
name: fm.name,
|
|
1211
|
-
description: fm.description
|
|
1212
|
-
};
|
|
952
|
+
async function findLatestReleaseTag(owner, repo, packageName) {
|
|
953
|
+
const prefix = `${packageName}@`;
|
|
954
|
+
return (await fetchUnghReleases(owner, repo)).find((r) => r.tag.startsWith(prefix))?.tag ?? null;
|
|
1213
955
|
}
|
|
1214
|
-
function
|
|
1215
|
-
|
|
1216
|
-
if (!existsSync(dir)) return files;
|
|
1217
|
-
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
1218
|
-
const relPath = prefix ? `${prefix}/${entry.name}` : entry.name;
|
|
1219
|
-
const fullPath = resolve(dir, entry.name);
|
|
1220
|
-
if (entry.isDirectory()) files.push(...collectFiles(fullPath, relPath));
|
|
1221
|
-
else if (entry.isFile()) files.push({
|
|
1222
|
-
path: relPath,
|
|
1223
|
-
content: readFileSync(fullPath, "utf-8")
|
|
1224
|
-
});
|
|
1225
|
-
}
|
|
1226
|
-
return files;
|
|
956
|
+
function filterDocFiles(files, pathPrefix) {
|
|
957
|
+
return files.filter((f) => f.startsWith(pathPrefix) && /\.(?:md|mdx)$/.test(f));
|
|
1227
958
|
}
|
|
1228
|
-
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
|
|
1232
|
-
|
|
959
|
+
const FRAMEWORK_NAMES = new Set([
|
|
960
|
+
"vue",
|
|
961
|
+
"react",
|
|
962
|
+
"solid",
|
|
963
|
+
"angular",
|
|
964
|
+
"svelte",
|
|
965
|
+
"preact",
|
|
966
|
+
"lit",
|
|
967
|
+
"qwik"
|
|
968
|
+
]);
|
|
969
|
+
function filterFrameworkDocs(files, packageName) {
|
|
970
|
+
if (!packageName) return files;
|
|
971
|
+
const shortName = packageName.replace(/^@.*\//, "");
|
|
972
|
+
const targetFramework = [...FRAMEWORK_NAMES].find((fw) => shortName.includes(fw));
|
|
973
|
+
if (!targetFramework) return files;
|
|
974
|
+
const otherFrameworks = [...FRAMEWORK_NAMES].filter((fw) => fw !== targetFramework);
|
|
975
|
+
const excludePattern = new RegExp(`\\b(?:${otherFrameworks.join("|")})\\b`);
|
|
976
|
+
return files.filter((f) => !excludePattern.test(f));
|
|
1233
977
|
}
|
|
1234
|
-
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
|
|
1238
|
-
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
|
|
978
|
+
const NOISE_PATTERNS = [
|
|
979
|
+
/^\.changeset\//,
|
|
980
|
+
/CHANGELOG\.md$/i,
|
|
981
|
+
/CONTRIBUTING\.md$/i,
|
|
982
|
+
/^\.github\//
|
|
983
|
+
];
|
|
984
|
+
const EXCLUDE_DIRS = new Set([
|
|
985
|
+
"test",
|
|
986
|
+
"tests",
|
|
987
|
+
"__tests__",
|
|
988
|
+
"fixtures",
|
|
989
|
+
"fixture",
|
|
990
|
+
"examples",
|
|
991
|
+
"example",
|
|
992
|
+
"node_modules",
|
|
993
|
+
".git",
|
|
994
|
+
"dist",
|
|
995
|
+
"build",
|
|
996
|
+
"coverage",
|
|
997
|
+
"e2e",
|
|
998
|
+
"spec",
|
|
999
|
+
"mocks",
|
|
1000
|
+
"__mocks__"
|
|
1001
|
+
]);
|
|
1002
|
+
const DOC_DIR_BONUS = new Set([
|
|
1003
|
+
"docs",
|
|
1004
|
+
"documentation",
|
|
1005
|
+
"pages",
|
|
1006
|
+
"content",
|
|
1007
|
+
"website",
|
|
1008
|
+
"guide",
|
|
1009
|
+
"guides",
|
|
1010
|
+
"wiki",
|
|
1011
|
+
"manual",
|
|
1012
|
+
"api"
|
|
1013
|
+
]);
|
|
1014
|
+
function hasExcludedDir(path) {
|
|
1015
|
+
return path.split("/").some((p) => EXCLUDE_DIRS.has(p.toLowerCase()));
|
|
1016
|
+
}
|
|
1017
|
+
function getPathDepth(path) {
|
|
1018
|
+
return path.split("/").filter(Boolean).length;
|
|
1019
|
+
}
|
|
1020
|
+
function hasDocDirBonus(path) {
|
|
1021
|
+
return path.split("/").some((p) => DOC_DIR_BONUS.has(p.toLowerCase()));
|
|
1022
|
+
}
|
|
1023
|
+
function scoreDocDir(dir, fileCount) {
|
|
1024
|
+
const depth = getPathDepth(dir) || 1;
|
|
1025
|
+
return fileCount * (hasDocDirBonus(dir) ? 1.5 : 1) / depth;
|
|
1026
|
+
}
|
|
1027
|
+
/**
 * Heuristically locate documentation markdown inside a repo file listing.
 * Strategy, in order:
 *   1. Monorepo: for scoped packages, prefer packages/<name>/ when it holds >= 3 md files.
 *   2. The largest "/docs/" subtree with >= 3 files.
 *   3. Best-scoring directory (scoreDocDir) with >= 5 files, excluding test/build dirs.
 * Returns { files, prefix } or null; `prefix` is the path portion callers strip.
 */
function discoverDocFiles(allFiles, packageName) {
	// Only .md/.mdx files, not noise (NOISE_PATTERNS), and not repo-root files.
	const mdFiles = allFiles.filter((f) => /\.(?:md|mdx)$/.test(f)).filter((f) => !NOISE_PATTERNS.some((p) => p.test(f))).filter((f) => f.includes("/"));
	if (packageName?.includes("/")) {
		// Scoped package: try the matching monorepo subpackage directory first.
		const subPkgPrefix = `packages/${packageName.split("/").pop().toLowerCase()}/`;
		const subPkgFiles = mdFiles.filter((f) => f.startsWith(subPkgPrefix));
		if (subPkgFiles.length >= 3) return {
			files: subPkgFiles,
			prefix: subPkgPrefix
		};
	}
	// Group files by their nearest enclosing ".../docs/" prefix.
	const docsGroups = /* @__PURE__ */ new Map();
	for (const file of mdFiles) {
		const docsIdx = file.lastIndexOf("/docs/");
		if (docsIdx === -1) continue;
		// +6 keeps "/docs/" itself in the group key.
		mapInsert(docsGroups, file.slice(0, docsIdx + 6), () => []).push(file);
	}
	if (docsGroups.size > 0) {
		const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0];
		if (largest[1].length >= 3) {
			const fullPrefix = largest[0];
			// Strip everything before "docs/" so paths stay relative to the docs tree.
			const docsIdx = fullPrefix.lastIndexOf("docs/");
			const stripPrefix = docsIdx > 0 ? fullPrefix.slice(0, docsIdx) : "";
			return {
				files: largest[1],
				prefix: stripPrefix
			};
		}
	}
	// Fallback: group by immediate parent directory and pick the best-scoring one.
	const dirGroups = /* @__PURE__ */ new Map();
	for (const file of mdFiles) {
		if (hasExcludedDir(file)) continue;
		const lastSlash = file.lastIndexOf("/");
		if (lastSlash === -1) continue;
		mapInsert(dirGroups, file.slice(0, lastSlash + 1), () => []).push(file);
	}
	if (dirGroups.size === 0) return null;
	const scored = Array.from(dirGroups.entries(), ([dir, files]) => ({
		dir,
		files,
		score: scoreDocDir(dir, files.length)
	})).filter((d) => d.files.length >= 5).sort((a, b) => b.score - a.score);
	if (scored.length === 0) return null;
	const best = scored[0];
	return {
		files: best.files,
		prefix: best.dir
	};
}
|
|
1250
|
-
function
|
|
1251
|
-
|
|
1252
|
-
|
|
1253
|
-
|
|
1254
|
-
const
|
|
1255
|
-
|
|
1256
|
-
|
|
1257
|
-
|
|
1075
|
+
/** List doc files under `pathPrefix` at a given git ref. */
async function listDocsAtRef(owner, repo, ref, pathPrefix = "docs/") {
	const allFiles = await listFilesAtRef(owner, repo, ref);
	return filterDocFiles(allFiles, pathPrefix);
}
|
|
1078
|
+
/**
 * Resolve documentation files for a package version from its git repo.
 * Honors per-package doc overrides first; otherwise finds the tag matching
 * `version`, collects markdown under docs/ (falling back to heuristic
 * discovery), and filters out docs for other frontend frameworks.
 * Returns { baseUrl, ref, files, docsPrefix?, allFiles?, fallback? } or null.
 */
async function fetchGitDocs(owner, repo, version, packageName, repoUrl) {
	const override = packageName ? getDocOverride(packageName) : void 0;
	if (override) {
		// Overrides without a pinned ref fall back to "main" and are flagged.
		const ref = override.ref || "main";
		const fallback = !override.ref;
		const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`);
		if (files.length === 0) return null;
		return {
			baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,
			ref,
			files,
			fallback,
			// Record the prefix only when it differs from the conventional docs/ layout.
			docsPrefix: `${override.path}/` !== "docs/" ? `${override.path}/` : void 0
		};
	}
	const tag = await findGitTag(owner, repo, version, packageName, repoUrl ? extractBranchHint(repoUrl) : void 0);
	if (!tag) return null;
	let docs = filterDocFiles(tag.files, "docs/");
	let docsPrefix;
	let allFiles;
	if (docs.length === 0) {
		// No conventional docs/ directory — try heuristic discovery.
		const discovered = discoverDocFiles(tag.files, packageName);
		if (discovered) {
			docs = discovered.files;
			docsPrefix = discovered.prefix || void 0;
			allFiles = tag.files;
		}
	}
	// Drop docs aimed at other frameworks than this package's own.
	docs = filterFrameworkDocs(docs, packageName);
	if (docs.length === 0) return null;
	return {
		baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,
		ref: tag.ref,
		files: docs,
		docsPrefix,
		allFiles,
		fallback: tag.fallback
	};
}
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1117
|
+
/** Canonicalize a doc path for comparison: drop a leading "/" and a trailing .md/.mdx extension. */
function normalizePath(p) {
	const withoutLeadingSlash = p.replace(/^\//, "");
	return withoutLeadingSlash.replace(/\.(?:md|mdx)$/, "");
}
|
|
1120
|
+
/**
 * Sanity-check discovered git docs against llms.txt links: sample up to
 * 10 links, normalize both sides, and require that at least 30% of the
 * sampled link paths appear among (or as suffixes of) the repo files.
 */
function validateGitDocsWithLlms(llmsLinks, repoFiles) {
	// Nothing to compare against — treat as trivially valid.
	if (llmsLinks.length === 0) return {
		isValid: true,
		matchRatio: 1
	};
	const sample = llmsLinks.slice(0, 10);
	const wantedPaths = sample.map((link) => {
		let path = link.url;
		if (path.startsWith("http")) {
			try {
				path = new URL(path).pathname;
			} catch {
				// Malformed absolute URL — fall back to comparing the raw string.
			}
		}
		return normalizePath(path);
	});
	const availablePaths = new Set(repoFiles.map(normalizePath));
	let matches = 0;
	for (const linkPath of wantedPaths) {
		for (const repoPath of availablePaths) {
			const isMatch = repoPath === linkPath || repoPath.endsWith(`/${linkPath}`);
			if (isMatch) {
				matches++;
				break;
			}
		}
	}
	const matchRatio = matches / sample.length;
	return {
		isValid: matchRatio >= .3,
		matchRatio
	};
}
|
|
1277
|
-
async function
|
|
1278
|
-
const
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
const skill = readLocalSkill(dir, skillPath);
|
|
1288
|
-
return skill ? [skill] : [];
|
|
1289
|
-
}
|
|
1290
|
-
onProgress?.(`Downloading ${owner}/${repo}/skills@${ref}`);
|
|
1145
|
+
/**
 * Verify that a GitHub repo actually hosts the given npm package by probing
 * candidate package.json locations (repo root plus common monorepo layouts)
 * and comparing their "name" field against `packageName`.
 */
async function verifyNpmRepo(owner, repo, packageName) {
	const rawBase = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`;
	const unscopedName = packageName.replace(/^@.*\//, "");
	const dashedName = packageName.replace(/^@/, "").replace("/", "-");
	const candidatePaths = [
		"package.json",
		`packages/${unscopedName}/package.json`,
		`packages/${dashedName}/package.json`
	];
	for (const candidate of candidatePaths) {
		const body = await fetchGitHubRaw(`${rawBase}/${candidate}`);
		if (!body) continue;
		try {
			const manifest = JSON.parse(body);
			if (manifest.name === packageName) return true;
		} catch {
			// Unparseable manifest — try the next candidate path.
		}
	}
	return false;
}
|
|
1330
|
-
async function
|
|
1331
|
-
const
|
|
1332
|
-
|
|
1333
|
-
|
|
1334
|
-
|
|
1335
|
-
|
|
1336
|
-
const subdir = source.skillPath || "skills";
|
|
1337
|
-
onProgress?.(`Downloading ${owner}/${repo}/${subdir}@${ref}`);
|
|
1338
|
-
const { dir } = await downloadTemplate(`gitlab:${owner}/${repo}/${subdir}#${ref}`, {
|
|
1339
|
-
dir: tempDir,
|
|
1340
|
-
force: true
|
|
1341
|
-
});
|
|
1342
|
-
if (source.skillPath) {
|
|
1343
|
-
const skill = readLocalSkill(dir, source.skillPath);
|
|
1344
|
-
return { skills: skill ? [skill] : [] };
|
|
1345
|
-
}
|
|
1346
|
-
const skills = [];
|
|
1347
|
-
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
1348
|
-
if (!entry.isDirectory()) continue;
|
|
1349
|
-
const skill = readLocalSkill(resolve(dir, entry.name), `skills/${entry.name}`);
|
|
1350
|
-
if (skill) skills.push(skill);
|
|
1351
|
-
}
|
|
1352
|
-
if (skills.length > 0) {
|
|
1353
|
-
onProgress?.(`Found ${skills.length} skill(s)`);
|
|
1354
|
-
return { skills };
|
|
1355
|
-
}
|
|
1356
|
-
const content = await $fetch(`https://gitlab.com/${owner}/${repo}/-/raw/${ref}/SKILL.md`, { responseType: "text" }).catch(() => null);
|
|
1357
|
-
if (content) {
|
|
1358
|
-
const fm = parseSkillFrontmatterName(content);
|
|
1359
|
-
return { skills: [{
|
|
1360
|
-
name: fm.name || repo,
|
|
1361
|
-
description: fm.description || "",
|
|
1362
|
-
path: "",
|
|
1363
|
-
content,
|
|
1364
|
-
files: []
|
|
1365
|
-
}] };
|
|
1161
|
+
/**
 * Locate the GitHub repo for an npm package name. Strategy:
 *   1. Direct owner/repo guesses via ungh (scoped "@owner/name" -> owner/name,
 *      unscoped "name" -> name/name).
 *   2. `gh search repos` when the gh CLI is available.
 *   3. Anonymous GitHub search API.
 * Search results are accepted on a repo-name match or after verifying the
 * repo's package.json via verifyNpmRepo. Returns a github.com URL or null.
 *
 * Fix vs. previous version: dropped the dead no-op `.replace("/", "/")` on
 * the first candidate, and deduped candidates with a Set so unscoped names
 * no longer probe ungh with the identical URL twice.
 */
async function searchGitHubRepo(packageName) {
	const shortName = packageName.replace(/^@.*\//, "");
	// For unscoped names both candidates collapse to the same string; the Set
	// dedupes them so we only probe once.
	for (const candidate of new Set([packageName.replace(/^@/, ""), shortName])) {
		if (!candidate.includes("/")) {
			// Unscoped: guess the common owner/name === repo/name layout.
			if ((await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null))?.ok) return `https://github.com/${shortName}/${shortName}`;
			continue;
		}
		if ((await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null))?.ok) return `https://github.com/${candidate}`;
	}
	const searchTerm = packageName.replace(/^@/, "");
	if (isGhAvailable()) try {
		const { stdout: json } = spawnSync("gh", [
			"search",
			"repos",
			searchTerm,
			"--json",
			"fullName",
			"--limit",
			"5"
		], {
			encoding: "utf-8",
			timeout: 15e3
		});
		if (!json) throw new Error("no output");
		const repos = JSON.parse(json);
		// Prefer repos whose name matches the package (full or short) exactly.
		const match = repos.find((r) => r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`));
		if (match) return `https://github.com/${match.fullName}`;
		// Otherwise accept any result whose package.json names this package.
		for (const candidate of repos) {
			const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`);
			if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName)) return `https://github.com/${candidate.fullName}`;
		}
	} catch {
		// gh CLI failed or produced bad JSON — fall through to the HTTP search API.
	}
	const data = await $fetch(`https://api.github.com/search/repositories?q=${encodeURIComponent(`${searchTerm} in:name`)}&per_page=5`).catch(() => null);
	if (!data?.items?.length) return null;
	const match = data.items.find((r) => r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`));
	if (match) return `https://github.com/${match.full_name}`;
	for (const candidate of data.items) {
		const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`);
		if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName)) return `https://github.com/${candidate.full_name}`;
	}
	return null;
}
|
|
1382
|
-
async function
|
|
1383
|
-
const
|
|
1384
|
-
if (
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
links: parseMarkdownLinks(content)
|
|
1388
|
-
};
|
|
1389
|
-
}
|
|
1390
|
-
function parseMarkdownLinks(content) {
|
|
1391
|
-
return extractLinks(content).filter((l) => l.url.endsWith(".md"));
|
|
1392
|
-
}
|
|
1393
|
-
function isSafeUrl(url) {
|
|
1394
|
-
try {
|
|
1395
|
-
const parsed = new URL(url);
|
|
1396
|
-
if (parsed.protocol !== "https:") return false;
|
|
1397
|
-
const host = parsed.hostname;
|
|
1398
|
-
if (host === "localhost" || host === "0.0.0.0" || host === "[::1]") return false;
|
|
1399
|
-
if (/^(?:127\.|10\.|172\.(?:1[6-9]|2\d|3[01])\.|192\.168\.|169\.254\.)/.test(host)) return false;
|
|
1400
|
-
if (/^\[(?:f[cd]|fe[89ab]|::ffff:)/i.test(host)) return false;
|
|
1401
|
-
return true;
|
|
1402
|
-
} catch {
|
|
1403
|
-
return false;
|
|
1404
|
-
}
|
|
1405
|
-
}
|
|
1406
|
-
async function downloadLlmsDocs(llmsContent, baseUrl, onProgress) {
|
|
1407
|
-
const limit = pLimit(5);
|
|
1408
|
-
let completed = 0;
|
|
1409
|
-
return (await Promise.all(llmsContent.links.map((link) => limit(async () => {
|
|
1410
|
-
const url = link.url.startsWith("http") ? link.url : `${baseUrl.replace(/\/$/, "")}${link.url.startsWith("/") ? "" : "/"}${link.url}`;
|
|
1411
|
-
if (!isSafeUrl(url)) return null;
|
|
1412
|
-
const content = await fetchText(url);
|
|
1413
|
-
onProgress?.(link.url, ++completed, llmsContent.links.length);
|
|
1414
|
-
if (content && content.length > 100) return {
|
|
1415
|
-
url: link.url.startsWith("http") ? new URL(link.url).pathname : link.url,
|
|
1416
|
-
title: link.title,
|
|
1417
|
-
content
|
|
1418
|
-
};
|
|
1419
|
-
return null;
|
|
1420
|
-
})))).filter((d) => d !== null);
|
|
1421
|
-
}
|
|
1422
|
-
function normalizeLlmsLinks(content, baseUrl) {
|
|
1423
|
-
let normalized = content;
|
|
1424
|
-
if (baseUrl) {
|
|
1425
|
-
const escaped = baseUrl.replace(/\/$/, "").replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
1426
|
-
normalized = normalized.replace(new RegExp(`\\]\\(${escaped}(/[^)]+\\.md)\\)`, "g"), "](./docs$1)");
|
|
1427
|
-
}
|
|
1428
|
-
normalized = normalized.replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)");
|
|
1429
|
-
return normalized;
|
|
1430
|
-
}
|
|
1431
|
-
function extractSections(content, patterns) {
|
|
1432
|
-
const sections = [];
|
|
1433
|
-
const parts = content.split(/\n---\n/);
|
|
1434
|
-
for (const part of parts) {
|
|
1435
|
-
const urlMatch = part.match(/^url: *(\S.*)$/m);
|
|
1436
|
-
if (!urlMatch) continue;
|
|
1437
|
-
const url = urlMatch[1];
|
|
1438
|
-
if (patterns.some((p) => url.includes(p))) {
|
|
1439
|
-
const contentStart = part.indexOf("\n", part.indexOf("url:"));
|
|
1440
|
-
if (contentStart > -1) sections.push(part.slice(contentStart + 1));
|
|
1441
|
-
}
|
|
1442
|
-
}
|
|
1443
|
-
if (sections.length === 0) return null;
|
|
1444
|
-
return sections.join("\n\n---\n\n");
|
|
1203
|
+
/**
 * Resolve a repo's homepage: per-package doc override wins, otherwise the
 * GitHub repos API (ghApi first, then the anonymous REST endpoint).
 * Returns { homepage } or null.
 */
async function fetchGitHubRepoMeta(owner, repo, packageName) {
	const override = packageName ? getDocOverride(packageName) : void 0;
	if (override?.homepage) return { homepage: override.homepage };
	const apiPath = `repos/${owner}/${repo}`;
	let data = await ghApi(apiPath);
	if (data == null) data = await $fetch(`https://api.github.com/${apiPath}`).catch(() => null);
	if (data?.homepage) return { homepage: data.homepage };
	return null;
}
|
|
1446
|
-
|
|
1447
|
-
const
|
|
1448
|
-
async function listFilesAtRef(owner, repo, ref) {
|
|
1209
|
+
/**
 * Find a README for a repo (optionally in a subdirectory, at a ref).
 * Probe order: ungh (public repos only) -> raw.githubusercontent.com with
 * README.md/Readme.md/readme.md on main/master (or the given ref) -> the
 * GitHub contents API (which also marks the repo private on success).
 * Returns a fetchable URL (possibly the internal ungh:// scheme) or null.
 *
 * Fix vs. previous version: the raw URL interpolated a corrupted token
 * instead of `${filename}`, so the loop over README filename casings never
 * actually varied the URL.
 */
async function fetchReadme(owner, repo, subdir, ref) {
	const branch = ref || "main";
	if (!isKnownPrivateRepo(owner, repo)) {
		const unghUrl = subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${branch}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ""}`;
		if ((await $fetch.raw(unghUrl).catch(() => null))?.ok) return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ""}${ref ? `@${ref}` : ""}`;
	}
	const basePath = subdir ? `${subdir}/` : "";
	const branches = ref ? [ref] : ["main", "master"];
	const token = isKnownPrivateRepo(owner, repo) ? getGitHubToken() : null;
	const authHeaders = token ? { Authorization: `token ${token}` } : {};
	for (const b of branches) for (const filename of [
		"README.md",
		"Readme.md",
		"readme.md"
	]) {
		const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`;
		if ((await $fetch.raw(readmeUrl, { headers: authHeaders }).catch(() => null))?.ok) return readmeUrl;
	}
	const refParam = ref ? `?ref=${ref}` : "";
	const apiData = await ghApi(subdir ? `repos/${owner}/${repo}/contents/${subdir}/README.md${refParam}` : `repos/${owner}/${repo}/readme${refParam}`);
	if (apiData?.download_url) {
		// Raw fetch failed but the authenticated API worked — remember the repo
		// is private so later calls skip the public probes.
		markRepoPrivate(owner, repo);
		return apiData.download_url;
	}
	return null;
}
|
|
1460
|
-
async function
|
|
1461
|
-
|
|
1462
|
-
|
|
1463
|
-
|
|
1464
|
-
|
|
1465
|
-
if (files.length > 0) return {
|
|
1466
|
-
ref: tag,
|
|
1467
|
-
files
|
|
1468
|
-
};
|
|
1235
|
+
/**
 * Fetch README text for a URL produced by fetchReadme. Supports file://,
 * the internal ungh:// scheme ("ungh://owner/repo[/subdir][@ref]"),
 * raw.githubusercontent.com, and plain HTTP(S). Returns the markdown
 * string or null.
 */
async function fetchReadmeContent(url) {
	if (url.startsWith("file://")) {
		const filePath = fileURLToPath(url);
		if (!existsSync(filePath)) return null;
		return readFileSync(filePath, "utf-8");
	}
	if (url.startsWith("ungh://")) {
		// Parse ungh://owner/repo[/subdir][@ref]; ref defaults to "main".
		let path = url.replace("ungh://", "");
		let ref = "main";
		const atIdx = path.lastIndexOf("@");
		if (atIdx !== -1) {
			ref = path.slice(atIdx + 1);
			path = path.slice(0, atIdx);
		}
		const parts = path.split("/");
		const owner = parts[0];
		const repo = parts[1];
		const subdir = parts.slice(2).join("/");
		const text = await $fetch(subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`, { responseType: "text" }).catch(() => null);
		if (!text) return null;
		try {
			// ungh wraps content in JSON ({ markdown } or { file: { contents } });
			// fall back to treating the response as raw markdown.
			const json = JSON.parse(text);
			return json.markdown || json.file?.contents || null;
		} catch {
			return text;
		}
	}
	// GitHub raw URLs go through the token-aware helper; anything else is plain text.
	if (url.includes("raw.githubusercontent.com")) return fetchGitHubRaw(url);
	return fetchText(url);
}
|
|
1265
|
+
/**
 * Build package-like metadata for a bare GitHub repo: homepage/description
 * from the repos API, version from the latest release (else the "main"
 * ref), plus git docs, README, and llms.txt locations. Returns null when
 * no docs source of any kind is found.
 */
async function resolveGitHubRepo(owner, repo, onProgress) {
	onProgress?.("Fetching repo metadata");
	const repoUrl = `https://github.com/${owner}/${repo}`;
	// ghApi first; fall back to the anonymous REST endpoint when it yields nothing.
	const meta = await ghApi(`repos/${owner}/${repo}`) ?? await $fetch(`https://api.github.com/repos/${owner}/${repo}`).catch(() => null);
	const homepage = meta?.homepage || void 0;
	const description = meta?.description || void 0;
	onProgress?.("Fetching latest release");
	const releases = await fetchUnghReleases(owner, repo);
	let version = "main";
	let releasedAt;
	const latestRelease = releases[0];
	if (latestRelease) {
		// Strip a leading "v" so tags like v1.2.3 become 1.2.3.
		version = latestRelease.tag.replace(/^v/, "");
		releasedAt = latestRelease.publishedAt;
	}
	onProgress?.("Resolving docs");
	const gitDocs = await fetchGitDocs(owner, repo, version);
	const gitDocsUrl = gitDocs ? `${repoUrl}/tree/${gitDocs.ref}/docs` : void 0;
	const gitRef = gitDocs?.ref;
	onProgress?.("Fetching README");
	const readmeUrl = await fetchReadme(owner, repo);
	let llmsUrl;
	if (homepage) {
		onProgress?.("Checking llms.txt");
		llmsUrl = await fetchLlmsUrl(homepage).catch(() => null) ?? void 0;
	}
	// Nothing fetchable at all — caller should treat the repo as unresolvable.
	if (!gitDocsUrl && !readmeUrl && !llmsUrl) return null;
	return {
		name: repo,
		// Only report a version when a real release exists ("main" is a ref, not a version).
		version: latestRelease ? version : void 0,
		releasedAt,
		description,
		repoUrl,
		docsUrl: homepage,
		gitDocsUrl,
		gitRef,
		gitDocsFallback: gitDocs?.fallback,
		readmeUrl: readmeUrl ?? void 0,
		llmsUrl
	};
}
|
|
1306
|
+
// crates.io crate names: first char lowercase alphanumeric, then word
// characters or hyphens (names are lowercased before this test runs).
const VALID_CRATE_NAME = /^[a-z0-9][\w-]*$/;
// Shared runner spacing crates.io API calls by 1e3 (presumably milliseconds,
// i.e. ~1 request/second — confirm createRateLimitedRunner's unit).
const runCratesApiRateLimited = createRateLimitedRunner(1e3);
|
|
1308
|
+
/**
 * Choose which crate version to document from a crates.io API payload.
 * Preference order: exact non-yanked match for `requestedVersion`, then the
 * version the crate metadata advertises (max_stable/newest/max/default),
 * then the first non-yanked entry. Returns { version, entry? } or null.
 */
function selectCrateVersion(data, requestedVersion) {
	const versions = data.versions || [];
	const findLive = (num) => versions.find((v) => v.num === num && !v.yanked);
	if (requestedVersion) {
		const exact = findLive(requestedVersion);
		if (exact?.num) return {
			version: exact.num,
			entry: exact
		};
	}
	const crate = data.crate;
	const advertised = [
		crate?.max_stable_version,
		crate?.newest_version,
		crate?.max_version,
		crate?.default_version
	].find(Boolean);
	if (advertised) {
		const liveMatch = findLive(advertised);
		if (liveMatch?.num) return {
			version: advertised,
			entry: liveMatch
		};
		// Metadata names a version but no versions list came back — trust the name.
		if (versions.length === 0) return { version: advertised };
	}
	const firstLive = versions.find((v) => !v.yanked && v.num);
	if (firstLive?.num) return {
		version: firstLive.num,
		entry: firstLive
	};
	return null;
}
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
|
|
1494
|
-
|
|
1339
|
+
/** Return the first argument that is non-empty after trimming, or undefined. */
function pickPreferredUrl(...urls) {
	for (const candidate of urls) {
		const trimmed = candidate?.trim();
		if (trimmed) return trimmed;
	}
	return undefined;
}
|
|
1342
|
+
/** GET a crates.io API URL through the shared rate limiter; resolves to null on failure. */
async function fetchCratesApi(url) {
	const task = () => $fetch(url).catch(() => null);
	return runCratesApiRateLimited(task);
}
|
|
1345
|
+
/**
 * Resolve documentation metadata for a Rust crate, recording each lookup
 * step in `attempts` for diagnostics. Flow: validate the name, query
 * crates.io, pick a version, derive docs/repo/homepage URLs, optionally
 * enrich via the crate's GitHub repo, and finally probe for llms.txt.
 * Returns { package, attempts }; `package` is null on any hard failure.
 */
async function resolveCrateDocsWithAttempts(crateName, options = {}) {
	const attempts = [];
	const onProgress = options.onProgress;
	const normalizedName = crateName.trim().toLowerCase();
	// Reject names that can't be valid crates (also guards the URL we build below).
	if (!normalizedName || !VALID_CRATE_NAME.test(normalizedName)) {
		attempts.push({
			source: "crates",
			status: "error",
			message: `Invalid crate name: ${crateName}`
		});
		return {
			package: null,
			attempts
		};
	}
	onProgress?.("crates.io metadata");
	const apiUrl = `https://crates.io/api/v1/crates/${encodeURIComponent(normalizedName)}`;
	const data = await fetchCratesApi(apiUrl);
	if (!data?.crate) {
		attempts.push({
			source: "crates",
			url: apiUrl,
			status: "not-found",
			message: "Crate not found on crates.io"
		});
		return {
			package: null,
			attempts
		};
	}
	attempts.push({
		source: "crates",
		url: apiUrl,
		status: "success",
		message: `Found crate: ${data.crate.name || normalizedName}`
	});
	const selected = selectCrateVersion(data, options.version);
	if (!selected) {
		attempts.push({
			source: "crates",
			url: apiUrl,
			status: "error",
			message: "No usable crate versions found"
		});
		return {
			package: null,
			attempts
		};
	}
	const version = selected.version;
	const versionEntry = selected.entry;
	// docs.rs is the universal fallback docs location for any published crate.
	const docsRsUrl = `https://docs.rs/${encodeURIComponent(normalizedName)}/${encodeURIComponent(version)}`;
	// Version-level metadata wins over crate-level metadata.
	const repositoryRaw = pickPreferredUrl(versionEntry?.repository, data.crate.repository);
	const homepage = pickPreferredUrl(versionEntry?.homepage, data.crate.homepage);
	const documentation = pickPreferredUrl(versionEntry?.documentation, data.crate.documentation);
	const normalizedRepo = repositoryRaw ? normalizeRepoUrl(repositoryRaw) : void 0;
	// Use the homepage as the repo URL only when it looks like a code host.
	const repoUrl = normalizedRepo && isLikelyCodeHostUrl(normalizedRepo) ? normalizedRepo : isLikelyCodeHostUrl(homepage) ? homepage : void 0;
	let resolved = {
		name: normalizedName,
		version,
		releasedAt: versionEntry?.created_at || data.crate.updated_at || void 0,
		description: versionEntry?.description || data.crate.description,
		// Docs URL preference: a real docs site over a code-host link, else docs.rs.
		docsUrl: (() => {
			if (documentation && !isUselessDocsUrl(documentation) && !isLikelyCodeHostUrl(documentation)) return documentation;
			if (homepage && !isUselessDocsUrl(homepage) && !isLikelyCodeHostUrl(homepage)) return homepage;
			return docsRsUrl;
		})(),
		repoUrl
	};
	const gh = repoUrl ? parseGitHubUrl(repoUrl) : null;
	if (gh) {
		onProgress?.("GitHub enrichment");
		const ghResolved = await resolveGitHubRepo(gh.owner, gh.repo);
		if (ghResolved) {
			attempts.push({
				source: "github-meta",
				url: repoUrl,
				status: "success",
				message: "Enriched via GitHub repo metadata"
			});
			// Merge: GitHub fills gaps, but crates.io values keep priority.
			resolved = {
				...ghResolved,
				name: normalizedName,
				version,
				releasedAt: resolved.releasedAt || ghResolved.releasedAt,
				description: resolved.description || ghResolved.description,
				docsUrl: resolved.docsUrl || ghResolved.docsUrl,
				repoUrl,
				readmeUrl: ghResolved.readmeUrl || resolved.readmeUrl
			};
		} else attempts.push({
			source: "github-meta",
			url: repoUrl,
			status: "not-found",
			message: "GitHub enrichment failed, using crates.io metadata"
		});
	}
	if (!resolved.llmsUrl && resolved.docsUrl) {
		onProgress?.("llms.txt discovery");
		resolved.llmsUrl = await fetchLlmsUrl(resolved.docsUrl).catch(() => null) ?? void 0;
		if (resolved.llmsUrl) attempts.push({
			source: "llms.txt",
			url: resolved.llmsUrl,
			status: "success"
		});
	}
	return {
		package: resolved,
		attempts
	};
}
|
|
1510
|
-
function
|
|
1511
|
-
|
|
1456
|
+
/**
 * Crawl a docs site and return its pages as markdown docs. Crawls up to
 * `maxPages` pages (retrying once on a fully-failed crawl), cleans up the
 * temp output dir, and filters out pages in foreign locales — detected
 * either by the page's <html lang> attribute or by a locale path prefix.
 * Returns an array of { path, content } entries under a docs/ prefix.
 */
async function fetchCrawledDocs(url, onProgress, maxPages = 200) {
	const outputDir = join(tmpdir(), "skilld-crawl", Date.now().toString());
	onProgress?.(`Crawling ${url}`);
	const userLang = getUserLang();
	// URLs whose <html lang> marks them as neither English nor the user's language.
	const foreignUrls = /* @__PURE__ */ new Set();
	const doCrawl = () => crawlAndGenerate({
		urls: [url],
		outputDir,
		driver: "http",
		generateLlmsTxt: false,
		generateIndividualMd: true,
		maxRequestsPerCrawl: maxPages,
		onPage: (page) => {
			const lang = extractHtmlLang(page.html);
			if (lang && !lang.startsWith("en") && !lang.startsWith(userLang)) foreignUrls.add(page.url);
		}
	}, (progress) => {
		if (progress.crawling.status === "processing" && progress.crawling.total > 0) onProgress?.(`Crawling ${progress.crawling.processed}/${progress.crawling.total} pages`);
	});
	let results = await doCrawl().catch((err) => {
		onProgress?.(`Crawl failed: ${err?.message || err}`);
		return [];
	});
	if (results.length === 0) {
		// One retry for transient failures; errors are swallowed the second time.
		onProgress?.("Retrying crawl");
		results = await doCrawl().catch(() => []);
	}
	// Always remove the temp crawl output, even if the crawl produced nothing.
	rmSync(outputDir, {
		recursive: true,
		force: true
	});
	const docs = [];
	let localeFiltered = 0;
	for (const result of results) {
		if (!result.success || !result.content) continue;
		if (foreignUrls.has(result.url)) {
			localeFiltered++;
			continue;
		}
		// Map the URL path to a docs/ file path; "/" becomes "/index".
		const segments = (new URL(result.url).pathname.replace(/\/$/, "") || "/index").split("/").filter(Boolean);
		if (isForeignPathPrefix(segments[0], userLang)) {
			localeFiltered++;
			continue;
		}
		const path = `docs/${segments.join("/")}.md`;
		docs.push({
			path,
			content: result.content
		});
	}
	if (localeFiltered > 0) onProgress?.(`Filtered ${localeFiltered} foreign locale pages`);
	onProgress?.(`Crawled ${docs.length} pages`);
	return docs;
}
|
|
1513
|
-
const
|
|
1514
|
-
|
|
1515
|
-
|
|
1516
|
-
"solid",
|
|
1517
|
-
"angular",
|
|
1518
|
-
"svelte",
|
|
1519
|
-
"preact",
|
|
1520
|
-
"lit",
|
|
1521
|
-
"qwik"
|
|
1522
|
-
]);
|
|
1523
|
-
function filterFrameworkDocs(files, packageName) {
|
|
1524
|
-
if (!packageName) return files;
|
|
1525
|
-
const shortName = packageName.replace(/^@.*\//, "");
|
|
1526
|
-
const targetFramework = [...FRAMEWORK_NAMES].find((fw) => shortName.includes(fw));
|
|
1527
|
-
if (!targetFramework) return files;
|
|
1528
|
-
const otherFrameworks = [...FRAMEWORK_NAMES].filter((fw) => fw !== targetFramework);
|
|
1529
|
-
const excludePattern = new RegExp(`\\b(?:${otherFrameworks.join("|")})\\b`);
|
|
1530
|
-
return files.filter((f) => !excludePattern.test(f));
|
|
1510
|
+
// Matches the lang attribute on an <html> tag, e.g. <html lang="en-US">.
const HTML_LANG_RE = /<html[^>]*\slang=["']([^"']+)["']/i;
/** Lowercased value of the <html lang="..."> attribute, or undefined when absent. */
function extractHtmlLang(html) {
	const match = HTML_LANG_RE.exec(html);
	if (!match) return undefined;
	return match[1].toLowerCase();
}
|
|
1532
|
-
const
|
|
1533
|
-
|
|
1534
|
-
|
|
1535
|
-
|
|
1536
|
-
|
|
1537
|
-
|
|
1538
|
-
|
|
1539
|
-
"
|
|
1540
|
-
"
|
|
1541
|
-
"
|
|
1542
|
-
"
|
|
1543
|
-
"
|
|
1544
|
-
"
|
|
1545
|
-
"
|
|
1546
|
-
"
|
|
1547
|
-
"
|
|
1548
|
-
"
|
|
1549
|
-
"
|
|
1550
|
-
"
|
|
1551
|
-
"
|
|
1552
|
-
"
|
|
1553
|
-
"mocks",
|
|
1554
|
-
"__mocks__"
|
|
1555
|
-
]);
|
|
1556
|
-
const DOC_DIR_BONUS = new Set([
|
|
1557
|
-
"docs",
|
|
1558
|
-
"documentation",
|
|
1559
|
-
"pages",
|
|
1560
|
-
"content",
|
|
1561
|
-
"website",
|
|
1562
|
-
"guide",
|
|
1563
|
-
"guides",
|
|
1564
|
-
"wiki",
|
|
1565
|
-
"manual",
|
|
1566
|
-
"api"
|
|
1514
|
+
// Locale codes commonly used as URL path prefixes for translated doc trees.
const LOCALE_CODES = new Set([
	"ar",
	"de",
	"es",
	"fr",
	"id",
	"it",
	"ja",
	"ko",
	"nl",
	"pl",
	"pt",
	"pt-br",
	"ru",
	"th",
	"tr",
	"uk",
	"vi",
	"zh",
	"zh-cn",
	"zh-tw"
]);
/**
 * True when `segment` (a URL path's first segment) is a locale prefix for a
 * language other than English or the user's own language.
 */
function isForeignPathPrefix(segment, userLang) {
	if (!segment) return false;
	const normalized = segment.toLowerCase();
	const isOwnLanguage = normalized === "en" || normalized.startsWith(userLang);
	if (isOwnLanguage) return false;
	return LOCALE_CODES.has(normalized);
}
|
|
1571
|
-
function
|
|
1572
|
-
|
|
1542
|
+
/**
 * Two-letter language code from the user's locale environment variables
 * (LC_ALL, then LANG, then LANGUAGE); defaults to "en" when unset or when
 * the locale has no usable language part (e.g. "C").
 */
function getUserLang() {
	const rawLocale = process.env.LC_ALL || process.env.LANG || process.env.LANGUAGE || "";
	const [primary = ""] = rawLocale.split(/[_.:-]/);
	const code = primary.toLowerCase();
	if (code.length >= 2) return code.slice(0, 2);
	return "en";
}
|
|
1574
|
-
function
|
|
1575
|
-
return
|
|
1546
|
+
function toCrawlPattern(docsUrl) {
|
|
1547
|
+
return `${docsUrl.replace(/\/+$/, "")}/**`;
|
|
1576
1548
|
}
|
|
1577
|
-
|
|
1578
|
-
|
|
1579
|
-
|
|
1549
|
+
const HIGH_VALUE_CATEGORIES = new Set([
|
|
1550
|
+
"q&a",
|
|
1551
|
+
"help",
|
|
1552
|
+
"troubleshooting",
|
|
1553
|
+
"support"
|
|
1554
|
+
]);
|
|
1555
|
+
const LOW_VALUE_CATEGORIES = new Set([
|
|
1556
|
+
"show and tell",
|
|
1557
|
+
"ideas",
|
|
1558
|
+
"polls"
|
|
1559
|
+
]);
|
|
1560
|
+
const TITLE_NOISE_RE = /looking .*(?:developer|engineer|freelanc)|hiring|job post|guide me to (?:complete|finish|build)|help me (?:complete|finish|build)|seeking .* tutorial|recommend.* course/i;
|
|
1561
|
+
const MIN_DISCUSSION_SCORE = 3;
|
|
1562
|
+
function scoreComment(c) {
|
|
1563
|
+
return (c.isMaintainer ? 3 : 1) * (hasCodeBlock(c.body) ? 2 : 1) * (1 + c.reactions);
|
|
1580
1564
|
}
|
|
1581
|
-
function
|
|
1582
|
-
|
|
1583
|
-
|
|
1584
|
-
|
|
1585
|
-
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
|
|
1589
|
-
|
|
1565
|
+
function scoreDiscussion(d) {
|
|
1566
|
+
if (TITLE_NOISE_RE.test(d.title)) return -1;
|
|
1567
|
+
let score = 0;
|
|
1568
|
+
if (d.isMaintainer) score += 3;
|
|
1569
|
+
if (hasCodeBlock([
|
|
1570
|
+
d.body,
|
|
1571
|
+
d.answer || "",
|
|
1572
|
+
...d.topComments.map((c) => c.body)
|
|
1573
|
+
].join("\n"))) score += 3;
|
|
1574
|
+
score += Math.min(d.upvoteCount, 5);
|
|
1575
|
+
if (d.answer) {
|
|
1576
|
+
score += 2;
|
|
1577
|
+
if (d.answer.length > 100) score += 1;
|
|
1590
1578
|
}
|
|
1591
|
-
|
|
1592
|
-
|
|
1593
|
-
|
|
1594
|
-
|
|
1595
|
-
|
|
1579
|
+
if (d.topComments.some((c) => c.isMaintainer)) score += 2;
|
|
1580
|
+
if (d.topComments.some((c) => c.reactions > 0)) score += 1;
|
|
1581
|
+
return score;
|
|
1582
|
+
}
|
|
1583
|
+
async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt, fromDate) {
|
|
1584
|
+
if (!isGhAvailable()) return [];
|
|
1585
|
+
if (!fromDate && releasedAt) {
|
|
1586
|
+
const cutoff = new Date(releasedAt);
|
|
1587
|
+
cutoff.setMonth(cutoff.getMonth() + 6);
|
|
1588
|
+
if (cutoff < /* @__PURE__ */ new Date()) return [];
|
|
1596
1589
|
}
|
|
1597
|
-
|
|
1598
|
-
const
|
|
1599
|
-
|
|
1600
|
-
|
|
1601
|
-
|
|
1602
|
-
|
|
1590
|
+
try {
|
|
1591
|
+
const { stdout: result } = spawnSync("gh", [
|
|
1592
|
+
"api",
|
|
1593
|
+
"graphql",
|
|
1594
|
+
"-f",
|
|
1595
|
+
`query=${`query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${Math.min(limit * 3, 80)}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 10) { totalCount nodes { body author { login } authorAssociation reactions { totalCount } } } answer { body author { login } authorAssociation } author { login } authorAssociation } } } }`}`,
|
|
1596
|
+
"-f",
|
|
1597
|
+
`owner=${owner}`,
|
|
1598
|
+
"-f",
|
|
1599
|
+
`repo=${repo}`
|
|
1600
|
+
], {
|
|
1601
|
+
encoding: "utf-8",
|
|
1602
|
+
maxBuffer: 10 * 1024 * 1024
|
|
1603
|
+
});
|
|
1604
|
+
if (!result) return [];
|
|
1605
|
+
const nodes = JSON.parse(result)?.data?.repository?.discussions?.nodes;
|
|
1606
|
+
if (!Array.isArray(nodes)) return [];
|
|
1607
|
+
const fromTs = fromDate ? new Date(fromDate).getTime() : null;
|
|
1608
|
+
return nodes.filter((d) => d.author && !BOT_USERS.has(d.author.login)).filter((d) => {
|
|
1609
|
+
const cat = (d.category?.name || "").toLowerCase();
|
|
1610
|
+
return !LOW_VALUE_CATEGORIES.has(cat);
|
|
1611
|
+
}).filter((d) => !fromTs || new Date(d.createdAt).getTime() >= fromTs).map((d) => {
|
|
1612
|
+
let answer;
|
|
1613
|
+
if (d.answer?.body) {
|
|
1614
|
+
const isMaintainer = [
|
|
1615
|
+
"OWNER",
|
|
1616
|
+
"MEMBER",
|
|
1617
|
+
"COLLABORATOR"
|
|
1618
|
+
].includes(d.answer.authorAssociation);
|
|
1619
|
+
const author = d.answer.author?.login;
|
|
1620
|
+
answer = `${isMaintainer && author ? `**@${author}** [maintainer]:\n\n` : ""}${d.answer.body}`;
|
|
1621
|
+
}
|
|
1622
|
+
const comments = (d.comments?.nodes || []).filter((c) => c.author && !BOT_USERS.has(c.author.login)).filter((c) => !COMMENT_NOISE_RE.test((c.body || "").trim())).map((c) => {
|
|
1623
|
+
const isMaintainer = [
|
|
1624
|
+
"OWNER",
|
|
1625
|
+
"MEMBER",
|
|
1626
|
+
"COLLABORATOR"
|
|
1627
|
+
].includes(c.authorAssociation);
|
|
1628
|
+
return {
|
|
1629
|
+
body: c.body || "",
|
|
1630
|
+
author: c.author.login,
|
|
1631
|
+
reactions: c.reactions?.totalCount || 0,
|
|
1632
|
+
isMaintainer
|
|
1633
|
+
};
|
|
1634
|
+
}).sort((a, b) => scoreComment(b) - scoreComment(a)).slice(0, 3);
|
|
1603
1635
|
return {
|
|
1604
|
-
|
|
1605
|
-
|
|
1636
|
+
number: d.number,
|
|
1637
|
+
title: d.title,
|
|
1638
|
+
body: d.body || "",
|
|
1639
|
+
category: d.category?.name || "",
|
|
1640
|
+
createdAt: d.createdAt,
|
|
1641
|
+
url: d.url,
|
|
1642
|
+
upvoteCount: d.upvoteCount || 0,
|
|
1643
|
+
comments: d.comments?.totalCount || 0,
|
|
1644
|
+
isMaintainer: [
|
|
1645
|
+
"OWNER",
|
|
1646
|
+
"MEMBER",
|
|
1647
|
+
"COLLABORATOR"
|
|
1648
|
+
].includes(d.authorAssociation),
|
|
1649
|
+
answer,
|
|
1650
|
+
topComments: comments
|
|
1606
1651
|
};
|
|
1607
|
-
}
|
|
1608
|
-
|
|
1609
|
-
|
|
1610
|
-
|
|
1611
|
-
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1652
|
+
}).map((d) => ({
|
|
1653
|
+
d,
|
|
1654
|
+
score: scoreDiscussion(d)
|
|
1655
|
+
})).filter(({ score }) => score >= MIN_DISCUSSION_SCORE).sort((a, b) => {
|
|
1656
|
+
const aHigh = HIGH_VALUE_CATEGORIES.has(a.d.category.toLowerCase()) ? 1 : 0;
|
|
1657
|
+
const bHigh = HIGH_VALUE_CATEGORIES.has(b.d.category.toLowerCase()) ? 1 : 0;
|
|
1658
|
+
if (aHigh !== bHigh) return bHigh - aHigh;
|
|
1659
|
+
return b.score - a.score;
|
|
1660
|
+
}).slice(0, limit).map(({ d }) => d);
|
|
1661
|
+
} catch {
|
|
1662
|
+
return [];
|
|
1615
1663
|
}
|
|
1616
|
-
if (dirGroups.size === 0) return null;
|
|
1617
|
-
const scored = Array.from(dirGroups.entries(), ([dir, files]) => ({
|
|
1618
|
-
dir,
|
|
1619
|
-
files,
|
|
1620
|
-
score: scoreDocDir(dir, files.length)
|
|
1621
|
-
})).filter((d) => d.files.length >= 5).sort((a, b) => b.score - a.score);
|
|
1622
|
-
if (scored.length === 0) return null;
|
|
1623
|
-
const best = scored[0];
|
|
1624
|
-
return {
|
|
1625
|
-
files: best.files,
|
|
1626
|
-
prefix: best.dir
|
|
1627
|
-
};
|
|
1628
|
-
}
|
|
1629
|
-
async function listDocsAtRef(owner, repo, ref, pathPrefix = "docs/") {
|
|
1630
|
-
return filterDocFiles(await listFilesAtRef(owner, repo, ref), pathPrefix);
|
|
1631
1664
|
}
|
|
1632
|
-
|
|
1633
|
-
const
|
|
1634
|
-
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
|
|
1638
|
-
|
|
1639
|
-
|
|
1640
|
-
|
|
1641
|
-
|
|
1642
|
-
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
|
|
1649
|
-
|
|
1650
|
-
|
|
1651
|
-
|
|
1652
|
-
|
|
1653
|
-
const
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
allFiles = tag.files;
|
|
1665
|
+
function formatDiscussionAsMarkdown(d) {
|
|
1666
|
+
const fm = buildFrontmatter({
|
|
1667
|
+
number: d.number,
|
|
1668
|
+
title: d.title,
|
|
1669
|
+
category: d.category,
|
|
1670
|
+
created: isoDate(d.createdAt),
|
|
1671
|
+
url: d.url,
|
|
1672
|
+
upvotes: d.upvoteCount,
|
|
1673
|
+
comments: d.comments,
|
|
1674
|
+
answered: !!d.answer
|
|
1675
|
+
});
|
|
1676
|
+
const bodyLimit = d.upvoteCount >= 5 ? 1500 : 800;
|
|
1677
|
+
const lines = [
|
|
1678
|
+
fm,
|
|
1679
|
+
"",
|
|
1680
|
+
`# ${d.title}`
|
|
1681
|
+
];
|
|
1682
|
+
if (d.body) lines.push("", truncateBody(d.body, bodyLimit));
|
|
1683
|
+
if (d.answer) lines.push("", "---", "", "## Accepted Answer", "", truncateBody(d.answer, 1e3));
|
|
1684
|
+
else if (d.topComments.length > 0) {
|
|
1685
|
+
lines.push("", "---", "", "## Top Comments");
|
|
1686
|
+
for (const c of d.topComments) {
|
|
1687
|
+
const reactions = c.reactions > 0 ? ` (+${c.reactions})` : "";
|
|
1688
|
+
const maintainer = c.isMaintainer ? " [maintainer]" : "";
|
|
1689
|
+
lines.push("", `**@${c.author}**${maintainer}${reactions}:`, "", truncateBody(c.body, 600));
|
|
1658
1690
|
}
|
|
1659
1691
|
}
|
|
1660
|
-
|
|
1661
|
-
if (docs.length === 0) return null;
|
|
1662
|
-
return {
|
|
1663
|
-
baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,
|
|
1664
|
-
ref: tag.ref,
|
|
1665
|
-
files: docs,
|
|
1666
|
-
docsPrefix,
|
|
1667
|
-
allFiles,
|
|
1668
|
-
fallback: tag.fallback
|
|
1669
|
-
};
|
|
1670
|
-
}
|
|
1671
|
-
function normalizePath(p) {
|
|
1672
|
-
return p.replace(/^\//, "").replace(/\.(?:md|mdx)$/, "");
|
|
1692
|
+
return lines.join("\n");
|
|
1673
1693
|
}
|
|
1674
|
-
function
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
1681
|
-
|
|
1682
|
-
|
|
1683
|
-
|
|
1684
|
-
|
|
1685
|
-
|
|
1694
|
+
function generateDiscussionIndex(discussions) {
|
|
1695
|
+
const byCategory = /* @__PURE__ */ new Map();
|
|
1696
|
+
for (const d of discussions) mapInsert(byCategory, d.category || "Uncategorized", () => []).push(d);
|
|
1697
|
+
const answered = discussions.filter((d) => d.answer).length;
|
|
1698
|
+
const sections = [
|
|
1699
|
+
[
|
|
1700
|
+
"---",
|
|
1701
|
+
`total: ${discussions.length}`,
|
|
1702
|
+
`answered: ${answered}`,
|
|
1703
|
+
"---"
|
|
1704
|
+
].join("\n"),
|
|
1705
|
+
"",
|
|
1706
|
+
"# Discussions Index",
|
|
1707
|
+
""
|
|
1708
|
+
];
|
|
1709
|
+
const cats = [...byCategory.keys()].sort((a, b) => {
|
|
1710
|
+
return (HIGH_VALUE_CATEGORIES.has(a.toLowerCase()) ? 0 : 1) - (HIGH_VALUE_CATEGORIES.has(b.toLowerCase()) ? 0 : 1) || a.localeCompare(b);
|
|
1686
1711
|
});
|
|
1687
|
-
const
|
|
1688
|
-
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1712
|
+
for (const cat of cats) {
|
|
1713
|
+
const group = byCategory.get(cat);
|
|
1714
|
+
sections.push(`## ${cat} (${group.length})`, "");
|
|
1715
|
+
for (const d of group) {
|
|
1716
|
+
const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : "";
|
|
1717
|
+
const answered = d.answer ? " [answered]" : "";
|
|
1718
|
+
const date = isoDate(d.createdAt);
|
|
1719
|
+
sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${date})`);
|
|
1720
|
+
}
|
|
1721
|
+
sections.push("");
|
|
1692
1722
|
}
|
|
1693
|
-
|
|
1694
|
-
return {
|
|
1695
|
-
isValid: matchRatio >= .3,
|
|
1696
|
-
matchRatio
|
|
1697
|
-
};
|
|
1723
|
+
return sections.join("\n");
|
|
1698
1724
|
}
|
|
1699
|
-
|
|
1700
|
-
const
|
|
1701
|
-
|
|
1702
|
-
|
|
1703
|
-
|
|
1704
|
-
|
|
1725
|
+
function generateDocsIndex(docs) {
|
|
1726
|
+
const docFiles = docs.filter((d) => d.path.startsWith("docs/") && d.path.endsWith(".md") && !d.path.endsWith("_INDEX.md")).sort((a, b) => a.path.localeCompare(b.path));
|
|
1727
|
+
if (docFiles.length === 0) return "";
|
|
1728
|
+
const rootFiles = [];
|
|
1729
|
+
const byDir = /* @__PURE__ */ new Map();
|
|
1730
|
+
for (const doc of docFiles) {
|
|
1731
|
+
const rel = doc.path.slice(5);
|
|
1732
|
+
const dir = rel.includes("/") ? rel.slice(0, rel.lastIndexOf("/")) : "";
|
|
1733
|
+
if (!dir) rootFiles.push(doc);
|
|
1734
|
+
else {
|
|
1735
|
+
const list = byDir.get(dir);
|
|
1736
|
+
if (list) list.push(doc);
|
|
1737
|
+
else byDir.set(dir, [doc]);
|
|
1738
|
+
}
|
|
1739
|
+
}
|
|
1740
|
+
const sections = [
|
|
1741
|
+
"---",
|
|
1742
|
+
`total: ${docFiles.length}`,
|
|
1743
|
+
"---",
|
|
1744
|
+
"",
|
|
1745
|
+
"# Docs Index",
|
|
1746
|
+
""
|
|
1705
1747
|
];
|
|
1706
|
-
for (const
|
|
1707
|
-
const
|
|
1708
|
-
|
|
1709
|
-
|
|
1710
|
-
|
|
1711
|
-
|
|
1748
|
+
for (const file of rootFiles) {
|
|
1749
|
+
const rel = file.path.slice(5);
|
|
1750
|
+
const title = extractTitle(file.content) || rel.replace(/\.md$/, "");
|
|
1751
|
+
const desc = extractDescription(file.content);
|
|
1752
|
+
const descPart = desc ? `: ${desc}` : "";
|
|
1753
|
+
sections.push(`- [${title}](./${rel})${descPart}`);
|
|
1712
1754
|
}
|
|
1713
|
-
|
|
1755
|
+
if (rootFiles.length > 0) sections.push("");
|
|
1756
|
+
for (const [dir, files] of byDir) {
|
|
1757
|
+
sections.push(`## ${dir} (${files.length})`, "");
|
|
1758
|
+
for (const file of files) {
|
|
1759
|
+
const rel = file.path.slice(5);
|
|
1760
|
+
const title = extractTitle(file.content) || rel.replace(/\.md$/, "").split("/").pop();
|
|
1761
|
+
const desc = extractDescription(file.content);
|
|
1762
|
+
const descPart = desc ? `: ${desc}` : "";
|
|
1763
|
+
sections.push(`- [${title}](./${rel})${descPart}`);
|
|
1764
|
+
}
|
|
1765
|
+
sections.push("");
|
|
1766
|
+
}
|
|
1767
|
+
return sections.join("\n");
|
|
1714
1768
|
}
|
|
1715
|
-
|
|
1716
|
-
|
|
1717
|
-
|
|
1718
|
-
|
|
1719
|
-
|
|
1769
|
+
const SKIP_DIRS = [
|
|
1770
|
+
"node_modules",
|
|
1771
|
+
"_vendor",
|
|
1772
|
+
"__tests__",
|
|
1773
|
+
"__mocks__",
|
|
1774
|
+
"__fixtures__",
|
|
1775
|
+
"test",
|
|
1776
|
+
"tests",
|
|
1777
|
+
"fixture",
|
|
1778
|
+
"fixtures",
|
|
1779
|
+
"locales",
|
|
1780
|
+
"locale",
|
|
1781
|
+
"i18n",
|
|
1782
|
+
".git"
|
|
1783
|
+
];
|
|
1784
|
+
const SKIP_PATTERNS = [
|
|
1785
|
+
"*.min.*",
|
|
1786
|
+
"*.prod.*",
|
|
1787
|
+
"*.global.*",
|
|
1788
|
+
"*.browser.*",
|
|
1789
|
+
"*.map",
|
|
1790
|
+
"*.map.js",
|
|
1791
|
+
"CHANGELOG*",
|
|
1792
|
+
"LICENSE*",
|
|
1793
|
+
"README*"
|
|
1794
|
+
];
|
|
1795
|
+
const MAX_FILE_SIZE = 500 * 1024;
|
|
1796
|
+
async function resolveEntryFiles(packageDir) {
|
|
1797
|
+
if (!existsSync(join(packageDir, "package.json"))) return [];
|
|
1798
|
+
const files = await glob(["**/*.d.{ts,mts,cts}"], {
|
|
1799
|
+
cwd: packageDir,
|
|
1800
|
+
ignore: [...SKIP_DIRS.map((d) => `**/${d}/**`), ...SKIP_PATTERNS],
|
|
1801
|
+
absolute: false,
|
|
1802
|
+
expandDirectories: false
|
|
1803
|
+
});
|
|
1804
|
+
const entries = [];
|
|
1805
|
+
for (const file of files) {
|
|
1806
|
+
const absPath = join(packageDir, file);
|
|
1807
|
+
let content;
|
|
1808
|
+
try {
|
|
1809
|
+
content = readFileSync(absPath, "utf-8");
|
|
1810
|
+
} catch {
|
|
1720
1811
|
continue;
|
|
1721
1812
|
}
|
|
1722
|
-
if (
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
"search",
|
|
1728
|
-
"repos",
|
|
1729
|
-
searchTerm,
|
|
1730
|
-
"--json",
|
|
1731
|
-
"fullName",
|
|
1732
|
-
"--limit",
|
|
1733
|
-
"5"
|
|
1734
|
-
], {
|
|
1735
|
-
encoding: "utf-8",
|
|
1736
|
-
timeout: 15e3
|
|
1813
|
+
if (content.length > MAX_FILE_SIZE) continue;
|
|
1814
|
+
entries.push({
|
|
1815
|
+
path: file,
|
|
1816
|
+
content,
|
|
1817
|
+
type: "types"
|
|
1737
1818
|
});
|
|
1738
|
-
|
|
1739
|
-
|
|
1740
|
-
|
|
1741
|
-
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
|
-
|
|
1819
|
+
}
|
|
1820
|
+
return entries;
|
|
1821
|
+
}
|
|
1822
|
+
function parseGitSkillInput(input) {
|
|
1823
|
+
const trimmed = input.trim();
|
|
1824
|
+
if (trimmed.startsWith("@")) return null;
|
|
1825
|
+
if (trimmed.startsWith("./") || trimmed.startsWith("../") || trimmed.startsWith("/") || trimmed.startsWith("~")) return {
|
|
1826
|
+
type: "local",
|
|
1827
|
+
localPath: trimmed.startsWith("~") ? resolve(process.env.HOME || "", trimmed.slice(1)) : resolve(trimmed)
|
|
1828
|
+
};
|
|
1829
|
+
if (trimmed.startsWith("git@")) {
|
|
1830
|
+
const gh = parseGitHubUrl(normalizeRepoUrl(trimmed));
|
|
1831
|
+
if (gh) return {
|
|
1832
|
+
type: "github",
|
|
1833
|
+
owner: gh.owner,
|
|
1834
|
+
repo: gh.repo
|
|
1835
|
+
};
|
|
1836
|
+
return null;
|
|
1837
|
+
}
|
|
1838
|
+
if (trimmed.startsWith("https://") || trimmed.startsWith("http://")) return parseGitUrl(trimmed);
|
|
1839
|
+
if (/^[\w.-]+\/[\w.-]+$/.test(trimmed)) return {
|
|
1840
|
+
type: "github",
|
|
1841
|
+
owner: trimmed.split("/")[0],
|
|
1842
|
+
repo: trimmed.split("/")[1]
|
|
1843
|
+
};
|
|
1844
|
+
return null;
|
|
1845
|
+
}
|
|
1846
|
+
function parseGitUrl(url) {
|
|
1847
|
+
try {
|
|
1848
|
+
const parsed = new URL(url);
|
|
1849
|
+
if (parsed.hostname === "github.com" || parsed.hostname === "www.github.com") {
|
|
1850
|
+
const parts = parsed.pathname.replace(/^\//, "").replace(/\.git$/, "").split("/");
|
|
1851
|
+
const owner = parts[0];
|
|
1852
|
+
const repo = parts[1];
|
|
1853
|
+
if (!owner || !repo) return null;
|
|
1854
|
+
if (parts[2] === "tree" && parts.length >= 4) return {
|
|
1855
|
+
type: "github",
|
|
1856
|
+
owner,
|
|
1857
|
+
repo,
|
|
1858
|
+
ref: parts[3],
|
|
1859
|
+
skillPath: parts.length > 4 ? parts.slice(4).join("/") : void 0
|
|
1860
|
+
};
|
|
1861
|
+
return {
|
|
1862
|
+
type: "github",
|
|
1863
|
+
owner,
|
|
1864
|
+
repo
|
|
1865
|
+
};
|
|
1745
1866
|
}
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
|
|
1750
|
-
|
|
1751
|
-
|
|
1752
|
-
|
|
1753
|
-
|
|
1867
|
+
if (parsed.hostname === "gitlab.com") {
|
|
1868
|
+
const parts = parsed.pathname.replace(/^\//, "").replace(/\.git$/, "").split("/");
|
|
1869
|
+
const owner = parts[0];
|
|
1870
|
+
const repo = parts[1];
|
|
1871
|
+
if (!owner || !repo) return null;
|
|
1872
|
+
return {
|
|
1873
|
+
type: "gitlab",
|
|
1874
|
+
owner,
|
|
1875
|
+
repo
|
|
1876
|
+
};
|
|
1877
|
+
}
|
|
1878
|
+
return null;
|
|
1879
|
+
} catch {
|
|
1880
|
+
return null;
|
|
1754
1881
|
}
|
|
1755
|
-
return null;
|
|
1756
1882
|
}
|
|
1757
|
-
|
|
1758
|
-
const
|
|
1759
|
-
|
|
1760
|
-
|
|
1761
|
-
|
|
1883
|
+
function parseSkillFrontmatterName(content) {
|
|
1884
|
+
const fm = parseFrontmatter(content);
|
|
1885
|
+
return {
|
|
1886
|
+
name: fm.name,
|
|
1887
|
+
description: fm.description
|
|
1888
|
+
};
|
|
1762
1889
|
}
|
|
1763
|
-
|
|
1764
|
-
const
|
|
1765
|
-
if (!
|
|
1766
|
-
|
|
1767
|
-
|
|
1890
|
+
function collectFiles(dir, prefix = "") {
|
|
1891
|
+
const files = [];
|
|
1892
|
+
if (!existsSync(dir)) return files;
|
|
1893
|
+
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
1894
|
+
const relPath = prefix ? `${prefix}/${entry.name}` : entry.name;
|
|
1895
|
+
const fullPath = resolve(dir, entry.name);
|
|
1896
|
+
if (entry.isDirectory()) files.push(...collectFiles(fullPath, relPath));
|
|
1897
|
+
else if (entry.isFile()) files.push({
|
|
1898
|
+
path: relPath,
|
|
1899
|
+
content: readFileSync(fullPath, "utf-8")
|
|
1900
|
+
});
|
|
1768
1901
|
}
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
|
|
1776
|
-
|
|
1777
|
-
|
|
1778
|
-
|
|
1779
|
-
|
|
1902
|
+
return files;
|
|
1903
|
+
}
|
|
1904
|
+
async function fetchGitSkills(source, onProgress) {
|
|
1905
|
+
if (source.type === "local") return fetchLocalSkills(source);
|
|
1906
|
+
if (source.type === "github") return fetchGitHubSkills(source, onProgress);
|
|
1907
|
+
if (source.type === "gitlab") return fetchGitLabSkills(source, onProgress);
|
|
1908
|
+
return { skills: [] };
|
|
1909
|
+
}
|
|
1910
|
+
function fetchLocalSkills(source) {
|
|
1911
|
+
const base = source.localPath;
|
|
1912
|
+
if (!existsSync(base)) return { skills: [] };
|
|
1913
|
+
const skills = [];
|
|
1914
|
+
const skillsDir = resolve(base, "skills");
|
|
1915
|
+
if (existsSync(skillsDir)) for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {
|
|
1916
|
+
if (!entry.isDirectory()) continue;
|
|
1917
|
+
const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`);
|
|
1918
|
+
if (skill) skills.push(skill);
|
|
1780
1919
|
}
|
|
1781
|
-
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
markRepoPrivate(owner, repo);
|
|
1785
|
-
return apiData.download_url;
|
|
1920
|
+
if (skills.length === 0) {
|
|
1921
|
+
const skill = readLocalSkill(base, "");
|
|
1922
|
+
if (skill) skills.push(skill);
|
|
1786
1923
|
}
|
|
1787
|
-
return
|
|
1924
|
+
return { skills };
|
|
1788
1925
|
}
|
|
1789
|
-
|
|
1790
|
-
|
|
1791
|
-
|
|
1792
|
-
|
|
1793
|
-
|
|
1926
|
+
function readLocalSkill(dir, repoPath) {
|
|
1927
|
+
const skillMdPath = resolve(dir, "SKILL.md");
|
|
1928
|
+
if (!existsSync(skillMdPath)) return null;
|
|
1929
|
+
const content = readFileSync(skillMdPath, "utf-8");
|
|
1930
|
+
const frontmatter = parseSkillFrontmatterName(content);
|
|
1931
|
+
const dirName = dir.split("/").pop();
|
|
1932
|
+
const name = frontmatter.name || dirName;
|
|
1933
|
+
const files = collectFiles(dir).filter((f) => f.path !== "SKILL.md");
|
|
1934
|
+
return {
|
|
1935
|
+
name,
|
|
1936
|
+
description: frontmatter.description || "",
|
|
1937
|
+
path: repoPath,
|
|
1938
|
+
content,
|
|
1939
|
+
files
|
|
1940
|
+
};
|
|
1941
|
+
}
|
|
1942
|
+
async function fetchGitHubSkills(source, onProgress) {
|
|
1943
|
+
const { owner, repo } = source;
|
|
1944
|
+
if (!owner || !repo) return { skills: [] };
|
|
1945
|
+
const ref = source.ref || "main";
|
|
1946
|
+
const refs = ref === "main" ? ["main", "master"] : [ref];
|
|
1947
|
+
for (const tryRef of refs) {
|
|
1948
|
+
const skills = await downloadGitHubSkills(owner, repo, tryRef, source.skillPath, onProgress);
|
|
1949
|
+
if (skills.length > 0) return { skills };
|
|
1794
1950
|
}
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1799
|
-
|
|
1800
|
-
|
|
1801
|
-
|
|
1951
|
+
return { skills: [] };
|
|
1952
|
+
}
|
|
1953
|
+
async function downloadGitHubSkills(owner, repo, ref, skillPath, onProgress) {
|
|
1954
|
+
const tempDir = join(tmpdir(), `skilld-${Date.now()}`);
|
|
1955
|
+
try {
|
|
1956
|
+
if (skillPath) {
|
|
1957
|
+
onProgress?.(`Downloading ${owner}/${repo}/${skillPath}@${ref}`);
|
|
1958
|
+
const { dir } = await downloadTemplate(`github:${owner}/${repo}/${skillPath}#${ref}`, {
|
|
1959
|
+
dir: tempDir,
|
|
1960
|
+
force: true,
|
|
1961
|
+
auth: getGitHubToken() || void 0
|
|
1962
|
+
});
|
|
1963
|
+
const skill = readLocalSkill(dir, skillPath);
|
|
1964
|
+
return skill ? [skill] : [];
|
|
1802
1965
|
}
|
|
1803
|
-
|
|
1804
|
-
const owner = parts[0];
|
|
1805
|
-
const repo = parts[1];
|
|
1806
|
-
const subdir = parts.slice(2).join("/");
|
|
1807
|
-
const text = await $fetch(subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`, { responseType: "text" }).catch(() => null);
|
|
1808
|
-
if (!text) return null;
|
|
1966
|
+
onProgress?.(`Downloading ${owner}/${repo}/skills@${ref}`);
|
|
1809
1967
|
try {
|
|
1810
|
-
const
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1968
|
+
const { dir } = await downloadTemplate(`github:${owner}/${repo}/skills#${ref}`, {
|
|
1969
|
+
dir: tempDir,
|
|
1970
|
+
force: true,
|
|
1971
|
+
auth: getGitHubToken() || void 0
|
|
1972
|
+
});
|
|
1973
|
+
const skills = [];
|
|
1974
|
+
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
1975
|
+
if (!entry.isDirectory()) continue;
|
|
1976
|
+
const skill = readLocalSkill(resolve(dir, entry.name), `skills/${entry.name}`);
|
|
1977
|
+
if (skill) skills.push(skill);
|
|
1978
|
+
}
|
|
1979
|
+
if (skills.length > 0) {
|
|
1980
|
+
onProgress?.(`Found ${skills.length} skill(s)`);
|
|
1981
|
+
return skills;
|
|
1982
|
+
}
|
|
1983
|
+
} catch {}
|
|
1984
|
+
const content = await fetchGitHubRaw(`https://raw.githubusercontent.com/${owner}/${repo}/${ref}/SKILL.md`);
|
|
1985
|
+
if (content) {
|
|
1986
|
+
const fm = parseSkillFrontmatterName(content);
|
|
1987
|
+
onProgress?.("Found 1 skill");
|
|
1988
|
+
return [{
|
|
1989
|
+
name: fm.name || repo,
|
|
1990
|
+
description: fm.description || "",
|
|
1991
|
+
path: "",
|
|
1992
|
+
content,
|
|
1993
|
+
files: []
|
|
1994
|
+
}];
|
|
1814
1995
|
}
|
|
1996
|
+
return [];
|
|
1997
|
+
} catch {
|
|
1998
|
+
return [];
|
|
1999
|
+
} finally {
|
|
2000
|
+
rmSync(tempDir, {
|
|
2001
|
+
recursive: true,
|
|
2002
|
+
force: true
|
|
2003
|
+
});
|
|
1815
2004
|
}
|
|
1816
|
-
if (url.includes("raw.githubusercontent.com")) return fetchGitHubRaw(url);
|
|
1817
|
-
return fetchText(url);
|
|
1818
2005
|
}
|
|
1819
|
-
async function
|
|
1820
|
-
|
|
1821
|
-
|
|
1822
|
-
const
|
|
1823
|
-
const
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
|
|
1829
|
-
|
|
1830
|
-
|
|
1831
|
-
|
|
1832
|
-
|
|
1833
|
-
|
|
1834
|
-
|
|
1835
|
-
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
|
|
1842
|
-
|
|
1843
|
-
|
|
2006
|
+
async function fetchGitLabSkills(source, onProgress) {
|
|
2007
|
+
const { owner, repo } = source;
|
|
2008
|
+
if (!owner || !repo) return { skills: [] };
|
|
2009
|
+
const ref = source.ref || "main";
|
|
2010
|
+
const tempDir = join(tmpdir(), `skilld-gitlab-${Date.now()}`);
|
|
2011
|
+
try {
|
|
2012
|
+
const subdir = source.skillPath || "skills";
|
|
2013
|
+
onProgress?.(`Downloading ${owner}/${repo}/${subdir}@${ref}`);
|
|
2014
|
+
const { dir } = await downloadTemplate(`gitlab:${owner}/${repo}/${subdir}#${ref}`, {
|
|
2015
|
+
dir: tempDir,
|
|
2016
|
+
force: true
|
|
2017
|
+
});
|
|
2018
|
+
if (source.skillPath) {
|
|
2019
|
+
const skill = readLocalSkill(dir, source.skillPath);
|
|
2020
|
+
return { skills: skill ? [skill] : [] };
|
|
2021
|
+
}
|
|
2022
|
+
const skills = [];
|
|
2023
|
+
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
2024
|
+
if (!entry.isDirectory()) continue;
|
|
2025
|
+
const skill = readLocalSkill(resolve(dir, entry.name), `skills/${entry.name}`);
|
|
2026
|
+
if (skill) skills.push(skill);
|
|
2027
|
+
}
|
|
2028
|
+
if (skills.length > 0) {
|
|
2029
|
+
onProgress?.(`Found ${skills.length} skill(s)`);
|
|
2030
|
+
return { skills };
|
|
2031
|
+
}
|
|
2032
|
+
const content = await $fetch(`https://gitlab.com/${owner}/${repo}/-/raw/${ref}/SKILL.md`, { responseType: "text" }).catch(() => null);
|
|
2033
|
+
if (content) {
|
|
2034
|
+
const fm = parseSkillFrontmatterName(content);
|
|
2035
|
+
return { skills: [{
|
|
2036
|
+
name: fm.name || repo,
|
|
2037
|
+
description: fm.description || "",
|
|
2038
|
+
path: "",
|
|
2039
|
+
content,
|
|
2040
|
+
files: []
|
|
2041
|
+
}] };
|
|
2042
|
+
}
|
|
2043
|
+
return { skills: [] };
|
|
2044
|
+
} catch {
|
|
2045
|
+
return { skills: [] };
|
|
2046
|
+
} finally {
|
|
2047
|
+
rmSync(tempDir, {
|
|
2048
|
+
recursive: true,
|
|
2049
|
+
force: true
|
|
2050
|
+
});
|
|
1844
2051
|
}
|
|
1845
|
-
if (!gitDocsUrl && !readmeUrl && !llmsUrl) return null;
|
|
1846
|
-
return {
|
|
1847
|
-
name: repo,
|
|
1848
|
-
version: latestRelease ? version : void 0,
|
|
1849
|
-
releasedAt,
|
|
1850
|
-
description,
|
|
1851
|
-
repoUrl,
|
|
1852
|
-
docsUrl: homepage,
|
|
1853
|
-
gitDocsUrl,
|
|
1854
|
-
gitRef,
|
|
1855
|
-
gitDocsFallback: gitDocs?.fallback,
|
|
1856
|
-
readmeUrl: readmeUrl ?? void 0,
|
|
1857
|
-
llmsUrl
|
|
1858
|
-
};
|
|
1859
2052
|
}
|
|
1860
2053
|
async function searchNpmPackages(query, size = 5) {
|
|
1861
2054
|
const data = await $fetch(`https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`).catch(() => null);
|
|
@@ -2198,7 +2391,7 @@ async function fetchPkgDist(name, version) {
|
|
|
2198
2391
|
if (!data) return null;
|
|
2199
2392
|
const tarballUrl = data.dist?.tarball;
|
|
2200
2393
|
if (!tarballUrl) return null;
|
|
2201
|
-
const tarballRes = await fetch(tarballUrl, { headers: { "User-Agent":
|
|
2394
|
+
const tarballRes = await fetch(tarballUrl, { headers: { "User-Agent": SKILLD_USER_AGENT } }).catch(() => null);
|
|
2202
2395
|
if (!tarballRes?.ok || !tarballRes.body) return null;
|
|
2203
2396
|
mkdirSync(pkgDir, { recursive: true });
|
|
2204
2397
|
const tmpTarball = join(cacheDir, "_pkg.tgz");
|
|
@@ -2267,6 +2460,6 @@ function getInstalledSkillVersion(skillDir) {
|
|
|
2267
2460
|
if (!existsSync(skillPath)) return null;
|
|
2268
2461
|
return readFileSync(skillPath, "utf-8").match(/^version:\s*"?([^"\n]+)"?/m)?.[1] || null;
|
|
2269
2462
|
}
|
|
2270
|
-
export {
|
|
2463
|
+
export { fetchText as $, filterFrameworkDocs as A, fetchGitHubIssues as B, toCrawlPattern as C, fetchGitHubRepoMeta as D, fetchGitDocs as E, extractSections as F, compareSemver as G, generateIssueIndex as H, fetchLlmsTxt as I, isPrerelease as J, fetchReleaseNotes as K, fetchLlmsUrl as L, resolveGitHubRepo as M, validateGitDocsWithLlms as N, fetchReadme as O, downloadLlmsDocs as P, fetchGitHubRaw as Q, normalizeLlmsLinks as R, fetchCrawledDocs as S, MIN_GIT_DOCS as T, isGhAvailable as U, formatIssueAsMarkdown as V, fetchBlogReleases as W, $fetch as X, parseSemver as Y, extractBranchHint as Z, resolveEntryFiles as _, getInstalledSkillVersion as a, verifyUrl as at, formatDiscussionAsMarkdown as b, readLocalPackageInfo as c, resolvePackageDocs as d, isGitHubRepoUrl as et, resolvePackageDocsWithAttempts as f, parseSkillFrontmatterName as g, parseGitSkillInput as h, fetchPkgDist as i, parsePackageSpec as it, isShallowGitDocs as j, fetchReadmeContent as k, resolveInstalledVersion as l, fetchGitSkills as m, fetchNpmPackage as n, parseGitHubRepoSlug as nt, parseVersionSpecifier as o, searchNpmPackages as p, generateReleaseIndex as q, fetchNpmRegistryMeta as r, parseGitHubUrl as rt, readLocalDependencies as s, fetchLatestVersion as t, normalizeRepoUrl as tt, resolveLocalPackageDocs as u, generateDocsIndex as v, resolveCrateDocsWithAttempts as w, generateDiscussionIndex as x, fetchGitHubDiscussions as y, parseMarkdownLinks as z };
|
|
2271
2464
|
|
|
2272
2465
|
//# sourceMappingURL=sources.mjs.map
|