skilld 0.7.0 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -9
- package/dist/agent/index.d.mts +1 -309
- package/dist/agent/index.mjs +1 -6
- package/dist/cache/index.d.mts +1 -2
- package/dist/cache/index.mjs +1 -3
- package/dist/cli.d.mts +1 -1
- package/dist/cli.mjs +1 -4220
- package/dist/index.d.mts +1 -6
- package/dist/index.mjs +1 -10
- package/dist/retriv/index.d.mts +1 -26
- package/dist/retriv/index.mjs +1 -109
- package/dist/retriv/worker.d.mts +1 -33
- package/dist/retriv/worker.mjs +1 -51
- package/dist/sources/index.d.mts +1 -2
- package/dist/sources/index.mjs +1 -4
- package/dist/types.d.mts +1 -6
- package/dist/types.mjs +1 -1
- package/package.json +7 -7
- package/dist/_chunks/chunk.mjs +0 -13
- package/dist/_chunks/config.mjs +0 -25
- package/dist/_chunks/config.mjs.map +0 -1
- package/dist/_chunks/detect-imports.mjs +0 -1915
- package/dist/_chunks/detect-imports.mjs.map +0 -1
- package/dist/_chunks/embedding-cache.mjs +0 -50
- package/dist/_chunks/embedding-cache.mjs.map +0 -1
- package/dist/_chunks/npm.mjs +0 -1821
- package/dist/_chunks/npm.mjs.map +0 -1
- package/dist/_chunks/pool2.mjs +0 -120
- package/dist/_chunks/pool2.mjs.map +0 -1
- package/dist/_chunks/storage.mjs +0 -436
- package/dist/_chunks/storage.mjs.map +0 -1
- package/dist/_chunks/types.d.mts +0 -90
- package/dist/_chunks/types.d.mts.map +0 -1
- package/dist/_chunks/utils.d.mts +0 -529
- package/dist/_chunks/utils.d.mts.map +0 -1
- package/dist/_chunks/version.d.mts +0 -153
- package/dist/_chunks/version.d.mts.map +0 -1
- package/dist/_chunks/yaml.mjs +0 -415
- package/dist/_chunks/yaml.mjs.map +0 -1
- package/dist/agent/index.d.mts.map +0 -1
- package/dist/cli.mjs.map +0 -1
- package/dist/retriv/index.d.mts.map +0 -1
- package/dist/retriv/index.mjs.map +0 -1
- package/dist/retriv/worker.d.mts.map +0 -1
- package/dist/retriv/worker.mjs.map +0 -1
package/dist/_chunks/npm.mjs
DELETED
|
@@ -1,1821 +0,0 @@
|
|
|
1
|
-
import { i as getCacheDir } from "./config.mjs";
|
|
2
|
-
import { a as getDocOverride, i as getBlogPreset, n as yamlParseKV } from "./yaml.mjs";
|
|
3
|
-
import { basename, dirname, join, resolve } from "pathe";
|
|
4
|
-
import { createWriteStream, existsSync, mkdirSync, readFileSync, readdirSync, rmSync, unlinkSync } from "node:fs";
|
|
5
|
-
import { htmlToMarkdown } from "mdream";
|
|
6
|
-
import { spawnSync } from "node:child_process";
|
|
7
|
-
import { ofetch } from "ofetch";
|
|
8
|
-
import { globby } from "globby";
|
|
9
|
-
import pLimit from "p-limit";
|
|
10
|
-
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
11
|
-
import { Writable } from "node:stream";
|
|
12
|
-
import { resolvePathSync } from "mlly";
|
|
13
|
-
/** GitHub accounts whose activity is automated and should be ignored. */
const BOT_USERS = new Set([
  "renovate[bot]",
  "dependabot[bot]",
  "renovate-bot",
  "dependabot",
  "github-actions[bot]"
]);
/** Trim an ISO-8601 timestamp down to its YYYY-MM-DD date part. */
const isoDate = (iso) => iso.split("T")[0];
/**
 * Serialize a flat record into a YAML frontmatter block.
 * String values containing `:`, `"`, `[` or `]` are double-quoted (with inner
 * quotes escaped); entries whose value is undefined are dropped.
 */
function buildFrontmatter(fields) {
  const out = ["---"];
  for (const [key, value] of Object.entries(fields)) {
    if (value === void 0) continue;
    const needsQuoting = typeof value === "string" && /[:"[\]]/.test(value);
    out.push(`${key}: ${needsQuoting ? `"${value.replace(/"/g, "\\\"")}"` : value}`);
  }
  out.push("---");
  return out.join("\n");
}
// Memoized result of probing the `gh` CLI so we only shell out once per process.
let _ghAvailable;
/** True when the GitHub CLI is installed and authenticated (`gh auth status` exits 0). */
function isGhAvailable() {
  if (_ghAvailable !== void 0) return _ghAvailable;
  const probe = spawnSync("gh", ["auth", "status"], { stdio: "ignore" });
  _ghAvailable = probe.status === 0;
  return _ghAvailable;
}
|
|
33
|
-
// Label buckets used to triage issues into coarse categories.
const NOISE_LABELS = new Set([
  "duplicate", "stale", "invalid", "wontfix", "won't fix",
  "spam", "off-topic", "needs triage", "triage"
]);
const FEATURE_LABELS = new Set([
  "enhancement", "feature", "feature request", "feature-request",
  "proposal", "rfc", "idea", "suggestion"
]);
const BUG_LABELS = new Set([
  "bug", "defect", "regression", "error", "crash", "fix", "confirmed", "verified"
]);
const QUESTION_LABELS = new Set([
  "question", "help wanted", "support", "usage", "how-to", "help", "assistance"
]);
const DOCS_LABELS = new Set(["documentation", "docs", "doc", "typo"]);
/**
 * Map an issue's labels to one of: "bug" | "question" | "docs" | "feature" | "other".
 * Precedence when multiple buckets match: bug > question > docs > feature.
 */
function classifyIssue(labels) {
  const normalized = labels.map((label) => label.toLowerCase());
  const matchesAny = (bucket) => normalized.some((label) => bucket.has(label));
  if (matchesAny(BUG_LABELS)) return "bug";
  if (matchesAny(QUESTION_LABELS)) return "question";
  if (matchesAny(DOCS_LABELS)) return "docs";
  if (matchesAny(FEATURE_LABELS)) return "feature";
  return "other";
}
/**
 * True for issues we never want to index: anything carrying a noise label,
 * or umbrella/tracking issues identified by their title prefix.
 */
function isNoiseIssue(issue) {
  const hasNoiseLabel = issue.labels
    .map((label) => label.toLowerCase())
    .some((label) => NOISE_LABELS.has(label));
  if (hasNoiseLabel) return true;
  const { title } = issue;
  return title.startsWith("☂️") || title.startsWith("[META]") || title.startsWith("[Tracking]");
}
/** Character budget for an issue body, scaled by its +1 reaction count. */
function bodyLimit(reactions) {
  if (reactions >= 10) return 2e3;
  if (reactions >= 5) return 1500;
  return 800;
}
|
|
97
|
-
/**
 * Fetch up to `count` issues of the given state via the `gh` CLI search API.
 * When `releasedAt` is known, results are clamped to within six months after
 * the release; otherwise closed issues are limited to the past year.
 * Bot-authored and noise issues are dropped, feature requests sort last, and
 * each issue is shaped with a `type` classification and empty `topComments`.
 */
function fetchIssuesByState(owner, repo, state, count, releasedAt) {
  const fetchCount = Math.min(count * 3, 100);
  const sixMonthsAfterRelease = () => {
    const d = new Date(releasedAt);
    d.setMonth(d.getMonth() + 6);
    return isoDate(d.toISOString());
  };
  let datePart = "";
  if (state === "closed") {
    datePart = releasedAt ? `+closed:<=${sixMonthsAfterRelease()}` : `+closed:>${oneYearAgo()}`;
  } else if (releasedAt) {
    datePart = `+created:<=${sixMonthsAfterRelease()}`;
  }
  const searchQuery = `repo:${owner}/${repo}+is:issue+is:${state}${datePart}`;
  // jq filter emits one JSON object per line (newline-delimited JSON).
  const jqFilter = ".items[] | {number, title, state, labels: [.labels[]?.name], body, createdAt: .created_at, url: .html_url, reactions: .reactions[\"+1\"], comments: .comments, user: .user.login, userType: .user.type}";
  const { stdout: result } = spawnSync("gh", [
    "api",
    `search/issues?q=${searchQuery}&sort=reactions&order=desc&per_page=${fetchCount}`,
    "-q",
    jqFilter
  ], {
    encoding: "utf-8",
    maxBuffer: 10 * 1024 * 1024
  });
  if (!result) return [];
  return result
    .trim()
    .split("\n")
    .filter(Boolean)
    .map((line) => JSON.parse(line))
    .filter((issue) => !BOT_USERS.has(issue.user) && issue.userType !== "Bot")
    .filter((issue) => !isNoiseIssue(issue))
    .map(({ user: _, userType: __, ...issue }) => ({
      ...issue,
      type: classifyIssue(issue.labels),
      topComments: []
    }))
    // Stable sort: non-feature issues first, feature requests last.
    .sort((a, b) => (a.type === "feature" ? 1 : 0) - (b.type === "feature" ? 1 : 0))
    .slice(0, count);
}
/** ISO date (YYYY-MM-DD) exactly one year before now. */
function oneYearAgo() {
  const d = new Date();
  d.setFullYear(d.getFullYear() - 1);
  return isoDate(d.toISOString());
}
|
|
131
|
-
/**
 * Mutates up to `topN` of the given issues in place, attaching their first
 * three comments (bot authors excluded) via a single batched GraphQL query
 * with one alias per issue. Only issues worth the cost are enriched:
 * commented bugs/questions, or anything with >= 3 reactions. All failures
 * are swallowed — enrichment is best-effort and issues remain usable
 * without comments.
 */
function enrichWithComments(owner, repo, issues, topN = 10) {
  const candidates = issues
    .filter((issue) => issue.comments > 0 && (issue.type === "bug" || issue.type === "question" || issue.reactions >= 3))
    .sort((a, b) => b.reactions - a.reactions)
    .slice(0, topN);
  if (candidates.length === 0) return;
  const aliases = candidates
    .map((issue, i) => `i${i}: issue(number: ${issue.number}) { comments(first: 3) { nodes { body author { login } reactions { totalCount } } } }`)
    .join(" ");
  const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { ${aliases} } }`;
  try {
    const { stdout: result } = spawnSync("gh", [
      "api", "graphql",
      "-f", `query=${query}`,
      "-f", `owner=${owner}`,
      "-f", `repo=${repo}`
    ], {
      encoding: "utf-8",
      maxBuffer: 10 * 1024 * 1024
    });
    if (!result) return;
    const repository = JSON.parse(result)?.data?.repository;
    if (!repository) return;
    candidates.forEach((candidate, i) => {
      const nodes = repository[`i${i}`]?.comments?.nodes;
      if (!Array.isArray(nodes)) return;
      candidate.topComments = nodes
        .filter((c) => c.author && !BOT_USERS.has(c.author.login))
        .map((c) => ({
          body: c.body || "",
          author: c.author.login,
          reactions: c.reactions?.totalCount || 0
        }));
    });
  } catch {
    // best-effort: leave topComments untouched on any failure
  }
}
|
|
163
|
-
/**
 * Fetch a mix of open (~75%) and closed (~25%) GitHub issues and enrich the
 * most discussion-worthy ones with their top comments. Returns [] when the
 * `gh` CLI is unavailable or any step throws.
 */
async function fetchGitHubIssues(owner, repo, limit = 30, releasedAt) {
  if (!isGhAvailable()) return [];
  const openCount = Math.ceil(limit * 0.75);
  const closedCount = limit - openCount;
  try {
    const issues = [
      ...fetchIssuesByState(owner, repo, "open", openCount, releasedAt),
      ...fetchIssuesByState(owner, repo, "closed", closedCount, releasedAt)
    ];
    enrichWithComments(owner, repo, issues);
    return issues;
  } catch {
    return [];
  }
}
|
|
177
|
-
/**
 * Render one issue as a standalone markdown document with YAML frontmatter.
 * The body is truncated to a reaction-scaled budget (see bodyLimit); each top
 * comment is capped at 600 characters.
 */
function formatIssueAsMarkdown(issue) {
  const truncate = (text, max) => (text.length > max ? `${text.slice(0, max)}...` : text);
  const fmFields = {
    number: issue.number,
    title: issue.title,
    type: issue.type,
    state: issue.state,
    created: isoDate(issue.createdAt),
    url: issue.url,
    reactions: issue.reactions,
    comments: issue.comments
  };
  if (issue.labels.length > 0) fmFields.labels = `[${issue.labels.join(", ")}]`;
  const lines = [buildFrontmatter(fmFields), "", `# ${issue.title}`];
  if (issue.body) {
    lines.push("", truncate(issue.body, bodyLimit(issue.reactions)));
  }
  if (issue.topComments.length > 0) {
    lines.push("", "---", "", "## Top Comments");
    for (const comment of issue.topComments) {
      const reactions = comment.reactions > 0 ? ` (+${comment.reactions})` : "";
      lines.push("", `**@${comment.author}**${reactions}:`, "", truncate(comment.body, 600));
    }
  }
  return lines.join("\n");
}
|
|
209
|
-
/**
 * Build the issues index document: frontmatter with total/open/closed counts,
 * then per-category link lists in fixed order (bugs first, feature requests
 * last). Empty categories are omitted.
 */
function generateIssueIndex(issues) {
  const byType = new Map();
  for (const issue of issues) {
    if (!byType.has(issue.type)) byType.set(issue.type, []);
    byType.get(issue.type).push(issue);
  }
  const typeLabels = {
    bug: "Bugs & Regressions",
    question: "Questions & Usage Help",
    docs: "Documentation",
    feature: "Feature Requests",
    other: "Other"
  };
  const openTotal = issues.filter((i) => i.state === "open").length;
  const closedTotal = issues.filter((i) => i.state !== "open").length;
  const sections = [
    ["---", `total: ${issues.length}`, `open: ${openTotal}`, `closed: ${closedTotal}`, "---"].join("\n"),
    "",
    "# Issues Index",
    ""
  ];
  for (const type of ["bug", "question", "docs", "other", "feature"]) {
    const group = byType.get(type);
    if (!group?.length) continue;
    sections.push(`## ${typeLabels[type]} (${group.length})`, "");
    for (const issue of group) {
      const reactions = issue.reactions > 0 ? ` (+${issue.reactions})` : "";
      const closedTag = issue.state === "open" ? "" : " [closed]";
      sections.push(`- [#${issue.number}](./issue-${issue.number}.md): ${issue.title}${reactions}${closedTag} (${isoDate(issue.createdAt)})`);
    }
    sections.push("");
  }
  return sections.join("\n");
}
|
|
256
|
-
// Shared HTTP client: 3 retries with 500ms delay, 15s timeout, identifiable UA.
const $fetch = ofetch.create({
  retry: 3,
  retryDelay: 500,
  timeout: 15e3,
  headers: { "User-Agent": "skilld/1.0" }
});
/** GET a URL as text; resolves to null on any failure. */
async function fetchText(url) {
  return $fetch(url, { responseType: "text" }).catch(() => null);
}
/** HEAD-check a URL; true when it is reachable and not an HTML page. */
async function verifyUrl(url) {
  const res = await $fetch.raw(url, { method: "HEAD" }).catch(() => null);
  if (!res) return false;
  const contentType = res.headers.get("content-type") || "";
  return !contentType.includes("text/html");
}
|
|
270
|
-
// Hosts whose pages never contain useful package documentation
// (social networks and registry landing pages).
const USELESS_HOSTS = new Set([
  "twitter.com",
  "x.com",
  "facebook.com",
  "linkedin.com",
  "youtube.com",
  "instagram.com",
  "npmjs.com",
  "www.npmjs.com",
  "yarnpkg.com"
]);
/** True when the URL points to a known low-value host; false for unparseable URLs. */
function isUselessDocsUrl(url) {
  try {
    return USELESS_HOSTS.has(new URL(url).hostname);
  } catch {
    return false;
  }
}
/** True when the URL is hosted on github.com (www included); false for unparseable URLs. */
function isGitHubRepoUrl(url) {
  try {
    const { hostname } = new URL(url);
    return hostname === "github.com" || hostname === "www.github.com";
  } catch {
    return false;
  }
}
|
|
297
|
-
/** Extract {owner, repo} from any github.com URL; null when it doesn't match. */
function parseGitHubUrl(url) {
  const match = url.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:[/#]|$)/);
  return match ? { owner: match[1], repo: match[2] } : null;
}
/**
 * Canonicalize a repository URL from package metadata: strip the git+ prefix,
 * hash fragment and .git suffix, and rewrite git://, ssh:// and scp-style
 * git@ forms to https://.
 */
function normalizeRepoUrl(url) {
  return url
    .replace(/^git\+/, "")
    .replace(/#.*$/, "")
    .replace(/\.git$/, "")
    .replace(/^git:\/\//, "https://")
    .replace(/^ssh:\/\/git@github\.com/, "https://github.com")
    .replace(/^git@github\.com:/, "https://github.com/");
}
/**
 * Pull a branch/ref hint from a URL's hash fragment.
 * Returns undefined for no fragment or the conventional "#readme" anchor.
 */
function extractBranchHint(url) {
  const hashIndex = url.indexOf("#");
  if (hashIndex === -1) return void 0;
  const fragment = url.slice(hashIndex + 1);
  if (!fragment || fragment === "readme") return void 0;
  return fragment;
}
|
|
315
|
-
/**
 * Parse a loose semver string ("v1.2", "1.2.3-beta") into numeric parts.
 * Missing minor/patch default to 0; trailing pre-release text is ignored.
 * Returns null when the string has no leading digits.
 */
function parseSemver(version) {
  const clean = version.replace(/^v/, "");
  const match = clean.match(/^(\d+)(?:\.(\d+))?(?:\.(\d+))?/);
  if (!match) return null;
  const [, major, minor, patch] = match;
  return {
    major: +major,
    minor: minor ? +minor : 0,
    patch: patch ? +patch : 0,
    raw: clean
  };
}
/**
 * Strip a monorepo package prefix ("pkg@1.2.3", "pkg-v1.2.3") or a leading
 * "v" from a git tag, returning the bare version string.
 */
function extractVersion(tag, packageName) {
  if (packageName) {
    const prefix = escapeRegex(packageName);
    const atMatch = tag.match(new RegExp(`^${prefix}@(.+)$`));
    if (atMatch) return atMatch[1];
    const dashMatch = tag.match(new RegExp(`^${prefix}-v?(.+)$`));
    if (dashMatch) return dashMatch[1];
  }
  return tag.replace(/^v/, "");
}
/** Escape all regex metacharacters so a literal string can be embedded in a RegExp. */
function escapeRegex(str) {
  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/** True when a tag follows a monorepo tagging scheme for `packageName`. */
function tagMatchesPackage(tag, packageName) {
  return tag.startsWith(`${packageName}@`)
    || tag.startsWith(`${packageName}-v`)
    || tag.startsWith(`${packageName}-`);
}
/** Standard semver ordering: negative when a < b, zero when equal, positive when a > b. */
function compareSemver(a, b) {
  if (a.major !== b.major) return a.major - b.major;
  if (a.minor !== b.minor) return a.minor - b.minor;
  return a.patch - b.patch;
}
|
|
346
|
-
/**
 * List up to 100 releases for a repository through the authenticated `gh`
 * CLI, reshaped by jq into {id, tag, name, prerelease, createdAt, publishedAt,
 * markdown} records. Returns [] on any failure (missing CLI, timeout, bad JSON).
 */
function fetchReleasesViaGh(owner, repo) {
  const jqFilter = "[.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}]";
  try {
    const { stdout: json } = spawnSync("gh", [
      "api",
      `repos/${owner}/${repo}/releases?per_page=100`,
      "--jq",
      jqFilter
    ], {
      encoding: "utf-8",
      timeout: 15e3,
      stdio: ["ignore", "pipe", "ignore"]
    });
    return json ? JSON.parse(json) : [];
  } catch {
    return [];
  }
}
|
|
368
|
-
/** Fallback release listing via the public ungh.cc mirror (no auth required). */
async function fetchReleasesViaUngh(owner, repo) {
  const response = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`, {
    signal: AbortSignal.timeout(15e3)
  }).catch(() => null);
  return response?.releases ?? [];
}
/** Prefer the gh CLI for releases; fall back to ungh.cc when it yields nothing. */
async function fetchAllReleases(owner, repo) {
  if (isGhAvailable()) {
    const releases = fetchReleasesViaGh(owner, repo);
    if (releases.length > 0) return releases;
  }
  return fetchReleasesViaUngh(owner, repo);
}
|
|
378
|
-
/**
 * Pick the 20 most recent stable releases relevant to the installed package:
 * prereleases are dropped, tags are matched against `packageName` when the
 * repo uses monorepo-style tags, and anything newer than `installedVersion`
 * is excluded. The result is sorted newest-first by semver.
 */
function selectReleases(releases, packageName, installedVersion) {
  const hasMonorepoTags = packageName && releases.some((r) => tagMatchesPackage(r.tag, packageName));
  const tagFilter = hasMonorepoTags ? packageName : void 0;
  const installedSv = installedVersion ? parseSemver(installedVersion) : null;
  const isEligible = (release) => {
    if (release.prerelease) return false;
    const version = extractVersion(release.tag, tagFilter);
    if (!version) return false;
    const sv = parseSemver(version);
    if (!sv) return false;
    if (hasMonorepoTags && packageName && !tagMatchesPackage(release.tag, packageName)) return false;
    if (installedSv && compareSemver(sv, installedSv) > 0) return false;
    return true;
  };
  const newestFirst = (a, b) => {
    const verA = extractVersion(a.tag, tagFilter);
    const verB = extractVersion(b.tag, tagFilter);
    if (!verA || !verB) return 0;
    return compareSemver(parseSemver(verB), parseSemver(verA));
  };
  return releases.filter(isEligible).sort(newestFirst).slice(0, 20);
}
|
|
397
|
-
/**
 * Render one GitHub release as markdown with tag/version/published
 * frontmatter. The release name is included (quoted) only when it differs
 * from the tag.
 */
function formatRelease(release, packageName) {
  const date = isoDate(release.publishedAt || release.createdAt);
  const version = extractVersion(release.tag, packageName) || release.tag;
  const fm = ["---", `tag: ${release.tag}`, `version: ${version}`, `published: ${date}`];
  if (release.name && release.name !== release.tag) {
    fm.push(`name: "${release.name.replace(/"/g, "\\\"")}"`);
  }
  fm.push("---");
  return `${fm.join("\n")}\n\n# ${release.name || release.tag}\n\n${release.markdown}`;
}
|
|
410
|
-
/**
 * Build the releases index document. Accepts either a plain release array
 * (legacy call shape) or an options object {releases, blogReleases,
 * hasChangelog, packageName}. Emits frontmatter with totals, then optional
 * blog-release links, release-note links (major/minor releases flagged), and
 * a CHANGELOG link.
 */
function generateReleaseIndex(releasesOrOpts, packageName) {
  const opts = Array.isArray(releasesOrOpts) ? {
    releases: releasesOrOpts,
    packageName
  } : releasesOrOpts;
  const { releases, blogReleases, hasChangelog } = opts;
  const pkg = opts.packageName;
  const lines = [
    [
      "---",
      `total: ${releases.length + (blogReleases?.length ?? 0)}`,
      `latest: ${releases[0]?.tag || "unknown"}`,
      "---"
    ].join("\n"),
    "",
    "# Releases Index",
    ""
  ];
  if (blogReleases && blogReleases.length > 0) {
    lines.push("## Blog Releases", "");
    for (const b of blogReleases) {
      lines.push(`- [${b.version}](./blog-${b.version}.md): ${b.title} (${b.date})`);
    }
    lines.push("");
  }
  if (releases.length > 0) {
    if (blogReleases && blogReleases.length > 0) lines.push("## Release Notes", "");
    for (const r of releases) {
      const date = isoDate(r.publishedAt || r.createdAt);
      // Mirrors the filenames produced by fetchReleaseNotes: bare tags get a "v" prefix.
      const filename = r.tag.includes("@") || r.tag.startsWith("v") ? r.tag : `v${r.tag}`;
      const sv = parseSemver(extractVersion(r.tag, pkg) || r.tag);
      const label = sv?.patch === 0 && sv.minor === 0 ? " **[MAJOR]**" : sv?.patch === 0 ? " **[MINOR]**" : "";
      // BUG FIX: the link previously interpolated the literal "$(unknown)"
      // instead of the computed `filename` (which was otherwise unused),
      // producing dead links like "./$(unknown).md" for every release.
      lines.push(`- [${r.tag}](./${filename}.md): ${r.name || r.tag} (${date})${label}`);
    }
    lines.push("");
  }
  if (hasChangelog) {
    lines.push("## Changelog", "");
    lines.push("- [CHANGELOG.md](./CHANGELOG.md)");
    lines.push("");
  }
  return lines.join("\n");
}
|
|
451
|
-
/**
 * Heuristic: true when the newest releases all carry short bodies that just
 * point at CHANGELOG.md — meaning real notes live in the changelog file.
 * Checks up to the first three releases; false for an empty list.
 */
function isChangelogRedirectPattern(releases) {
  const sample = releases.slice(0, 3);
  if (sample.length === 0) return false;
  const looksLikeRedirect = (release) => {
    const body = (release.markdown || "").trim();
    return body.length < 500 && /changelog\.md/i.test(body);
  };
  return sample.every(looksLikeRedirect);
}
|
|
459
|
-
/**
 * Fetch the repository changelog at `ref` from raw.githubusercontent.com,
 * trying common filenames in order. Returns the raw markdown text, or null
 * when none of the candidates exists.
 */
async function fetchChangelog(owner, repo, ref) {
  for (const filename of [
    "CHANGELOG.md",
    "changelog.md",
    "CHANGES.md"
  ]) {
    // BUG FIX: the URL previously embedded the literal "$(unknown)" instead
    // of the loop's candidate `filename`, so no changelog could ever resolve.
    const content = await $fetch(`https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${filename}`, {
      responseType: "text",
      signal: AbortSignal.timeout(1e4)
    }).catch(() => null);
    if (content) return content;
  }
  return null;
}
|
|
473
|
-
/**
 * Assemble release-notes documents for a repository. When releases exist:
 * either the CHANGELOG alone (if the releases merely redirect to it) or one
 * formatted doc per release plus the CHANGELOG when it is under 500KB.
 * With no usable releases, fall back to the CHANGELOG at `gitRef` or "main".
 */
async function fetchReleaseNotes(owner, repo, installedVersion, gitRef, packageName) {
  const asChangelogDoc = (content) => [{ path: "releases/CHANGELOG.md", content }];
  const selected = selectReleases(await fetchAllReleases(owner, repo), packageName, installedVersion);
  if (selected.length === 0) {
    const changelog = await fetchChangelog(owner, repo, gitRef || "main");
    return changelog ? asChangelogDoc(changelog) : [];
  }
  const ref = gitRef || selected[0].tag;
  if (isChangelogRedirectPattern(selected)) {
    const changelog = await fetchChangelog(owner, repo, ref);
    if (changelog) return asChangelogDoc(changelog);
  }
  const docs = selected.map((release) => {
    const filename = release.tag.includes("@") || release.tag.startsWith("v") ? release.tag : `v${release.tag}`;
    return {
      path: `releases/${filename}.md`,
      content: formatRelease(release, packageName)
    };
  });
  const changelog = await fetchChangelog(owner, repo, ref);
  if (changelog && changelog.length < 5e5) {
    docs.push({ path: "releases/CHANGELOG.md", content: changelog });
  }
  return docs;
}
|
|
503
|
-
/**
 * Extract "MAJOR.MINOR" from a blog-post URL shaped like /posts/name-X-Y.
 * Returns null when the URL does not match that pattern.
 */
function parseVersionFromUrl(url) {
  const match = url.match(/\/posts\/\w+-(\d+)-(\d+)/);
  return match ? `${match[1]}.${match[2]}` : null;
}
/** Render a scraped blog release post as markdown with frontmatter. */
function formatBlogRelease(release) {
  const frontmatter = [
    "---",
    `version: ${release.version}`,
    `title: "${release.title.replace(/"/g, "\\\"")}"`,
    `date: ${release.date}`,
    `url: ${release.url}`,
    `source: blog-release`,
    "---"
  ].join("\n");
  return `${frontmatter}\n\n# ${release.title}\n\n${release.markdown}`;
}
|
|
519
|
-
/**
 * Scrape a release blog post: derive the version from the URL, pull the title
 * from the first <h1> (falling back to <title>), sniff a YYYY-MM-DD publish
 * date (defaulting to today), and convert the HTML body to markdown.
 * Returns null when the fetch, version, or markdown conversion fails.
 */
async function fetchBlogPost(url) {
  try {
    const html = await $fetch(url, {
      responseType: "text",
      signal: AbortSignal.timeout(1e4)
    }).catch(() => null);
    if (!html) return null;
    const version = parseVersionFromUrl(url);
    if (!version) return null;
    let title = "";
    const h1Match = html.match(/<h1[^>]*>([^<]+)<\/h1>/);
    if (h1Match) title = h1Match[1].trim();
    if (!title) {
      const titleTagMatch = html.match(/<title>([^<]+)<\/title>/);
      if (titleTagMatch) title = titleTagMatch[1].trim();
    }
    const dateMatch = html.match(/(?:published|date|posted)["']?\s*:\s*["']?(\d{4}-\d{2}-\d{2})/);
    const date = dateMatch ? dateMatch[1] : new Date().toISOString().split("T")[0];
    const markdown = htmlToMarkdown(html);
    if (!markdown) return null;
    return {
      version,
      title: title || `Release ${version}`,
      date,
      markdown,
      url
    };
  } catch {
    return null;
  }
}
|
|
551
|
-
/**
 * Drop blog entries for versions newer than the installed one. When the
 * installed version itself cannot be parsed, all entries pass through;
 * entries with unparseable versions are dropped.
 */
function filterBlogsByVersion(entries, installedVersion) {
  const installedSv = parseSemver(installedVersion);
  if (!installedSv) return entries;
  return entries.filter((entry) => {
    const entrySv = parseSemver(entry.version);
    return entrySv ? compareSemver(entrySv, installedSv) <= 0 : false;
  });
}
|
|
560
|
-
/**
 * Fetch curated blog release posts for a package (when a preset exists),
 * three at a time, skipping versions above the installed one. Results are
 * sorted newest-version-first and shaped as {path, content} documents.
 */
async function fetchBlogReleases(packageName, installedVersion) {
  const preset = getBlogPreset(packageName);
  if (!preset) return [];
  const entries = filterBlogsByVersion(preset.releases, installedVersion);
  if (entries.length === 0) return [];
  const posts = [];
  const batchSize = 3;
  // Fetch in small parallel batches to avoid hammering the blog host.
  for (let offset = 0; offset < entries.length; offset += batchSize) {
    const batch = entries.slice(offset, offset + batchSize);
    const fetched = await Promise.all(batch.map((entry) => fetchBlogPost(entry.url)));
    for (const post of fetched) {
      if (post) posts.push(post);
    }
  }
  if (posts.length === 0) return [];
  // Descending dotted-version order, comparing each numeric component.
  const byVersionDesc = (a, b) => {
    const aParts = a.version.split(".").map(Number);
    const bParts = b.version.split(".").map(Number);
    for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) {
      const diff = (bParts[i] ?? 0) - (aParts[i] ?? 0);
      if (diff !== 0) return diff;
    }
    return 0;
  };
  posts.sort(byVersionDesc);
  return posts.map((post) => ({
    path: `releases/blog-${post.version}.md`,
    content: formatBlogRelease(post)
  }));
}
|
|
587
|
-
// Discussion categories most likely to contain actionable Q&A content …
const HIGH_VALUE_CATEGORIES = new Set([
  "q&a",
  "help",
  "troubleshooting",
  "support"
]);
// … and those we exclude entirely.
const LOW_VALUE_CATEGORIES = new Set([
  "show and tell",
  "ideas",
  "polls"
]);
/**
 * Fetch up to `limit` recent GitHub discussions via a single GraphQL query.
 * Returns [] when the `gh` CLI is unavailable, when the installed release is
 * more than six months old, or on any failure. Bot-authored and low-value
 * category discussions are dropped; Q&A-style categories sort first, then by
 * upvotes + comment count.
 */
async function fetchGitHubDiscussions(owner, repo, limit = 20, releasedAt) {
  if (!isGhAvailable()) return [];
  if (releasedAt) {
    const cutoff = new Date(releasedAt);
    cutoff.setMonth(cutoff.getMonth() + 6);
    if (cutoff < new Date()) return [];
  }
  const pageSize = Math.min(limit * 3, 80);
  const query = `query($owner: String!, $repo: String!) { repository(owner: $owner, name: $repo) { discussions(first: ${pageSize}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments(first: 3) { totalCount nodes { body author { login } } } answer { body } author { login } } } } }`;
  try {
    const { stdout: result } = spawnSync("gh", [
      "api", "graphql",
      "-f", `query=${query}`,
      "-f", `owner=${owner}`,
      "-f", `repo=${repo}`
    ], {
      encoding: "utf-8",
      maxBuffer: 10 * 1024 * 1024
    });
    if (!result) return [];
    const nodes = JSON.parse(result)?.data?.repository?.discussions?.nodes;
    if (!Array.isArray(nodes)) return [];
    const highValue = (category) => HIGH_VALUE_CATEGORIES.has(category.toLowerCase()) ? 1 : 0;
    return nodes
      .filter((d) => d.author && !BOT_USERS.has(d.author.login))
      .filter((d) => !LOW_VALUE_CATEGORIES.has((d.category?.name || "").toLowerCase()))
      .map((d) => ({
        number: d.number,
        title: d.title,
        body: d.body || "",
        category: d.category?.name || "",
        createdAt: d.createdAt,
        url: d.url,
        upvoteCount: d.upvoteCount || 0,
        comments: d.comments?.totalCount || 0,
        answer: d.answer?.body || void 0,
        topComments: (d.comments?.nodes || [])
          .filter((c) => c.author && !BOT_USERS.has(c.author.login))
          .map((c) => ({ body: c.body || "", author: c.author.login }))
      }))
      .sort((a, b) => {
        const priority = highValue(b.category) - highValue(a.category);
        if (priority !== 0) return priority;
        return b.upvoteCount + b.comments - (a.upvoteCount + a.comments);
      })
      .slice(0, limit);
  } catch {
    return [];
  }
}
|
|
649
|
-
/**
 * Render one discussion as markdown: frontmatter, a truncated body (1500
 * chars when upvotes >= 5, else 800), then either the accepted answer
 * (capped at 1000 chars) or the top comments (600 chars each).
 */
function formatDiscussionAsMarkdown(d) {
  const truncate = (text, max) => (text.length > max ? `${text.slice(0, max)}...` : text);
  const fm = buildFrontmatter({
    number: d.number,
    title: d.title,
    category: d.category,
    created: isoDate(d.createdAt),
    url: d.url,
    upvotes: d.upvoteCount,
    comments: d.comments,
    answered: !!d.answer
  });
  const lines = [fm, "", `# ${d.title}`];
  if (d.body) {
    lines.push("", truncate(d.body, d.upvoteCount >= 5 ? 1500 : 800));
  }
  if (d.answer) {
    lines.push("", "---", "", "## Accepted Answer", "", truncate(d.answer, 1e3));
  } else if (d.topComments.length > 0) {
    lines.push("", "---", "", "## Top Comments");
    for (const comment of d.topComments) {
      lines.push("", `**@${comment.author}:**`, "", truncate(comment.body, 600));
    }
  }
  return lines.join("\n");
}
|
|
683
|
-
/**
 * Build the discussions index document: summary frontmatter (total/answered),
 * then per-category link lists with Q&A-style categories first, alphabetical
 * within the same tier.
 */
function generateDiscussionIndex(discussions) {
  const byCategory = new Map();
  for (const d of discussions) {
    const cat = d.category || "Uncategorized";
    if (!byCategory.has(cat)) byCategory.set(cat, []);
    byCategory.get(cat).push(d);
  }
  const answeredTotal = discussions.filter((d) => d.answer).length;
  const sections = [
    ["---", `total: ${discussions.length}`, `answered: ${answeredTotal}`, "---"].join("\n"),
    "",
    "# Discussions Index",
    ""
  ];
  const tier = (cat) => (HIGH_VALUE_CATEGORIES.has(cat.toLowerCase()) ? 0 : 1);
  const cats = [...byCategory.keys()].sort((a, b) => tier(a) - tier(b) || a.localeCompare(b));
  for (const cat of cats) {
    const group = byCategory.get(cat);
    sections.push(`## ${cat} (${group.length})`, "");
    for (const d of group) {
      const upvotes = d.upvoteCount > 0 ? ` (+${d.upvoteCount})` : "";
      const answered = d.answer ? " [answered]" : "";
      sections.push(`- [#${d.number}](./discussion-${d.number}.md): ${d.title}${upvotes}${answered} (${isoDate(d.createdAt)})`);
    }
    sections.push("");
  }
  return sections.join("\n");
}
|
|
719
|
-
// Directory names never scanned for declaration files: vendored code, test
// suites and fixtures, localization bundles, and VCS metadata.
const SKIP_DIRS = [
  "node_modules",
  "_vendor",
  "__tests__",
  "__mocks__",
  "__fixtures__",
  "test",
  "tests",
  "fixture",
  "fixtures",
  "locales",
  "locale",
  "i18n",
  ".git"
];
// File-name glob patterns excluded from scanning: minified/bundled build
// variants, sourcemaps, and boilerplate project docs.
const SKIP_PATTERNS = [
  "*.min.*",
  "*.prod.*",
  "*.global.*",
  "*.browser.*",
  "*.map",
  "*.map.js",
  "CHANGELOG*",
  "LICENSE*",
  "README*"
];
// Per-file size budget (500 KiB); larger files are skipped entirely.
const MAX_FILE_SIZE = 500 * 1024;
|
|
746
|
-
/**
 * Collect TypeScript declaration files (*.d.ts / *.d.mts / *.d.cts) from an
 * installed package directory, skipping vendored/test/i18n paths and files
 * over the size budget. Returns [] when the directory has no package.json.
 *
 * @param {string} packageDir - Root of the installed package.
 * @returns {Promise<Array<{path: string, content: string, type: "types"}>>}
 */
async function resolveEntryFiles(packageDir) {
  if (!existsSync(join(packageDir, "package.json"))) return [];
  const files = await globby(["**/*.d.{ts,mts,cts}"], {
    cwd: packageDir,
    ignore: [...SKIP_DIRS.map((d) => `**/${d}/**`), ...SKIP_PATTERNS],
    absolute: false
  });
  const entries = [];
  for (const file of files) {
    const absPath = join(packageDir, file);
    let content;
    try {
      content = readFileSync(absPath, "utf-8");
    } catch {
      // Unreadable file (permissions, broken symlink): skip it silently.
      continue;
    }
    // NOTE(review): compares character count against a byte budget — close
    // enough for ASCII-dominated .d.ts files.
    if (content.length > MAX_FILE_SIZE) continue;
    entries.push({
      path: file,
      content,
      type: "types"
    });
  }
  return entries;
}
|
|
771
|
-
/**
 * Classify a user-supplied skill source string into a structured source:
 * local filesystem path, GitHub/GitLab repo, or null for npm-scoped names
 * and anything unrecognized.
 */
function parseGitSkillInput(input) {
  const value = input.trim();
  // npm scope syntax is not a git source.
  if (value.startsWith("@")) return null;
  // Filesystem paths: relative, absolute, or ~-prefixed.
  if (["./", "../", "/", "~"].some((p) => value.startsWith(p))) {
    const localPath = value.startsWith("~")
      ? resolve(process.env.HOME || "", value.slice(1))
      : resolve(value);
    return { type: "local", localPath };
  }
  // SSH-style remote: normalize to an https URL before parsing.
  if (value.startsWith("git@")) {
    const gh = parseGitHubUrl(normalizeRepoUrl(value));
    return gh ? { type: "github", owner: gh.owner, repo: gh.repo } : null;
  }
  if (value.startsWith("https://") || value.startsWith("http://")) return parseGitUrl(value);
  // Bare "owner/repo" shorthand.
  if (/^[\w.-]+\/[\w.-]+$/.test(value)) {
    const [owner, repo] = value.split("/");
    return { type: "github", owner, repo };
  }
  return null;
}
|
|
795
|
-
/**
 * Parse an http(s) repo URL into a structured source. Supports github.com
 * (including /tree/<ref>[/<path>] deep links) and gitlab.com; anything else,
 * including malformed URLs, yields null.
 */
function parseGitUrl(url) {
  // Repo pathname → segments, with leading slash and .git suffix removed.
  const segments = (pathname) => pathname.replace(/^\//, "").replace(/\.git$/, "").split("/");
  try {
    const parsed = new URL(url);
    const host = parsed.hostname;
    if (host === "github.com" || host === "www.github.com") {
      const parts = segments(parsed.pathname);
      const [owner, repo] = parts;
      if (!owner || !repo) return null;
      // "/tree/<ref>[/<path>]" pins a ref and optionally a skill subdirectory.
      if (parts[2] === "tree" && parts.length >= 4) {
        return {
          type: "github",
          owner,
          repo,
          ref: parts[3],
          skillPath: parts.length > 4 ? parts.slice(4).join("/") : void 0
        };
      }
      return { type: "github", owner, repo };
    }
    if (host === "gitlab.com") {
      const [owner, repo] = segments(parsed.pathname);
      if (!owner || !repo) return null;
      return { type: "gitlab", owner, repo };
    }
    return null;
  } catch {
    return null;
  }
}
|
|
832
|
-
/**
 * Extract the name/description keys from a SKILL.md YAML frontmatter fence.
 * Returns an empty object when no leading "---" fence is present.
 */
function parseSkillFrontmatterName(content) {
  const match = content.match(/^---\n([\s\S]*?)\n---/);
  if (!match) return {};
  const result = {};
  for (const line of match[1].split("\n")) {
    const kv = yamlParseKV(line);
    if (!kv) continue;
    const [key, value] = kv;
    if (key === "name") result.name = value;
    else if (key === "description") result.description = value;
  }
  return result;
}
|
|
844
|
-
// Skill subdirectories whose files are bundled alongside SKILL.md.
const SUPPORTING_DIRS = [
  "scripts",
  "references",
  "assets"
];
|
|
849
|
-
/**
 * Dispatch skill fetching to the handler for the source type; unknown types
 * resolve to an empty skill list.
 */
async function fetchGitSkills(source, onProgress) {
  switch (source.type) {
    case "local":
      return fetchLocalSkills(source);
    case "github":
      return fetchGitHubSkills(source, onProgress);
    case "gitlab":
      return fetchGitLabSkills(source, onProgress);
    default:
      return { skills: [] };
  }
}
|
|
855
|
-
/**
 * Load skills from a local directory. Prefers a skills/<name>/SKILL.md
 * layout; when that yields nothing, falls back to treating the base
 * directory itself as a single skill.
 *
 * @param {{localPath: string}} source
 * @returns {{skills: Array<object>}}
 */
function fetchLocalSkills(source) {
  const base = source.localPath;
  if (!existsSync(base)) return { skills: [] };
  const skills = [];
  const skillsDir = resolve(base, "skills");
  if (existsSync(skillsDir)) for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {
    if (!entry.isDirectory()) continue;
    const skill = readLocalSkill(resolve(skillsDir, entry.name), `skills/${entry.name}`);
    if (skill) skills.push(skill);
  }
  if (skills.length === 0) {
    // Fallback: the directory itself may hold a single root-level SKILL.md.
    const skill = readLocalSkill(base, "");
    if (skill) skills.push(skill);
  }
  return { skills };
}
|
|
871
|
-
/**
 * Read one skill from a directory containing SKILL.md, plus any supporting
 * files under scripts/, references/ and assets/.
 * Returns null when the directory has no SKILL.md.
 *
 * @param {string} dir - Absolute directory of the skill.
 * @param {string} repoPath - Repo-relative path recorded on the skill.
 */
function readLocalSkill(dir, repoPath) {
  const skillMdPath = resolve(dir, "SKILL.md");
  if (!existsSync(skillMdPath)) return null;
  const content = readFileSync(skillMdPath, "utf-8");
  const frontmatter = parseSkillFrontmatterName(content);
  // FIX: split on both separators so the directory-name fallback also works
  // on Windows, where resolve() produces backslash-separated paths.
  const dirName = dir.split(/[\\/]/).pop();
  const name = frontmatter.name || dirName;
  const files = [];
  for (const subdir of SUPPORTING_DIRS) {
    const subdirPath = resolve(dir, subdir);
    if (!existsSync(subdirPath)) continue;
    for (const file of readdirSync(subdirPath, { withFileTypes: true })) {
      if (!file.isFile()) continue;
      files.push({
        path: `${subdir}/${file.name}`,
        content: readFileSync(resolve(subdirPath, file.name), "utf-8")
      });
    }
  }
  return {
    name,
    description: frontmatter.description || "",
    path: repoPath,
    content,
    files
  };
}
|
|
898
|
-
/**
 * Fetch skills from a GitHub repo via the ungh.cc file-listing API.
 * Tries the requested ref (default "main") and falls back to "master"
 * when "main" lists no files.
 */
async function fetchGitHubSkills(source, onProgress) {
  const { owner, repo } = source;
  if (!owner || !repo) return { skills: [] };
  const ref = source.ref || "main";
  onProgress?.(`Listing files at ${owner}/${repo}@${ref}`);
  const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/files/${ref}`).catch(() => null);
  if (!data?.files?.length) {
    if (ref === "main") {
      // The repo may still use the legacy default branch name.
      const fallback = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/files/master`).catch(() => null);
      if (fallback?.files?.length) return extractGitHubSkills(owner, repo, "master", fallback, source.skillPath, onProgress);
    }
    return { skills: [] };
  }
  return extractGitHubSkills(owner, repo, ref, data, source.skillPath, onProgress);
}
|
|
913
|
-
/**
 * Given a repo file listing from ungh.cc, download SKILL.md manifests and
 * their supporting scripts/references/assets files, with at most 5
 * concurrent SKILL.md downloads. When skillPath is provided only that
 * skill is extracted; otherwise skills/<name>/SKILL.md entries plus a
 * root-level SKILL.md are considered.
 *
 * @returns {Promise<{skills: Array<object>, commitSha: string|undefined}>}
 */
async function extractGitHubSkills(owner, repo, ref, data, skillPath, onProgress) {
  const allFiles = data.files.map((f) => f.path);
  const commitSha = data.meta?.sha;
  let skillMdPaths;
  if (skillPath) {
    // Accept the path with or without a trailing /SKILL.md.
    const candidates = [`${skillPath}/SKILL.md`, skillPath.endsWith("/SKILL.md") ? skillPath : null].filter(Boolean);
    skillMdPaths = allFiles.filter((f) => candidates.includes(f));
  } else skillMdPaths = allFiles.filter((f) => f.match(/^skills\/[^/]+\/SKILL\.md$/) || f === "SKILL.md");
  if (skillMdPaths.length === 0) return {
    skills: [],
    commitSha
  };
  const limit = pLimit(5);
  const skills = [];
  onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`);
  await Promise.all(skillMdPaths.map((mdPath) => limit(async () => {
    const skillDir = mdPath === "SKILL.md" ? "" : mdPath.replace(/\/SKILL\.md$/, "");
    const content = await fetchRawGitHub(owner, repo, ref, mdPath);
    if (!content) return;
    const frontmatter = parseSkillFrontmatterName(content);
    // Root-level skills fall back to the repo name for display.
    const dirName = skillDir ? skillDir.split("/").pop() : repo;
    const name = frontmatter.name || dirName;
    const supportingFiles = [];
    const prefix = skillDir ? `${skillDir}/` : "";
    for (const subdir of SUPPORTING_DIRS) {
      const subdirPrefix = `${prefix}${subdir}/`;
      const matching = allFiles.filter((f) => f.startsWith(subdirPrefix));
      for (const filePath of matching) {
        // Supporting files download sequentially within each skill's task.
        const fileContent = await fetchRawGitHub(owner, repo, ref, filePath);
        if (fileContent) {
          // Store paths relative to the skill directory.
          const relativePath = filePath.slice(prefix.length);
          supportingFiles.push({
            path: relativePath,
            content: fileContent
          });
        }
      }
    }
    skills.push({
      name,
      description: frontmatter.description || "",
      path: skillDir,
      content,
      files: supportingFiles
    });
  })));
  return {
    skills,
    commitSha
  };
}
|
|
964
|
-
/**
 * Fetch a file's raw text from raw.githubusercontent.com; null on failure.
 */
async function fetchRawGitHub(owner, repo, ref, path) {
  const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${path}`;
  return $fetch(rawUrl, { responseType: "text" }).catch(() => null);
}
|
|
967
|
-
/**
 * Fetch skills from a GitLab repo via the public v4 repository-tree API.
 * Mirrors fetchGitHubSkills/extractGitHubSkills, but returns no commit sha.
 */
async function fetchGitLabSkills(source, onProgress) {
  const { owner, repo } = source;
  if (!owner || !repo) return { skills: [] };
  const ref = source.ref || "main";
  // GitLab project IDs are the URL-encoded "owner/repo" path.
  const projectId = encodeURIComponent(`${owner}/${repo}`);
  onProgress?.(`Listing files at ${owner}/${repo}@${ref}`);
  // NOTE(review): per_page=100 with no pagination — repos with more tree
  // entries may silently miss skills.
  const tree = await $fetch(`https://gitlab.com/api/v4/projects/${projectId}/repository/tree?ref=${ref}&recursive=true&per_page=100`).catch(() => null);
  if (!tree?.length) return { skills: [] };
  const allFiles = tree.filter((e) => e.type === "blob").map((e) => e.path);
  const skillMdPaths = source.skillPath ? allFiles.filter((f) => f === `${source.skillPath}/SKILL.md`) : allFiles.filter((f) => f.match(/^skills\/[^/]+\/SKILL\.md$/) || f === "SKILL.md");
  if (skillMdPaths.length === 0) return { skills: [] };
  const limit = pLimit(5);
  const skills = [];
  onProgress?.(`Found ${skillMdPaths.length} skill(s), downloading...`);
  await Promise.all(skillMdPaths.map((mdPath) => limit(async () => {
    const skillDir = mdPath === "SKILL.md" ? "" : mdPath.replace(/\/SKILL\.md$/, "");
    const content = await fetchRawGitLab(owner, repo, ref, mdPath);
    if (!content) return;
    const frontmatter = parseSkillFrontmatterName(content);
    // Root-level skills fall back to the repo name for display.
    const dirName = skillDir ? skillDir.split("/").pop() : repo;
    const name = frontmatter.name || dirName;
    const supportingFiles = [];
    const prefix = skillDir ? `${skillDir}/` : "";
    for (const subdir of SUPPORTING_DIRS) {
      const subdirPrefix = `${prefix}${subdir}/`;
      const matching = allFiles.filter((f) => f.startsWith(subdirPrefix));
      for (const filePath of matching) {
        const fileContent = await fetchRawGitLab(owner, repo, ref, filePath);
        if (fileContent) {
          // Store paths relative to the skill directory.
          const relativePath = filePath.slice(prefix.length);
          supportingFiles.push({
            path: relativePath,
            content: fileContent
          });
        }
      }
    }
    skills.push({
      name,
      description: frontmatter.description || "",
      path: skillDir,
      content,
      files: supportingFiles
    });
  })));
  return { skills };
}
|
|
1014
|
-
/**
 * Fetch a file's raw text from gitlab.com's raw endpoint; null on failure.
 */
async function fetchRawGitLab(owner, repo, ref, path) {
  const rawUrl = `https://gitlab.com/${owner}/${repo}/-/raw/${ref}/${path}`;
  return $fetch(rawUrl, { responseType: "text" }).catch(() => null);
}
|
|
1017
|
-
// Fewer than this many doc files counts as "shallow" git docs coverage.
const MIN_GIT_DOCS = 5;
// True when some docs exist but fewer than MIN_GIT_DOCS were found.
const isShallowGitDocs = (n) => n > 0 && n < MIN_GIT_DOCS;
|
|
1019
|
-
/**
 * List all file paths in a repo at a ref via ungh.cc; [] when the ref does
 * not exist or the request fails.
 */
async function listFilesAtRef(owner, repo, ref) {
  const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/files/${ref}`).catch(() => null);
  return data?.files?.map((f) => f.path) ?? [];
}
|
|
1022
|
-
/**
 * Locate a git ref holding files for a package version. Attempts, in order:
 *   1. tag names "v<version>", "<version>", "<packageName>@<version>",
 *   2. the latest "<packageName>@..." release tag (monorepo convention),
 *   3. default branches (branchHint first, then main/master) — these
 *      results are flagged with fallback: true since they are not
 *      version-exact.
 * Returns { ref, files, fallback? } or null when nothing lists any files.
 */
async function findGitTag(owner, repo, version, packageName, branchHint) {
  const candidates = [`v${version}`, version];
  if (packageName) candidates.push(`${packageName}@${version}`);
  for (const tag of candidates) {
    const files = await listFilesAtRef(owner, repo, tag);
    if (files.length > 0) return {
      ref: tag,
      files
    };
  }
  if (packageName) {
    // Monorepos: fall back to the newest release tagged for this package.
    const latestTag = await findLatestReleaseTag(owner, repo, packageName);
    if (latestTag) {
      const files = await listFilesAtRef(owner, repo, latestTag);
      if (files.length > 0) return {
        ref: latestTag,
        files
      };
    }
  }
  // Last resort: default branches, trying the hinted branch first.
  const branches = branchHint ? [branchHint, ...["main", "master"].filter((b) => b !== branchHint)] : ["main", "master"];
  for (const branch of branches) {
    const files = await listFilesAtRef(owner, repo, branch);
    if (files.length > 0) return {
      ref: branch,
      files,
      fallback: true
    };
  }
  return null;
}
|
|
1053
|
-
/**
 * Find the most recent release whose tag has the "<packageName>@" prefix;
 * null when no releases match or the request fails.
 */
async function findLatestReleaseTag(owner, repo, packageName) {
  const data = await $fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`).catch(() => null);
  const wanted = `${packageName}@`;
  const hit = data?.releases?.find((r) => r.tag.startsWith(wanted));
  return hit?.tag ?? null;
}
|
|
1058
|
-
/**
 * Keep only markdown/MDX files located under the given path prefix.
 */
function filterDocFiles(files, pathPrefix) {
  const isMarkdown = (f) => /\.(?:md|mdx)$/.test(f);
  return files.filter((f) => f.startsWith(pathPrefix) && isMarkdown(f));
}
|
|
1061
|
-
// Markdown files that are repo housekeeping rather than documentation.
const NOISE_PATTERNS = [
  /^\.changeset\//,
  /CHANGELOG\.md$/i,
  /CONTRIBUTING\.md$/i,
  /^\.github\//
];
|
|
1067
|
-
// Path segments that disqualify a directory from doc discovery
// (tests, fixtures, examples, build output, VCS metadata).
const EXCLUDE_DIRS = new Set([
  "test",
  "tests",
  "__tests__",
  "fixtures",
  "fixture",
  "examples",
  "example",
  "node_modules",
  ".git",
  "dist",
  "build",
  "coverage",
  "e2e",
  "spec",
  "mocks",
  "__mocks__"
]);
|
|
1085
|
-
// Path segments that suggest a directory holds documentation; presence of
// any of these earns a scoring bonus in scoreDocDir.
const DOC_DIR_BONUS = new Set([
  "docs",
  "documentation",
  "pages",
  "content",
  "website",
  "guide",
  "guides",
  "wiki",
  "manual",
  "api"
]);
|
|
1097
|
-
/**
 * True when any path segment is a known non-documentation directory.
 */
function hasExcludedDir(path) {
  for (const segment of path.split("/")) {
    if (EXCLUDE_DIRS.has(segment.toLowerCase())) return true;
  }
  return false;
}
|
|
1100
|
-
/**
 * Count non-empty path segments (directory depth plus filename).
 */
function getPathDepth(path) {
  let depth = 0;
  for (const segment of path.split("/")) {
    if (segment) depth++;
  }
  return depth;
}
|
|
1103
|
-
/**
 * True when any path segment is a documentation-suggestive directory name.
 */
function hasDocDirBonus(path) {
  for (const segment of path.split("/")) {
    if (DOC_DIR_BONUS.has(segment.toLowerCase())) return true;
  }
  return false;
}
|
|
1106
|
-
/**
 * Score a candidate docs directory: more files scores higher, doc-like
 * names earn a 1.5x bonus, and deeper nesting divides the score down.
 */
function scoreDocDir(dir, fileCount) {
  const depth = getPathDepth(dir) || 1;
  const bonus = hasDocDirBonus(dir) ? 1.5 : 1;
  return fileCount * bonus / depth;
}
|
|
1110
|
-
/**
 * Heuristically find a repo's documentation set when there is no top-level
 * docs/ directory. First preference: the largest ".../docs/" group with at
 * least 3 files. Otherwise: the best-scoring directory containing at least
 * 5 markdown files. Returns { files, prefix } — prefix is the leading path
 * to strip when mirroring — or null when nothing qualifies.
 */
function discoverDocFiles(allFiles) {
  // Markdown files only, minus housekeeping docs and root-level files.
  const mdFiles = allFiles.filter((f) => /\.(?:md|mdx)$/.test(f)).filter((f) => !NOISE_PATTERNS.some((p) => p.test(f))).filter((f) => f.includes("/"));
  // Phase 1: group by the innermost ".../docs/" prefix.
  const docsGroups = /* @__PURE__ */ new Map();
  for (const file of mdFiles) {
    const docsIdx = file.lastIndexOf("/docs/");
    if (docsIdx === -1) continue;
    // +6 keeps the "/docs/" segment in the prefix.
    const prefix = file.slice(0, docsIdx + 6);
    const group = docsGroups.get(prefix) || [];
    group.push(file);
    docsGroups.set(prefix, group);
  }
  if (docsGroups.size > 0) {
    const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0];
    if (largest[1].length >= 3) {
      const fullPrefix = largest[0];
      const docsIdx = fullPrefix.lastIndexOf("docs/");
      // Strip everything before "docs/" so mirrored paths begin at docs/.
      const stripPrefix = docsIdx > 0 ? fullPrefix.slice(0, docsIdx) : "";
      return {
        files: largest[1],
        prefix: stripPrefix
      };
    }
  }
  // Phase 2: score every directory that holds markdown files.
  const dirGroups = /* @__PURE__ */ new Map();
  for (const file of mdFiles) {
    if (hasExcludedDir(file)) continue;
    const lastSlash = file.lastIndexOf("/");
    if (lastSlash === -1) continue;
    const dir = file.slice(0, lastSlash + 1);
    const group = dirGroups.get(dir) || [];
    group.push(file);
    dirGroups.set(dir, group);
  }
  if (dirGroups.size === 0) return null;
  const scored = [...dirGroups.entries()].map(([dir, files]) => ({
    dir,
    files,
    score: scoreDocDir(dir, files.length)
  })).filter((d) => d.files.length >= 5).sort((a, b) => b.score - a.score);
  if (scored.length === 0) return null;
  const best = scored[0];
  return {
    files: best.files,
    prefix: best.dir
  };
}
|
|
1156
|
-
/**
 * List markdown doc files under pathPrefix at a given ref.
 */
async function listDocsAtRef(owner, repo, ref, pathPrefix = "docs/") {
  const files = await listFilesAtRef(owner, repo, ref);
  return filterDocFiles(files, pathPrefix);
}
|
|
1159
|
-
/**
 * Resolve a documentation file set on GitHub for a package version.
 * Attempts, in order:
 *   1. a per-package override (repo/path, optionally pinned to a ref),
 *   2. docs/ at a ref matching the version (see findGitTag),
 *   3. heuristic discovery of the densest markdown directory.
 * Returns { baseUrl, ref, files, docsPrefix?, allFiles?, fallback? } or
 * null when no usable docs are found.
 */
async function fetchGitDocs(owner, repo, version, packageName, repoUrl) {
  const override = packageName ? getDocOverride(packageName) : void 0;
  if (override) {
    const ref = override.ref || "main";
    // An override without a pinned ref is not version-exact, so flag it.
    const fallback = !override.ref;
    const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`);
    if (files.length === 0) return null;
    return {
      baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,
      ref,
      files,
      fallback
    };
  }
  const tag = await findGitTag(owner, repo, version, packageName, repoUrl ? extractBranchHint(repoUrl) : void 0);
  if (!tag) return null;
  let docs = filterDocFiles(tag.files, "docs/");
  let docsPrefix;
  let allFiles;
  if (docs.length === 0) {
    // No conventional docs/ dir: look for the best markdown-heavy directory.
    const discovered = discoverDocFiles(tag.files);
    if (discovered) {
      docs = discovered.files;
      docsPrefix = discovered.prefix || void 0;
      allFiles = tag.files;
    }
  }
  if (docs.length === 0) return null;
  return {
    baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,
    ref: tag.ref,
    files: docs,
    docsPrefix,
    allFiles,
    fallback: tag.fallback
  };
}
|
|
1196
|
-
/**
 * Strip a single leading slash and a trailing .md/.mdx extension so doc
 * paths from different sources can be compared.
 */
function normalizePath(p) {
  const noLeadingSlash = p.startsWith("/") ? p.slice(1) : p;
  return noLeadingSlash.replace(/\.(?:md|mdx)$/, "");
}
|
|
1199
|
-
/**
 * Sanity-check discovered git docs against llms.txt links: a sample of up
 * to 10 link paths must overlap the repo's file list by at least 30%.
 * An empty link list is trivially valid.
 */
function validateGitDocsWithLlms(llmsLinks, repoFiles) {
  if (llmsLinks.length === 0) return {
    isValid: true,
    matchRatio: 1
  };
  const sample = llmsLinks.slice(0, 10);
  // Absolute links compare by pathname; relative links compare as-is.
  const toPath = (raw) => {
    let path = raw;
    if (path.startsWith("http")) {
      try {
        path = new URL(path).pathname;
      } catch {}
    }
    return normalizePath(path);
  };
  const repoNormalized = new Set(repoFiles.map(normalizePath));
  let matches = 0;
  for (const link of sample) {
    const linkPath = toPath(link.url);
    for (const repoPath of repoNormalized) {
      if (repoPath === linkPath || repoPath.endsWith(`/${linkPath}`)) {
        matches++;
        break;
      }
    }
  }
  const matchRatio = matches / sample.length;
  return {
    isValid: matchRatio >= .3,
    matchRatio
  };
}
|
|
1224
|
-
/**
 * Verify a GitHub repo really hosts the given npm package by reading the
 * "name" field of package.json at common locations: repo root, then two
 * monorepo packages/ layouts.
 */
async function verifyNpmRepo(owner, repo, packageName) {
  const base = `https://raw.githubusercontent.com/${owner}/${repo}/HEAD`;
  const candidates = [
    "package.json",
    `packages/${packageName.replace(/^@.*\//, "")}/package.json`,
    `packages/${packageName.replace(/^@/, "").replace("/", "-")}/package.json`
  ];
  for (const candidate of candidates) {
    const text = await fetchText(`${base}/${candidate}`);
    if (!text) continue;
    try {
      if (JSON.parse(text).name === packageName) return true;
    } catch {
      // Not valid JSON at this location; try the next candidate.
    }
  }
  return false;
}
|
|
1240
|
-
/**
 * Best-effort discovery of a GitHub repo for an npm package name.
 * Attempts, in order: direct owner/repo probes on ungh.cc, a `gh` CLI repo
 * search (when the CLI is available), and the unauthenticated GitHub search
 * API. Candidates without an exact name match are verified by reading
 * their package.json (verifyNpmRepo). Returns a repo URL or null.
 */
async function searchGitHubRepo(packageName) {
  const shortName = packageName.replace(/^@.*\//, "");
  // NOTE(review): `.replace("/", "/")` is a no-op — the scoped-name candidate
  // is just the name with "@" stripped; verify whether "-" was intended.
  for (const candidate of [packageName.replace(/^@/, "").replace("/", "/"), shortName]) {
    if (!candidate.includes("/")) {
      // Unscoped package: try the common <name>/<name> repo layout.
      if ((await $fetch.raw(`https://ungh.cc/repos/${shortName}/${shortName}`).catch(() => null))?.ok) return `https://github.com/${shortName}/${shortName}`;
      continue;
    }
    // Scoped package: "@owner/pkg" may map directly to owner/pkg.
    if ((await $fetch.raw(`https://ungh.cc/repos/${candidate}`).catch(() => null))?.ok) return `https://github.com/${candidate}`;
  }
  const searchTerm = packageName.replace(/^@/, "");
  if (isGhAvailable()) try {
    const { stdout: json } = spawnSync("gh", [
      "search",
      "repos",
      searchTerm,
      "--json",
      "fullName",
      "--limit",
      "5"
    ], {
      encoding: "utf-8",
      timeout: 15e3
    });
    if (!json) throw new Error("no output");
    const repos = JSON.parse(json);
    // Prefer a repo whose name matches the package (full or short form).
    const match = repos.find((r) => r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.fullName.toLowerCase().endsWith(`/${shortName.toLowerCase()}`));
    if (match) return `https://github.com/${match.fullName}`;
    for (const candidate of repos) {
      const gh = parseGitHubUrl(`https://github.com/${candidate.fullName}`);
      if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName)) return `https://github.com/${candidate.fullName}`;
    }
  } catch {}
  // Unauthenticated GitHub search API as the last resort.
  const data = await $fetch(`https://api.github.com/search/repositories?q=${encodeURIComponent(`${searchTerm} in:name`)}&per_page=5`).catch(() => null);
  if (!data?.items?.length) return null;
  const match = data.items.find((r) => r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.full_name.toLowerCase().endsWith(`/${shortName.toLowerCase()}`));
  if (match) return `https://github.com/${match.full_name}`;
  for (const candidate of data.items) {
    const gh = parseGitHubUrl(`https://github.com/${candidate.full_name}`);
    if (gh && await verifyNpmRepo(gh.owner, gh.repo, packageName)) return `https://github.com/${candidate.full_name}`;
  }
  return null;
}
|
|
1282
|
-
/**
 * Fetch a repo's homepage URL, preferring a per-package override, then the
 * authenticated `gh` CLI, then the public GitHub API. Returns null when no
 * homepage is set or the lookups fail.
 */
async function fetchGitHubRepoMeta(owner, repo, packageName) {
  const override = packageName ? getDocOverride(packageName) : void 0;
  if (override?.homepage) return { homepage: override.homepage };
  if (isGhAvailable()) try {
    const { stdout: json } = spawnSync("gh", [
      "api",
      `repos/${owner}/${repo}`,
      "-q",
      "{homepage}"
    ], {
      encoding: "utf-8",
      timeout: 1e4
    });
    // Empty stdout means the CLI call failed; fall through to the API.
    if (!json) throw new Error("no output");
    const data = JSON.parse(json);
    return data?.homepage ? { homepage: data.homepage } : null;
  } catch {}
  const data = await $fetch(`https://api.github.com/repos/${owner}/${repo}`).catch(() => null);
  return data?.homepage ? { homepage: data.homepage } : null;
}
|
|
1302
|
-
/**
 * Locate a repo README. Prefers the ungh.cc mirror (returned as an internal
 * "ungh://owner/repo[/subdir][@ref]" reference for fetchReadmeContent);
 * otherwise probes raw.githubusercontent.com across branches and common
 * README capitalizations. Returns a fetchable URL/reference or null.
 */
async function fetchReadme(owner, repo, subdir, ref) {
  const unghUrl = subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${ref || "main"}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme${ref ? `?ref=${ref}` : ""}`;
  if ((await $fetch.raw(unghUrl).catch(() => null))?.ok) return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ""}${ref ? `@${ref}` : ""}`;
  const basePath = subdir ? `${subdir}/` : "";
  const branches = ref ? [ref] : ["main", "master"];
  for (const b of branches) for (const filename of [
    "README.md",
    "Readme.md",
    "readme.md"
  ]) {
    // BUG FIX: interpolate the candidate filename. The previous code had a
    // literal "$(unknown)" placeholder here, so the loop variable was unused
    // and the raw.githubusercontent.com fallback could never find a README.
    const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${b}/${basePath}${filename}`;
    if ((await $fetch.raw(readmeUrl).catch(() => null))?.ok) return readmeUrl;
  }
  return null;
}
|
|
1317
|
-
/**
 * Resolve a README reference (from fetchReadme) into markdown text.
 * Supports three schemes: file:// paths, the internal ungh:// shorthand
 * ("owner/repo[/subdir][@ref]"), and plain http(s) URLs.
 * Returns null when the content cannot be read.
 */
async function fetchReadmeContent(url) {
  if (url.startsWith("file://")) {
    const filePath = fileURLToPath(url);
    if (!existsSync(filePath)) return null;
    return readFileSync(filePath, "utf-8");
  }
  if (url.startsWith("ungh://")) {
    let path = url.replace("ungh://", "");
    let ref = "main";
    // An "@ref" suffix pins a branch/tag; default is main.
    const atIdx = path.lastIndexOf("@");
    if (atIdx !== -1) {
      ref = path.slice(atIdx + 1);
      path = path.slice(0, atIdx);
    }
    const parts = path.split("/");
    const owner = parts[0];
    const repo = parts[1];
    const subdir = parts.slice(2).join("/");
    const text = await $fetch(subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/${ref}/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme?ref=${ref}`, { responseType: "text" }).catch(() => null);
    if (!text) return null;
    try {
      // ungh wraps the markdown in a JSON envelope; fall back to raw text
      // when the response is not JSON.
      const json = JSON.parse(text);
      return json.markdown || json.file?.contents || null;
    } catch {
      return text;
    }
  }
  return fetchText(url);
}
|
|
1346
|
-
/**
 * Probe for an llms.txt at the docs site's origin; null when absent.
 */
async function fetchLlmsUrl(docsUrl) {
  const candidate = `${new URL(docsUrl).origin}/llms.txt`;
  return await verifyUrl(candidate) ? candidate : null;
}
|
|
1351
|
-
/**
 * Download and parse an llms.txt file; null when missing or trivially short
 * (under 50 characters).
 */
async function fetchLlmsTxt(url) {
  const raw = await fetchText(url);
  if (!raw || raw.length < 50) return null;
  return {
    raw,
    links: parseMarkdownLinks(raw)
  };
}
|
|
1359
|
-
/**
 * Collect unique [title](path.md) links from markdown, in document order;
 * only links ending in ".md" are considered.
 */
function parseMarkdownLinks(content) {
  const seen = /* @__PURE__ */ new Set();
  const links = [];
  for (const match of content.matchAll(/\[([^\]]+)\]\(([^)]+\.md)\)/g)) {
    const [, title, url] = match;
    if (seen.has(url)) continue;
    seen.add(url);
    links.push({ title, url });
  }
  return links;
}
|
|
1375
|
-
/**
 * SSRF guard: allow only https URLs that do not target loopback hosts,
 * the cloud metadata address, RFC1918 private ranges, or bracketed IPv6.
 */
function isSafeUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    return false;
  }
  if (parsed.protocol !== "https:") return false;
  const host = parsed.hostname;
  // Loopback names plus the link-local cloud metadata endpoint.
  if (["localhost", "127.0.0.1", "::1", "169.254.169.254"].includes(host)) return false;
  // RFC1918 private IPv4 ranges.
  if (/^(?:10\.|172\.(?:1[6-9]|2\d|3[01])\.|192\.168\.)/.test(host)) return false;
  // Bracketed IPv6 literals are rejected wholesale.
  return !host.startsWith("[");
}
|
|
1389
|
-
/**
 * Download every markdown document linked from an llms.txt file with at
 * most 5 requests in flight. Relative links resolve against baseUrl;
 * unsafe URLs (see isSafeUrl) and bodies of 100 chars or less are dropped.
 * Returns {url, title, content} records for successful downloads.
 */
async function downloadLlmsDocs(llmsContent, baseUrl, onProgress) {
  const limit = pLimit(5);
  // NOTE(review): incremented when a download *starts*, so the count passed
  // to onProgress reflects started tasks, not finished ones.
  let completed = 0;
  return (await Promise.all(llmsContent.links.map((link) => limit(async () => {
    // Absolute links pass through; relative links are joined onto baseUrl.
    const url = link.url.startsWith("http") ? link.url : `${baseUrl.replace(/\/$/, "")}${link.url.startsWith("/") ? "" : "/"}${link.url}`;
    if (!isSafeUrl(url)) return null;
    onProgress?.(link.url, completed++, llmsContent.links.length);
    const content = await fetchText(url);
    if (content && content.length > 100) return {
      url: link.url,
      title: link.title,
      content
    };
    return null;
  })))).filter((d) => d !== null);
}
|
|
1405
|
-
/**
 * Rewrite llms.txt markdown links to point at the local ./docs mirror:
 * absolute links under baseUrl and root-relative links both become
 * "./docs/..." paths.
 */
function normalizeLlmsLinks(content, baseUrl) {
  let result = content;
  if (baseUrl) {
    // Escape the base URL so it can be embedded in a RegExp literally.
    const escaped = baseUrl.replace(/\/$/, "").replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const absoluteLink = new RegExp(`\\]\\(${escaped}(/[^)]+\\.md)\\)`, "g");
    result = result.replace(absoluteLink, "](./docs$1)");
  }
  return result.replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)");
}
|
|
1414
|
-
/**
 * From a "---"-separated multi-document dump where each section carries a
 * "url:" header line, keep the bodies of sections whose url contains any of
 * the given substrings. Returns null when nothing matches.
 */
function extractSections(content, patterns) {
  const matched = [];
  for (const part of content.split(/\n---\n/)) {
    const urlMatch = part.match(/^url: *(\S.*)$/m);
    if (!urlMatch) continue;
    const url = urlMatch[1];
    if (!patterns.some((p) => url.includes(p))) continue;
    // The body begins after the line containing "url:".
    const bodyStart = part.indexOf("\n", part.indexOf("url:"));
    if (bodyStart > -1) matched.push(part.slice(bodyStart + 1));
  }
  return matched.length > 0 ? matched.join("\n\n---\n\n") : null;
}
|
|
1429
|
-
/**
 * Search the npm registry full-text endpoint.
 *
 * @param {string} query - search text (URL-encoded before sending)
 * @param {number} [size=5] - maximum number of results to request
 * @returns {Promise<Array<{name: string, description: string, version: string}>>}
 *   Empty array on network failure or when no results come back.
 */
async function searchNpmPackages(query, size = 5) {
  const endpoint = `https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(query)}&size=${size}`;
  // Network/parse failures degrade to "no results" rather than throwing.
  const response = await $fetch(endpoint).catch(() => null);
  const hits = response?.objects;
  if (!hits?.length) return [];
  return hits.map(({ package: pkg }) => ({
    name: pkg.name,
    description: pkg.description,
    version: pkg.version
  }));
}
|
|
1438
|
-
/**
 * Fetch a package's manifest, preferring unpkg's copy of package.json and
 * falling back to the npm registry's `latest` document.
 *
 * @param {string} packageName
 * @returns {Promise<object|null>} manifest object, or null when both fail
 */
async function fetchNpmPackage(packageName) {
  const fromUnpkg = await $fetch(`https://unpkg.com/${packageName}/package.json`).catch(() => null);
  if (fromUnpkg) return fromUnpkg;
  // Fallback: registry "latest" metadata (superset of package.json fields).
  return $fetch(`https://registry.npmjs.org/${packageName}/latest`).catch(() => null);
}
|
|
1443
|
-
/**
 * Fetch release metadata for a package from the npm registry.
 *
 * @param {string} packageName
 * @param {string} version - version whose publish time is looked up
 * @returns {Promise<{releasedAt?: string, distTags?: Record<string, {version: string, releasedAt?: string}>}>}
 *   Empty object on fetch failure.
 */
async function fetchNpmRegistryMeta(packageName, version) {
  const meta = await $fetch(`https://registry.npmjs.org/${packageName}`).catch(() => null);
  if (!meta) return {};
  // Enrich each dist-tag with the publish timestamp recorded in `time`.
  let distTags;
  if (meta["dist-tags"]) {
    distTags = {};
    for (const [tag, ver] of Object.entries(meta["dist-tags"])) {
      distTags[tag] = { version: ver, releasedAt: meta.time?.[ver] };
    }
  }
  return {
    // Normalize falsy lookups to undefined so the key is cleanly absent-ish.
    releasedAt: meta.time?.[version] || void 0,
    distTags
  };
}
|
|
1455
|
-
/**
 * Resolve documentation sources for a package from its GitHub repository.
 *
 * Mutates `result` in place (gitDocsUrl/gitRef/gitDocsFallback, docsUrl,
 * readmeUrl) and appends a record to `attempts` for each source tried,
 * in order: github-docs, github-meta (only if no docsUrl yet), readme.
 *
 * @param {{owner: string, repo: string}} gh - parsed GitHub coordinates
 * @param {string|undefined} targetVersion - version whose tag is preferred for docs
 * @param {object} pkg - npm manifest (only `name` is read here)
 * @param {object} result - accumulator mutated with resolved URLs
 * @param {Array<object>} attempts - audit log of resolution attempts
 * @param {(stage: string) => void} [onProgress]
 * @param {{rawRepoUrl?: string, subdir?: string}} [opts]
 * @returns {Promise<Array|undefined>} the git docs file listing (allFiles)
 *   when docs were found at a ref, otherwise undefined
 */
async function resolveGitHub(gh, targetVersion, pkg, result, attempts, onProgress, opts) {
  let allFiles;
  // 1) docs/ folder at the version tag (or a fallback ref).
  if (targetVersion) {
    onProgress?.("github-docs");
    const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name, opts?.rawRepoUrl);
    if (gitDocs) {
      result.gitDocsUrl = gitDocs.baseUrl;
      result.gitRef = gitDocs.ref;
      // fallback=true means the exact version tag was missing and another ref was used.
      result.gitDocsFallback = gitDocs.fallback;
      allFiles = gitDocs.allFiles;
      attempts.push({
        source: "github-docs",
        url: gitDocs.baseUrl,
        status: "success",
        message: gitDocs.fallback ? `Found ${gitDocs.files.length} docs at ${gitDocs.ref} (no tag for v${targetVersion})` : `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`
      });
    } else attempts.push({
      source: "github-docs",
      url: `${result.repoUrl}/tree/v${targetVersion}/docs`,
      status: "not-found",
      message: "No docs/ folder found at version tag"
    });
  }
  // 2) Repo homepage as docs URL — only if nothing set docsUrl already.
  if (!result.docsUrl) {
    onProgress?.("github-meta");
    const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name);
    if (repoMeta?.homepage && !isUselessDocsUrl(repoMeta.homepage)) {
      result.docsUrl = repoMeta.homepage;
      attempts.push({
        source: "github-meta",
        url: result.repoUrl,
        status: "success",
        message: `Found homepage: ${repoMeta.homepage}`
      });
    } else attempts.push({
      source: "github-meta",
      url: result.repoUrl,
      status: "not-found",
      message: "No homepage in repo metadata"
    });
  }
  // 3) README (always attempted), pinned to the resolved git ref when known.
  onProgress?.("readme");
  const readmeUrl = await fetchReadme(gh.owner, gh.repo, opts?.subdir, result.gitRef);
  if (readmeUrl) {
    result.readmeUrl = readmeUrl;
    attempts.push({
      source: "readme",
      url: readmeUrl,
      status: "success"
    });
  } else attempts.push({
    source: "readme",
    url: `${result.repoUrl}/README.md`,
    status: "not-found",
    message: "No README found"
  });
  return allFiles;
}
|
|
1513
|
-
/**
 * Resolve documentation sources for a package, discarding the attempt log.
 *
 * @param {string} packageName
 * @param {object} [options] - same options as resolvePackageDocsWithAttempts
 * @returns {Promise<object|null>} resolved package docs, or null
 */
async function resolvePackageDocs(packageName, options = {}) {
  const outcome = await resolvePackageDocsWithAttempts(packageName, options);
  return outcome.package;
}
|
|
1516
|
-
/**
 * Resolve documentation sources for an npm package, returning both the
 * result and an audit log of every source tried.
 *
 * Pipeline, in order:
 *   1. npm manifest (unpkg / registry)
 *   2. registry metadata (release dates, dist-tags)
 *   3. repository URL normalization from package.json
 *   4. GitHub docs/homepage/README (or GitHub search when no repo URL)
 *   5. llms.txt probe at the docs URL
 *   6. llms.txt cross-validation of heuristic git docs
 *   7. local node_modules README as last resort (when options.cwd given)
 *
 * @param {string} packageName
 * @param {{version?: string, cwd?: string, onProgress?: (stage: string) => void}} [options]
 * @returns {Promise<{package: object|null, attempts: Array<object>}>}
 *   package is null when the manifest is missing or no doc source was found.
 */
async function resolvePackageDocsWithAttempts(packageName, options = {}) {
  const attempts = [];
  const { onProgress } = options;
  onProgress?.("npm");
  const pkg = await fetchNpmPackage(packageName);
  if (!pkg) {
    attempts.push({
      source: "npm",
      url: `https://registry.npmjs.org/${packageName}/latest`,
      status: "not-found",
      message: "Package not found on npm registry"
    });
    return {
      package: null,
      attempts
    };
  }
  attempts.push({
    source: "npm",
    url: `https://registry.npmjs.org/${packageName}/latest`,
    status: "success",
    message: `Found ${pkg.name}@${pkg.version}`
  });
  const registryMeta = pkg.version ? await fetchNpmRegistryMeta(packageName, pkg.version) : {};
  const result = {
    name: pkg.name,
    version: pkg.version,
    releasedAt: registryMeta.releasedAt,
    description: pkg.description,
    dependencies: pkg.dependencies,
    distTags: registryMeta.distTags
  };
  let gitDocsAllFiles;
  let subdir;
  let rawRepoUrl;
  // Normalize the repository field: object form, full URL string, or
  // shorthand like "owner/repo" / "github:owner/repo".
  if (typeof pkg.repository === "object" && pkg.repository?.url) {
    rawRepoUrl = pkg.repository.url;
    const normalized = normalizeRepoUrl(rawRepoUrl);
    // Bare "owner/repo" shorthand (no scheme, no scp-style colon) → GitHub.
    if (!normalized.includes("://") && normalized.includes("/") && !normalized.includes(":")) result.repoUrl = `https://github.com/${normalized}`;
    else result.repoUrl = normalized;
    subdir = pkg.repository.directory;
  } else if (typeof pkg.repository === "string") if (pkg.repository.includes("://")) {
    const gh = parseGitHubUrl(pkg.repository);
    if (gh) result.repoUrl = `https://github.com/${gh.owner}/${gh.repo}`;
  } else {
    const repo = pkg.repository.replace(/^github:/, "");
    if (repo.includes("/") && !repo.includes(":")) result.repoUrl = `https://github.com/${repo}`;
  }
  // A homepage that is not just the repo itself (and not a known-useless
  // URL) doubles as the docs URL.
  if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage) && !isUselessDocsUrl(pkg.homepage)) result.docsUrl = pkg.homepage;
  if (result.repoUrl?.includes("github.com")) {
    const gh = parseGitHubUrl(result.repoUrl);
    if (gh) gitDocsAllFiles = await resolveGitHub(gh, options.version || pkg.version, pkg, result, attempts, onProgress, {
      rawRepoUrl,
      subdir
    });
  } else if (!result.repoUrl) {
    // No repository declared — try to locate one via GitHub search.
    onProgress?.("github-search");
    const searchedUrl = await searchGitHubRepo(pkg.name);
    if (searchedUrl) {
      result.repoUrl = searchedUrl;
      attempts.push({
        source: "github-search",
        url: searchedUrl,
        status: "success",
        message: `Found via GitHub search: ${searchedUrl}`
      });
      const gh = parseGitHubUrl(searchedUrl);
      if (gh) gitDocsAllFiles = await resolveGitHub(gh, options.version || pkg.version, pkg, result, attempts, onProgress);
    } else attempts.push({
      source: "github-search",
      status: "not-found",
      message: "No repository URL in package.json and GitHub search found no match"
    });
  }
  // Probe for llms.txt at the docs site.
  if (result.docsUrl) {
    onProgress?.("llms.txt");
    const llmsUrl = await fetchLlmsUrl(result.docsUrl);
    if (llmsUrl) {
      result.llmsUrl = llmsUrl;
      attempts.push({
        source: "llms.txt",
        url: llmsUrl,
        status: "success"
      });
    } else attempts.push({
      source: "llms.txt",
      url: `${new URL(result.docsUrl).origin}/llms.txt`,
      status: "not-found",
      message: "No llms.txt at docs URL"
    });
  }
  // When both heuristic git docs and llms.txt exist, cross-check them;
  // if they disagree, drop the git docs and trust llms.txt.
  if (result.gitDocsUrl && result.llmsUrl && gitDocsAllFiles) {
    const llmsContent = await fetchLlmsTxt(result.llmsUrl);
    if (llmsContent && llmsContent.links.length > 0) {
      const validation = validateGitDocsWithLlms(llmsContent.links, gitDocsAllFiles);
      if (!validation.isValid) {
        attempts.push({
          source: "github-docs",
          url: result.gitDocsUrl,
          status: "not-found",
          message: `Heuristic git docs don't match llms.txt links (${Math.round(validation.matchRatio * 100)}% match), preferring llms.txt`
        });
        result.gitDocsUrl = void 0;
        result.gitRef = void 0;
      }
    }
  }
  // Last resort: a README shipped inside the locally installed package.
  if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {
    onProgress?.("local");
    const pkgDir = join(options.cwd, "node_modules", packageName);
    const readmeFile = existsSync(pkgDir) && readdirSync(pkgDir).find((f) => /^readme\.md$/i.test(f));
    if (readmeFile) {
      const readmePath = join(pkgDir, readmeFile);
      result.readmeUrl = pathToFileURL(readmePath).href;
      attempts.push({
        source: "readme",
        url: readmePath,
        status: "success",
        message: "Found local readme in node_modules"
      });
    }
  }
  // No doc source of any kind → treat as unresolved.
  if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl) return {
    package: null,
    attempts
  };
  return {
    package: result,
    attempts
  };
}
|
|
1647
|
-
/**
 * Resolve a package.json dependency specifier to a concrete name/version.
 *
 * Handles `link:` (reads the linked package.json), `npm:` aliases
 * (resolves the real name, then the installed version), plain semver
 * ranges, and `catalog:`/`workspace:` protocols (version "*").
 * `file:`/`git:`/`git+` specifiers are not resolvable here.
 *
 * @param {string} name - dependency name as it appears in package.json
 * @param {string} version - the specifier string
 * @param {string} cwd - directory used to resolve links and installed packages
 * @returns {{name: string, version: string}|null} null when unresolvable
 */
function parseVersionSpecifier(name, version, cwd) {
  if (version.startsWith("link:")) {
    const linkedPkgPath = join(resolve(cwd, version.slice(5)), "package.json");
    if (existsSync(linkedPkgPath)) {
      const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, "utf-8"));
      return {
        name: linkedPkg.name || name,
        version: linkedPkg.version || "0.0.0"
      };
    }
    return null;
  }
  if (version.startsWith("npm:")) {
    // Alias form "npm:real-name@range"; scoped names start with "@", so the
    // version separator is the first "@" after position 0.
    const specifier = version.slice(4);
    const atIndex = specifier.startsWith("@") ? specifier.indexOf("@", 1) : specifier.indexOf("@");
    const realName = atIndex > 0 ? specifier.slice(0, atIndex) : specifier;
    return {
      name: realName,
      version: resolveInstalledVersion(realName, cwd) || "*"
    };
  }
  if (version.startsWith("file:") || version.startsWith("git:") || version.startsWith("git+")) return null;
  // Prefer the actually-installed version over the declared range.
  const installed = resolveInstalledVersion(name, cwd);
  if (installed) return {
    name,
    version: installed
  };
  // Bare semver range: strip ALL leading operator characters. The previous
  // single-character strip left ">=1.2.3" as "=1.2.3".
  if (/^[\^~>=<\d]/.test(version)) return {
    name,
    version: version.replace(/^[\^~>=<]+/, "")
  };
  if (version.startsWith("catalog:") || version.startsWith("workspace:")) return {
    name,
    version: "*"
  };
  return null;
}
|
|
1684
|
-
/**
 * Look up the installed version of a package by resolving it from `cwd`.
 *
 * First tries resolving `<name>/package.json` directly; if the package's
 * exports block that, resolves the entry point and walks parent directories
 * (stopping at node_modules) looking for a package.json.
 *
 * @param {string} name - package name
 * @param {string} cwd - resolution base directory
 * @returns {string|null} installed version, or null when not resolvable
 */
function resolveInstalledVersion(name, cwd) {
  const readVersion = (manifestPath) =>
    JSON.parse(readFileSync(manifestPath, "utf-8")).version || null;
  try {
    return readVersion(resolvePathSync(`${name}/package.json`, { url: cwd }));
  } catch {
    try {
      // package.json may be hidden by "exports"; walk up from the entry file.
      let current = dirname(resolvePathSync(name, { url: cwd }));
      while (current && basename(current) !== "node_modules") {
        const candidate = join(current, "package.json");
        if (existsSync(candidate)) return readVersion(candidate);
        current = dirname(current);
      }
    } catch {}
    return null;
  }
}
|
|
1700
|
-
/**
 * Read the dependencies and devDependencies of the package.json in `cwd`
 * and resolve each specifier to a concrete name/version pair.
 *
 * @param {string} cwd - directory containing package.json
 * @returns {Promise<Array<{name: string, version: string}>>}
 * @throws {Error} when no package.json exists in cwd
 */
async function readLocalDependencies(cwd) {
  const manifestPath = join(cwd, "package.json");
  if (!existsSync(manifestPath)) throw new Error("No package.json found in current directory");
  const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
  // devDependencies win on key collision, matching spread order.
  const combined = Object.entries({ ...manifest.dependencies, ...manifest.devDependencies });
  return combined
    .map(([name, spec]) => parseVersionSpecifier(name, spec, cwd))
    .filter((entry) => entry !== null);
}
|
|
1715
|
-
/**
 * Read basic metadata from a local package directory's package.json.
 *
 * @param {string} localPath - directory containing package.json
 * @returns {{name: string, version: string, description: string|undefined,
 *   repoUrl: string|undefined, localPath: string}|null}
 *   null when no package.json exists at localPath
 */
function readLocalPackageInfo(localPath) {
  const manifestPath = join(localPath, "package.json");
  if (!existsSync(manifestPath)) return null;
  const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
  // repository may be an object ({ url }) or a plain string.
  let repoUrl;
  const repository = manifest.repository;
  if (repository?.url) {
    repoUrl = normalizeRepoUrl(repository.url);
  } else if (typeof repository === "string") {
    repoUrl = normalizeRepoUrl(repository);
  }
  return {
    name: manifest.name,
    version: manifest.version || "0.0.0",
    description: manifest.description,
    repoUrl,
    localPath
  };
}
|
|
1730
|
-
/**
 * Resolve documentation for a package checked out at a local path.
 *
 * Uses the local package.json for metadata, then tries the GitHub repo
 * (docs/ folder and README at the matching ref); falls back to a README
 * file inside the local directory.
 *
 * @param {string} localPath - directory of the local package
 * @returns {Promise<object|null>} resolved docs, or null when neither a
 *   README nor git docs could be found
 */
async function resolveLocalPackageDocs(localPath) {
  const info = readLocalPackageInfo(localPath);
  if (!info) return null;
  const docs = {
    name: info.name,
    version: info.version,
    description: info.description,
    repoUrl: info.repoUrl
  };
  if (info.repoUrl?.includes("github.com")) {
    const gh = parseGitHubUrl(info.repoUrl);
    if (gh) {
      const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name);
      if (gitDocs) {
        docs.gitDocsUrl = gitDocs.baseUrl;
        docs.gitRef = gitDocs.ref;
        docs.gitDocsFallback = gitDocs.fallback;
      }
      // README lookup is pinned to the resolved ref when docs were found.
      const readmeUrl = await fetchReadme(gh.owner, gh.repo, void 0, docs.gitRef);
      if (readmeUrl) docs.readmeUrl = readmeUrl;
    }
  }
  if (!docs.readmeUrl && !docs.gitDocsUrl) {
    // Fall back to a README file shipped in the local directory.
    const localReadme = readdirSync(localPath).find((f) => /^readme\.md$/i.test(f));
    if (localReadme) docs.readmeUrl = pathToFileURL(join(localPath, localReadme)).href;
  }
  if (!docs.readmeUrl && !docs.gitDocsUrl) return null;
  return docs;
}
|
|
1759
|
-
/**
 * Download and extract a package's published tarball into the cache.
 *
 * Returns the cached extraction immediately when present; otherwise fetches
 * the tarball URL from the registry, streams it to disk, and extracts it
 * with the system `tar`.
 *
 * Fix vs. previous version: the download promise used to resolve on an
 * intermediate Writable's 'finish' event and only then call
 * `fileStream.end()`, so `tar` could be spawned before the file was fully
 * flushed/closed (truncated archive). Write errors on the file stream also
 * had no handler (uncaught 'error' event). We now write to the file stream
 * directly and resolve on its 'close' event, which fires after all data is
 * flushed and the descriptor is closed.
 *
 * @param {string} name - package name
 * @param {string} version - exact version to fetch
 * @returns {Promise<string|null>} extracted package directory, or null on
 *   any failure (registry miss, download failure, tar failure)
 */
async function fetchPkgDist(name, version) {
  const cacheDir = getCacheDir(name, version);
  const pkgDir = join(cacheDir, "pkg");
  // Cache hit: an extracted package.json marks a complete extraction.
  if (existsSync(join(pkgDir, "package.json"))) return pkgDir;
  const data = await $fetch(`https://registry.npmjs.org/${name}/${version}`).catch(() => null);
  if (!data) return null;
  const tarballUrl = data.dist?.tarball;
  if (!tarballUrl) return null;
  const tarballRes = await fetch(tarballUrl, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!tarballRes?.ok || !tarballRes.body) return null;
  mkdirSync(pkgDir, { recursive: true });
  const tmpTarball = join(cacheDir, "_pkg.tgz");
  const fileStream = createWriteStream(tmpTarball);
  const reader = tarballRes.body.getReader();
  await new Promise((res, reject) => {
    fileStream.on("error", reject);
    // 'close' fires only after all buffered data is flushed and the fd closed.
    fileStream.on("close", res);
    function pump() {
      reader.read().then(({ done, value }) => {
        if (done) {
          fileStream.end();
          return;
        }
        fileStream.write(value, (err) => err ? reject(err) : pump());
      }).catch(reject);
    }
    pump();
  });
  const { status } = spawnSync("tar", [
    "xzf",
    tmpTarball,
    "--strip-components=1",
    "-C",
    pkgDir
  ], { stdio: "ignore" });
  if (status !== 0) {
    // Extraction failed (or tar missing): remove partial output and tarball.
    rmSync(pkgDir, {
      recursive: true,
      force: true
    });
    rmSync(tmpTarball, { force: true });
    return null;
  }
  unlinkSync(tmpTarball);
  return pkgDir;
}
|
|
1811
|
-
/**
 * Fetch the latest published version of a package via unpkg.
 *
 * @param {string} packageName
 * @returns {Promise<string|null>} version string, or null on failure
 */
async function fetchLatestVersion(packageName) {
  const manifest = await $fetch(`https://unpkg.com/${packageName}/package.json`).catch(() => null);
  return manifest?.version || null;
}
|
|
1814
|
-
/**
 * Read the `version:` field from an installed skill's SKILL.md frontmatter.
 *
 * @param {string} skillDir - directory expected to contain SKILL.md
 * @returns {string|null} version (quotes stripped), or null when SKILL.md
 *   is missing or has no version line
 */
function getInstalledSkillVersion(skillDir) {
  const skillPath = join(skillDir, "SKILL.md");
  if (!existsSync(skillPath)) return null;
  const markdown = readFileSync(skillPath, "utf-8");
  // Match `version: x` or `version: "x"` at the start of any line.
  const match = markdown.match(/^version:\s*"?([^"\n]+)"?/m);
  return match?.[1] || null;
}
|
|
1819
|
-
export { resolveEntryFiles as A, extractBranchHint as B, fetchReadme as C, fetchGitSkills as D, validateGitDocsWithLlms as E, compareSemver as F, verifyUrl as G, isGitHubRepoUrl as H, fetchReleaseNotes as I, generateIssueIndex as J, fetchGitHubIssues as K, generateReleaseIndex as L, formatDiscussionAsMarkdown as M, generateDiscussionIndex as N, parseGitSkillInput as O, fetchBlogReleases as P, parseSemver as R, fetchGitHubRepoMeta as S, isShallowGitDocs as T, normalizeRepoUrl as U, fetchText as V, parseGitHubUrl as W, isGhAvailable as Y, fetchLlmsUrl as _, getInstalledSkillVersion as a, MIN_GIT_DOCS as b, readLocalPackageInfo as c, resolvePackageDocs as d, resolvePackageDocsWithAttempts as f, fetchLlmsTxt as g, extractSections as h, fetchPkgDist as i, fetchGitHubDiscussions as j, parseSkillFrontmatterName as k, resolveInstalledVersion as l, downloadLlmsDocs as m, fetchNpmPackage as n, parseVersionSpecifier as o, searchNpmPackages as p, formatIssueAsMarkdown as q, fetchNpmRegistryMeta as r, readLocalDependencies as s, fetchLatestVersion as t, resolveLocalPackageDocs as u, normalizeLlmsLinks as v, fetchReadmeContent as w, fetchGitDocs as x, parseMarkdownLinks as y, $fetch as z };
|
|
1820
|
-
|
|
1821
|
-
//# sourceMappingURL=npm.mjs.map
|