skilld 0.0.1 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +119 -88
  3. package/dist/_chunks/config.mjs +20 -0
  4. package/dist/_chunks/config.mjs.map +1 -0
  5. package/dist/_chunks/llm.mjs +877 -0
  6. package/dist/_chunks/llm.mjs.map +1 -0
  7. package/dist/_chunks/releases.mjs +986 -0
  8. package/dist/_chunks/releases.mjs.map +1 -0
  9. package/dist/_chunks/storage.mjs +198 -0
  10. package/dist/_chunks/storage.mjs.map +1 -0
  11. package/dist/_chunks/sync-parallel.mjs +540 -0
  12. package/dist/_chunks/sync-parallel.mjs.map +1 -0
  13. package/dist/_chunks/types.d.mts +87 -0
  14. package/dist/_chunks/types.d.mts.map +1 -0
  15. package/dist/_chunks/utils.d.mts +352 -0
  16. package/dist/_chunks/utils.d.mts.map +1 -0
  17. package/dist/_chunks/version.d.mts +147 -0
  18. package/dist/_chunks/version.d.mts.map +1 -0
  19. package/dist/agent/index.d.mts +205 -0
  20. package/dist/agent/index.d.mts.map +1 -0
  21. package/dist/agent/index.mjs +2 -0
  22. package/dist/cache/index.d.mts +2 -0
  23. package/dist/cache/index.mjs +3 -0
  24. package/dist/cli.mjs +2650 -449
  25. package/dist/cli.mjs.map +1 -1
  26. package/dist/index.d.mts +5 -14
  27. package/dist/index.mjs +7 -181
  28. package/dist/retriv/index.d.mts +12 -0
  29. package/dist/retriv/index.d.mts.map +1 -0
  30. package/dist/retriv/index.mjs +76 -0
  31. package/dist/retriv/index.mjs.map +1 -0
  32. package/dist/sources/index.d.mts +2 -0
  33. package/dist/sources/index.mjs +3 -0
  34. package/dist/types.d.mts +4 -37
  35. package/package.json +39 -13
  36. package/dist/agents.d.mts +0 -56
  37. package/dist/agents.d.mts.map +0 -1
  38. package/dist/agents.mjs +0 -148
  39. package/dist/agents.mjs.map +0 -1
  40. package/dist/index.d.mts.map +0 -1
  41. package/dist/index.mjs.map +0 -1
  42. package/dist/npm.d.mts +0 -48
  43. package/dist/npm.d.mts.map +0 -1
  44. package/dist/npm.mjs +0 -90
  45. package/dist/npm.mjs.map +0 -1
  46. package/dist/split-text.d.mts +0 -24
  47. package/dist/split-text.d.mts.map +0 -1
  48. package/dist/split-text.mjs +0 -87
  49. package/dist/split-text.mjs.map +0 -1
  50. package/dist/types.d.mts.map +0 -1
@@ -0,0 +1,986 @@
1
+ import { a as getCacheDir } from "./config.mjs";
2
+ import { join, resolve } from "node:path";
3
+ import { createWriteStream, existsSync, mkdirSync, readFileSync, rmSync, unlinkSync } from "node:fs";
4
+ import { execSync } from "node:child_process";
5
+ import { globby } from "globby";
6
+ import { Writable } from "node:stream";
7
+ import { pathToFileURL } from "node:url";
8
// Memoized availability flag for the GitHub CLI.
let _ghAvailable;

/**
 * Returns true when the `gh` CLI is installed and authenticated.
 * The first probe's result is cached for the process lifetime.
 */
function isGhAvailable() {
  if (_ghAvailable === void 0) {
    try {
      execSync("gh auth status", { stdio: "ignore" });
      _ghAvailable = true;
    } catch {
      _ghAvailable = false;
    }
  }
  return _ghAvailable;
}
18
/**
 * Fetches recent human-authored issues (PRs and bot accounts excluded)
 * for a repo via the `gh` CLI. Returns [] when `gh` is unavailable or
 * the command fails.
 */
async function fetchGitHubIssues(owner, repo, limit = 20) {
  if (!isGhAvailable()) return [];
  const bots = new Set([
    "renovate[bot]",
    "dependabot[bot]",
    "renovate-bot",
    "dependabot",
    "github-actions[bot]"
  ]);
  try {
    // Over-fetch (3x, capped at 100) so the bot/PR filtering below still
    // leaves up to `limit` items.
    const raw = execSync(`gh api "repos/${owner}/${repo}/issues?per_page=${Math.min(limit * 3, 100)}&state=all" -q '.[] | {number, title, state, labels: [.labels[].name], body, createdAt: .created_at, url: .html_url, isPr: (.pull_request != null), user: .user.login, userType: .user.type}'`, {
      encoding: "utf-8",
      maxBuffer: 10 * 1024 * 1024
    });
    // Output is newline-delimited JSON, one issue per line.
    const parsed = raw.trim().split("\n").filter(Boolean).map((line) => JSON.parse(line));
    const humans = parsed.filter((issue) => !issue.isPr && !bots.has(issue.user) && issue.userType !== "Bot");
    // Drop the filtering-only fields before returning.
    return humans.slice(0, limit).map(({ isPr: _, user: __, userType: ___, ...issue }) => issue);
  } catch {
    return [];
  }
}
37
/**
 * Renders a list of GitHub issues as a markdown digest.
 * Bodies longer than 500 chars are truncated with "...".
 * Returns "" for an empty list.
 */
function formatIssuesAsMarkdown(issues) {
  if (issues.length === 0) return "";
  const out = ["# Recent Issues\n"];
  for (const issue of issues) {
    const labelSuffix = issue.labels.length > 0 ? ` [${issue.labels.join(", ")}]` : "";
    out.push(
      `## #${issue.number}: ${issue.title}${labelSuffix}`,
      `State: ${issue.state} | Created: ${issue.createdAt.split("T")[0]}`,
      `URL: ${issue.url}\n`
    );
    if (issue.body) {
      const truncated = issue.body.length > 500 ? `${issue.body.slice(0, 500)}...` : issue.body;
      out.push(truncated);
    }
    out.push("\n---\n");
  }
  return out.join("\n");
}
53
/**
 * Fetches recent repo discussions via the `gh` GraphQL API, excluding
 * bot authors. Returns [] when `gh` is unavailable or the call fails.
 */
async function fetchGitHubDiscussions(owner, repo, limit = 20) {
  if (!isGhAvailable()) return [];
  const bots = new Set([
    "renovate[bot]",
    "dependabot[bot]",
    "renovate-bot",
    "dependabot",
    "github-actions[bot]"
  ]);
  // Over-fetch (2x, capped at 50) so bot filtering still leaves `limit` items.
  const query = `query { repository(owner: "${owner}", name: "${repo}") { discussions(first: ${Math.min(limit * 2, 50)}, orderBy: {field: CREATED_AT, direction: DESC}) { nodes { number title body category { name } createdAt url upvoteCount comments { totalCount } author { login } } } } }`;
  try {
    const raw = execSync(`gh api graphql -f query='${query}'`, {
      encoding: "utf-8",
      maxBuffer: 10 * 1024 * 1024
    });
    const nodes = JSON.parse(raw)?.data?.repository?.discussions?.nodes;
    if (!Array.isArray(nodes)) return [];
    return nodes
      .filter((d) => d.author && !bots.has(d.author.login))
      .slice(0, limit)
      .map((d) => ({
        number: d.number,
        title: d.title,
        body: d.body || "",
        category: d.category?.name || "",
        createdAt: d.createdAt,
        url: d.url,
        upvoteCount: d.upvoteCount || 0,
        comments: d.comments?.totalCount || 0
      }));
  } catch {
    return [];
  }
}
83
/**
 * Renders a list of GitHub discussions as a markdown digest. The meta
 * line includes category, creation date, and any non-zero upvote or
 * comment counts. Bodies longer than 500 chars are truncated.
 * Returns "" for an empty list.
 */
function formatDiscussionsAsMarkdown(discussions) {
  if (discussions.length === 0) return "";
  const out = ["# Recent Discussions\n"];
  for (const d of discussions) {
    const metaParts = [
      d.category && `Category: ${d.category}`,
      `Created: ${d.createdAt.split("T")[0]}`,
      d.upvoteCount > 0 && `Upvotes: ${d.upvoteCount}`,
      d.comments > 0 && `Comments: ${d.comments}`
    ];
    out.push(`## #${d.number}: ${d.title}`);
    out.push(metaParts.filter(Boolean).join(" | "));
    out.push(`URL: ${d.url}\n`);
    if (d.body) {
      out.push(d.body.length > 500 ? `${d.body.slice(0, 500)}...` : d.body);
    }
    out.push("\n---\n");
  }
  return out.join("\n");
}
104
// Directory names excluded when scanning an installed package for entry
// files: tests, fixtures, vendored code, localization data, VCS internals.
const SKIP_DIRS = [
  "node_modules",
  "_vendor",
  "__tests__",
  "__mocks__",
  "__fixtures__",
  "test",
  "tests",
  "fixture",
  "fixtures",
  "locales",
  "locale",
  "i18n",
  ".git"
];
// Glob patterns for build artifacts and meta files with no API surface:
// minified/bundled outputs, source maps, changelogs, licenses, readmes.
const SKIP_PATTERNS = [
  "*.min.*",
  "*.prod.*",
  "*.global.*",
  "*.browser.*",
  "*.map",
  "*.map.js",
  "CHANGELOG*",
  "LICENSE*",
  "README*"
];
// Entry files larger than 500 KiB are skipped entirely.
const MAX_FILE_SIZE = 500 * 1024;
131
/**
 * Collects TypeScript declaration files (*.d.ts / *.d.mts / *.d.cts)
 * from an installed package directory, skipping vendored/test/minified
 * paths and files larger than MAX_FILE_SIZE.
 * Returns [] when the directory has no package.json.
 */
async function resolveEntryFiles(packageDir) {
  if (!existsSync(join(packageDir, "package.json"))) return [];
  const declFiles = await globby(["**/*.d.{ts,mts,cts}"], {
    cwd: packageDir,
    ignore: [...SKIP_DIRS.map((d) => `**/${d}/**`), ...SKIP_PATTERNS],
    absolute: false
  });
  const collected = [];
  for (const relPath of declFiles) {
    let text;
    try {
      text = readFileSync(join(packageDir, relPath), "utf-8");
    } catch {
      // Unreadable file: skip it rather than failing the whole scan.
      continue;
    }
    if (text.length > MAX_FILE_SIZE) continue;
    collected.push({
      path: relPath,
      content: text,
      type: "types"
    });
  }
  return collected;
}
156
/**
 * Hand-maintained overrides for packages whose documentation lives in a
 * separate repository. Keyed by npm package name.
 */
const DOC_OVERRIDES = { vue: {
  owner: "vuejs",
  repo: "docs",
  path: "src",
  homepage: "https://vuejs.org"
} };
/**
 * Looks up a docs override for a package.
 * Uses Object.hasOwn so inherited prototype properties ("constructor",
 * "toString", ...) can never leak through as fake override entries.
 * @param {string} packageName - npm package name
 * @returns the override entry, or undefined when none is registered.
 */
function getDocOverride(packageName) {
  return Object.hasOwn(DOC_OVERRIDES, packageName) ? DOC_OVERRIDES[packageName] : void 0;
}
165
// User-Agent sent with every outbound HTTP request.
const USER_AGENT = "skilld/1.0";
/**
 * GETs a URL and resolves with its body text, or null on any network
 * error / non-2xx response.
 */
async function fetchText(url) {
  const response = await fetch(url, { headers: { "User-Agent": USER_AGENT } }).catch(() => null);
  return response?.ok ? response.text() : null;
}
171
/**
 * HEAD-checks that a URL exists and is not served as HTML (SPA servers
 * often answer 200 text/html for any path, which would be a false hit).
 */
async function verifyUrl(url) {
  const response = await fetch(url, {
    method: "HEAD",
    headers: { "User-Agent": USER_AGENT }
  }).catch(() => null);
  if (!response?.ok) return false;
  const contentType = response.headers.get("content-type") || "";
  return !contentType.includes("text/html");
}
179
/**
 * True when the string parses as a URL whose host is github.com
 * (or www.github.com); false for anything else, including junk input.
 */
function isGitHubRepoUrl(url) {
  let host;
  try {
    host = new URL(url).hostname;
  } catch {
    return false;
  }
  return host === "github.com" || host === "www.github.com";
}
187
/**
 * Extracts { owner, repo } from any string containing
 * "github.com/owner/repo". Returns null when the pattern is absent.
 */
function parseGitHubUrl(url) {
  const m = /github\.com\/([^/]+)\/([^/]+)/.exec(url);
  return m ? { owner: m[1], repo: m[2] } : null;
}
195
/**
 * Normalizes a package.json repository URL to plain https form:
 * strips a leading "git+" and trailing ".git", and rewrites git:// and
 * ssh://git@github.com schemes to https.
 */
function normalizeRepoUrl(url) {
  let normalized = url.replace(/^git\+/, "");
  normalized = normalized.replace(/\.git$/, "");
  normalized = normalized.replace(/^git:\/\//, "https://");
  return normalized.replace(/^ssh:\/\/git@github\.com/, "https://github.com");
}
198
/**
 * Lists file paths at a git ref via the ungh.cc mirror API.
 * Returns [] on network failure, non-2xx, or a malformed payload.
 */
async function listFilesAtRef(owner, repo, ref) {
  const response = await fetch(`https://ungh.cc/repos/${owner}/${repo}/files/${ref}`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!response?.ok) return [];
  const payload = await response.json().catch(() => null);
  return payload?.files?.map((f) => f.path) ?? [];
}
203
/**
 * Finds a git ref whose file listing is non-empty for the given package
 * version. Tries, in order: "v<version>", "<version>",
 * "<packageName>@<version>" (monorepo style), then the newest
 * "<packageName>@*" release tag, then the "main"/"master" branches.
 * Returns { ref, files } or null when nothing matched.
 */
async function findGitTag(owner, repo, version, packageName) {
  const candidates = [`v${version}`, version];
  if (packageName) candidates.push(`${packageName}@${version}`);
  for (const tag of candidates) {
    const files = await listFilesAtRef(owner, repo, tag);
    // A non-empty listing is the signal that the ref exists.
    if (files.length > 0) return {
      ref: tag,
      files
    };
  }
  if (packageName) {
    // Monorepos may tag releases with a version that differs from the
    // requested one; fall back to the latest "<pkg>@..." release tag.
    const latestTag = await findLatestReleaseTag(owner, repo, packageName);
    if (latestTag) {
      const files = await listFilesAtRef(owner, repo, latestTag);
      if (files.length > 0) return {
        ref: latestTag,
        files
      };
    }
  }
  // Last resort: default branch heads.
  for (const branch of ["main", "master"]) {
    const files = await listFilesAtRef(owner, repo, branch);
    if (files.length > 0) return {
      ref: branch,
      files
    };
  }
  return null;
}
232
/**
 * Returns the most recent release tag of the form "<packageName>@...",
 * or null when the release list is unavailable or has no match.
 */
async function findLatestReleaseTag(owner, repo, packageName) {
  const response = await fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!response?.ok) return null;
  const payload = await response.json().catch(() => null);
  const wanted = `${packageName}@`;
  return payload?.releases?.find((r) => r.tag.startsWith(wanted))?.tag ?? null;
}
239
/**
 * Keeps only markdown files (.md / .mdx) under the given path prefix.
 */
function filterDocFiles(files, pathPrefix) {
  const isMarkdown = (f) => /\.(?:md|mdx)$/.test(f);
  return files.filter((f) => f.startsWith(pathPrefix) && isMarkdown(f));
}
242
// Markdown paths that are not real documentation: changesets, changelogs,
// contribution guides, CI configuration.
const NOISE_PATTERNS = [
  /^\.changeset\//,
  /CHANGELOG\.md$/i,
  /CONTRIBUTING\.md$/i,
  /^\.github\//
];
// Directory names that disqualify a path from doc discovery.
const EXCLUDE_DIRS = new Set([
  "test", "tests", "__tests__", "fixtures", "fixture", "examples",
  "example", "node_modules", ".git", "dist", "build", "coverage",
  "e2e", "spec", "mocks", "__mocks__"
]);
// Directory names that suggest documentation; paths containing one earn
// a 1.5x multiplier in scoreDocDir.
const DOC_DIR_BONUS = new Set([
  "docs", "documentation", "pages", "content", "website",
  "guide", "guides", "wiki", "manual", "api"
]);
/** True when any path segment (case-insensitive) is an excluded directory. */
function hasExcludedDir(path) {
  for (const segment of path.split("/")) {
    if (EXCLUDE_DIRS.has(segment.toLowerCase())) return true;
  }
  return false;
}
/** Number of non-empty path segments. */
function getPathDepth(path) {
  return path.split("/").filter(Boolean).length;
}
/** True when any path segment (case-insensitive) is a doc-like directory. */
function hasDocDirBonus(path) {
  for (const segment of path.split("/")) {
    if (DOC_DIR_BONUS.has(segment.toLowerCase())) return true;
  }
  return false;
}
/**
 * Scores a candidate documentation directory: more files is better,
 * doc-like names get a 1.5x boost, deeper nesting is penalized linearly.
 */
function scoreDocDir(dir, fileCount) {
  const depth = getPathDepth(dir) || 1;
  const bonus = hasDocDirBonus(dir) ? 1.5 : 1;
  return fileCount * bonus / depth;
}
291
/**
 * Heuristically discovers a repo's documentation set from a full file
 * listing when there is no top-level docs/ folder.
 *
 * Pass 1: group markdown files by their nearest ".../docs/" ancestor and
 * take the largest group (needs >= 3 files). Pass 2: group remaining
 * markdown files by immediate parent directory, score each with
 * scoreDocDir, and take the best group (needs >= 5 files).
 * Returns { files, prefix } (prefix is what callers strip from paths)
 * or null when nothing qualifies.
 */
function discoverDocFiles(allFiles) {
  // Markdown only, minus noise (changelogs etc.), minus root-level files.
  const mdFiles = allFiles.filter((f) => /\.(?:md|mdx)$/.test(f)).filter((f) => !NOISE_PATTERNS.some((p) => p.test(f))).filter((f) => f.includes("/"));
  const docsGroups = /* @__PURE__ */ new Map();
  for (const file of mdFiles) {
    const docsIdx = file.lastIndexOf("/docs/");
    if (docsIdx === -1) continue;
    // +6 keeps the "/docs/" segment itself in the group key.
    const prefix = file.slice(0, docsIdx + 6);
    const group = docsGroups.get(prefix) || [];
    group.push(file);
    docsGroups.set(prefix, group);
  }
  if (docsGroups.size > 0) {
    // Prefer the docs/ subtree containing the most markdown files.
    const largest = [...docsGroups.entries()].sort((a, b) => b[1].length - a[1].length)[0];
    if (largest[1].length >= 3) {
      const fullPrefix = largest[0];
      const docsIdx = fullPrefix.lastIndexOf("docs/");
      // Strip everything before "docs/" so the docs/ folder is preserved
      // in the relative paths callers derive.
      const stripPrefix = docsIdx > 0 ? fullPrefix.slice(0, docsIdx) : "";
      return {
        files: largest[1],
        prefix: stripPrefix
      };
    }
  }
  const dirGroups = /* @__PURE__ */ new Map();
  for (const file of mdFiles) {
    if (hasExcludedDir(file)) continue;
    const lastSlash = file.lastIndexOf("/");
    if (lastSlash === -1) continue;
    const dir = file.slice(0, lastSlash + 1);
    const group = dirGroups.get(dir) || [];
    group.push(file);
    dirGroups.set(dir, group);
  }
  if (dirGroups.size === 0) return null;
  // Score each directory; require at least 5 files to avoid picking up
  // incidental markdown clusters.
  const scored = [...dirGroups.entries()].map(([dir, files]) => ({
    dir,
    files,
    score: scoreDocDir(dir, files.length)
  })).filter((d) => d.files.length >= 5).sort((a, b) => b.score - a.score);
  if (scored.length === 0) return null;
  const best = scored[0];
  return {
    files: best.files,
    prefix: best.dir
  };
}
337
/**
 * Convenience wrapper: markdown files under `pathPrefix` at a git ref.
 */
async function listDocsAtRef(owner, repo, ref, pathPrefix = "docs/") {
  const allFiles = await listFilesAtRef(owner, repo, ref);
  return filterDocFiles(allFiles, pathPrefix);
}
340
/**
 * Resolves the markdown documentation set for a package version.
 * Order: a hard-coded DOC_OVERRIDES entry, then the repo's docs/ folder
 * at the best-matching git ref (findGitTag), then heuristic discovery
 * (discoverDocFiles). Returns { baseUrl, ref, files, docsPrefix? }
 * where baseUrl points at raw.githubusercontent.com, or null.
 */
async function fetchGitDocs(owner, repo, version, packageName) {
  const override = packageName ? getDocOverride(packageName) : void 0;
  if (override) {
    // Override repos are read at a fixed ref, not the package version.
    const ref = override.ref || "main";
    const files = await listDocsAtRef(override.owner, override.repo, ref, `${override.path}/`);
    if (files.length === 0) return null;
    return {
      baseUrl: `https://raw.githubusercontent.com/${override.owner}/${override.repo}/${ref}`,
      ref,
      files
    };
  }
  const tag = await findGitTag(owner, repo, version, packageName);
  if (!tag) return null;
  let docs = filterDocFiles(tag.files, "docs/");
  let docsPrefix;
  if (docs.length === 0) {
    // No top-level docs/: fall back to heuristic discovery.
    const discovered = discoverDocFiles(tag.files);
    if (discovered) {
      docs = discovered.files;
      docsPrefix = discovered.prefix || void 0;
    }
  }
  if (docs.length === 0) return null;
  return {
    baseUrl: `https://raw.githubusercontent.com/${owner}/${repo}/${tag.ref}`,
    ref: tag.ref,
    files: docs,
    docsPrefix
  };
}
371
/**
 * Searches GitHub for a repository matching an npm package name.
 * Prefers the authenticated `gh` CLI (higher rate limits), falling back
 * to the public REST search API. A repo whose full name ends with the
 * package name (scope stripped) wins; otherwise the first result is
 * returned. Returns an https repo URL or null.
 *
 * NOTE(review): packageName is interpolated into a shell command inside
 * double quotes; npm names are restricted, but confirm callers never
 * pass unvalidated user input here.
 */
async function searchGitHubRepo(packageName) {
  if (isGhAvailable()) try {
    const json = execSync(`gh search repos "${packageName}" --json fullName --limit 5`, {
      encoding: "utf-8",
      timeout: 15e3
    });
    const repos = JSON.parse(json);
    // Exact-name suffix match beats search ranking.
    const match = repos.find((r) => r.fullName.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.fullName.toLowerCase().endsWith(`/${packageName.replace(/^@.*\//, "").toLowerCase()}`));
    if (match) return `https://github.com/${match.fullName}`;
    if (repos.length > 0) return `https://github.com/${repos[0].fullName}`;
  } catch {}
  // Anonymous REST fallback (low rate limit; failures degrade to null).
  const query = encodeURIComponent(`${packageName} in:name`);
  const res = await fetch(`https://api.github.com/search/repositories?q=${query}&per_page=5`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!res?.ok) return null;
  const data = await res.json().catch(() => null);
  if (!data?.items?.length) return null;
  const match = data.items.find((r) => r.full_name.toLowerCase().endsWith(`/${packageName.toLowerCase()}`) || r.full_name.toLowerCase().endsWith(`/${packageName.replace(/^@.*\//, "").toLowerCase()}`));
  return match ? `https://github.com/${match.full_name}` : `https://github.com/${data.items[0].full_name}`;
}
390
/**
 * Resolves repository metadata (currently just the homepage).
 * Order: hard-coded doc override, authenticated `gh` CLI, then the
 * public GitHub REST API. Returns { homepage } or null.
 */
async function fetchGitHubRepoMeta(owner, repo, packageName) {
  const override = packageName ? getDocOverride(packageName) : void 0;
  if (override?.homepage) return { homepage: override.homepage };
  if (isGhAvailable()) {
    try {
      const raw = execSync(`gh api "repos/${owner}/${repo}" -q '{homepage}'`, {
        encoding: "utf-8",
        timeout: 1e4
      });
      const meta = JSON.parse(raw);
      return meta?.homepage ? { homepage: meta.homepage } : null;
    } catch {
      // fall through to the anonymous REST API below
    }
  }
  const response = await fetch(`https://api.github.com/repos/${owner}/${repo}`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!response?.ok) return null;
  const meta = await response.json().catch(() => null);
  return meta?.homepage ? { homepage: meta.homepage } : null;
}
406
/**
 * Locates a repo README. Prefers the ungh.cc mirror (returned as an
 * opaque "ungh://owner/repo[/subdir]" pseudo-URL that
 * fetchReadmeContent resolves later); otherwise probes
 * raw.githubusercontent.com across default branches and filename
 * casings. Returns the URL string or null.
 *
 * Fix: the raw-URL fallback interpolated the literal "$(unknown)"
 * instead of the loop's candidate filename, so the `filename` variable
 * was unused and the fallback could never match a real README.
 */
async function fetchReadme(owner, repo, subdir) {
  const unghUrl = subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/main/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme`;
  if ((await fetch(unghUrl, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null))?.ok) return `ungh://${owner}/${repo}${subdir ? `/${subdir}` : ""}`;
  const basePath = subdir ? `${subdir}/` : "";
  for (const branch of ["main", "master"]) {
    for (const filename of ["README.md", "readme.md"]) {
      const readmeUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${basePath}${filename}`;
      if (await verifyUrl(readmeUrl)) return readmeUrl;
    }
  }
  return null;
}
416
/**
 * Loads README text from any URL produced by fetchReadme:
 * - "file://..."  → read from disk (null when the file is missing)
 * - "ungh://owner/repo[/subdir]" → resolve via ungh.cc (JSON or raw)
 * - anything else → plain HTTP GET via fetchText
 */
async function fetchReadmeContent(url) {
  if (url.startsWith("file://")) {
    const { readFileSync, existsSync } = await import("node:fs");
    const { fileURLToPath } = await import("node:url");
    const localPath = fileURLToPath(url);
    return existsSync(localPath) ? readFileSync(localPath, "utf-8") : null;
  }
  if (url.startsWith("ungh://")) {
    const [owner, repo, ...rest] = url.replace("ungh://", "").split("/");
    const subdir = rest.join("/");
    const unghUrl = subdir ? `https://ungh.cc/repos/${owner}/${repo}/files/main/${subdir}/README.md` : `https://ungh.cc/repos/${owner}/${repo}/readme`;
    const response = await fetch(unghUrl, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
    if (!response?.ok) return null;
    const text = await response.text();
    try {
      // ungh answers JSON ({ markdown } or { file: { contents } });
      // a parse failure means the body was already raw markdown.
      const parsed = JSON.parse(text);
      return parsed.markdown || parsed.file?.contents || null;
    } catch {
      return text;
    }
  }
  return fetchText(url);
}
442
/**
 * Probes `<docsUrl>/llms.txt`; returns that URL when it exists and is
 * not served as HTML, otherwise null.
 */
async function fetchLlmsUrl(docsUrl) {
  const candidate = `${docsUrl.replace(/\/$/, "")}/llms.txt`;
  return (await verifyUrl(candidate)) ? candidate : null;
}
447
/**
 * Downloads an llms.txt file and pre-parses its markdown links.
 * Bodies shorter than 50 chars are treated as missing (null).
 */
async function fetchLlmsTxt(url) {
  const raw = await fetchText(url);
  if (!raw || raw.length < 50) return null;
  return {
    raw,
    links: parseMarkdownLinks(raw)
  };
}
455
/**
 * Extracts unique "[title](target.md)" links from markdown, preserving
 * first-seen order. Only targets ending in ".md" are collected;
 * duplicates (by URL) keep their first title.
 */
function parseMarkdownLinks(content) {
  const seen = new Set();
  const links = [];
  for (const match of content.matchAll(/\[([^\]]+)\]\(([^)]+\.md)\)/g)) {
    const [, title, url] = match;
    if (seen.has(url)) continue;
    seen.add(url);
    links.push({ title, url });
  }
  return links;
}
471
/**
 * Sequentially downloads every doc linked from a parsed llms.txt.
 * Relative links are resolved against baseUrl; responses of 100 chars
 * or fewer are discarded. onProgress fires before each fetch with
 * (url, index, total).
 */
async function downloadLlmsDocs(llmsContent, baseUrl, onProgress) {
  const docs = [];
  const total = llmsContent.links.length;
  for (let i = 0; i < total; i++) {
    const { url, title } = llmsContent.links[i];
    onProgress?.(url, i, total);
    const target = url.startsWith("http")
      ? url
      : `${baseUrl.replace(/\/$/, "")}${url.startsWith("/") ? "" : "/"}${url}`;
    const content = await fetchText(target);
    if (content && content.length > 100) docs.push({ url, title, content });
  }
  return docs;
}
485
/**
 * Rewrites llms.txt markdown links to point at the local ./docs mirror:
 * absolute links under baseUrl become "](./docs<path>)" and
 * root-relative "/x.md" links become "](./docs/x.md)".
 */
function normalizeLlmsLinks(content, baseUrl) {
  let result = content;
  if (baseUrl) {
    const trimmedBase = baseUrl.replace(/\/$/, "");
    // Escape regex metacharacters so the base URL matches literally.
    const escapedBase = trimmedBase.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const absoluteLink = new RegExp(`\\]\\(${escapedBase}(/[^)]+\\.md)\\)`, "g");
    result = result.replace(absoluteLink, "](./docs$1)");
  }
  return result.replace(/\]\(\/([^)]+\.md)\)/g, "](./docs/$1)");
}
494
/**
 * Pulls page sections out of an llms-full-style concatenated document.
 * The document is split on "---" separators; each part must carry a
 * "url: ..." line, and parts whose url contains any of `patterns`
 * contribute everything after that line. Returns the joined sections
 * or null when none match.
 */
function extractSections(content, patterns) {
  const matched = [];
  for (const part of content.split(/\n---\n/)) {
    const urlLine = part.match(/^url: *(\S.*)$/m);
    if (!urlLine) continue;
    if (!patterns.some((p) => urlLine[1].includes(p))) continue;
    // Keep only the body after the "url:" line.
    const afterUrl = part.indexOf("\n", part.indexOf("url:"));
    if (afterUrl > -1) matched.push(part.slice(afterUrl + 1));
  }
  return matched.length > 0 ? matched.join("\n\n---\n\n") : null;
}
509
/**
 * Fetches a package manifest, preferring unpkg (serves the published
 * package.json directly) with the npm registry "latest" document as a
 * fallback. Returns the parsed JSON or null.
 */
async function fetchNpmPackage(packageName) {
  const headers = { "User-Agent": "skilld/1.0" };
  let response = await fetch(`https://unpkg.com/${packageName}/package.json`, { headers }).catch(() => null);
  if (!response?.ok) {
    response = await fetch(`https://registry.npmjs.org/${packageName}/latest`, { headers }).catch(() => null);
  }
  return response?.ok ? response.json() : null;
}
515
/**
 * Reads release-timing metadata for a package from the npm registry:
 * the publish timestamp of `version` and each dist-tag with its own
 * timestamp. Returns {} when the registry is unreachable or the
 * payload is malformed.
 *
 * Fix: res.json() previously had no rejection handling (unlike every
 * sibling fetch in this module), so a malformed registry response
 * rejected the whole call instead of degrading to {}.
 */
async function fetchNpmRegistryMeta(packageName, version) {
  const res = await fetch(`https://registry.npmjs.org/${packageName}`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
  if (!res?.ok) return {};
  const data = await res.json().catch(() => null);
  if (!data) return {};
  const distTags = data["dist-tags"] ? Object.fromEntries(Object.entries(data["dist-tags"]).map(([tag, ver]) => [tag, {
    version: ver,
    releasedAt: data.time?.[ver]
  }])) : void 0;
  return {
    releasedAt: data.time?.[version] || void 0,
    distTags
  };
}
528
/**
 * Thin wrapper over resolvePackageDocsWithAttempts that discards the
 * attempt log and returns only the resolved package (or null).
 */
async function resolvePackageDocs(packageName, options = {}) {
  const { package: resolved } = await resolvePackageDocsWithAttempts(packageName, options);
  return resolved;
}
531
/**
 * Resolves every documentation source for an npm package, recording an
 * attempt log entry ({ source, url?, status, message? }) per step.
 *
 * Pipeline: npm manifest → repo URL (manifest or GitHub search) →
 * versioned git docs → repo homepage → README → llms.txt at the docs
 * homepage → local node_modules README as a last resort.
 * Returns { package, attempts }; package is null when nothing at all
 * was found. `options`: { version?, cwd?, onProgress? }.
 */
async function resolvePackageDocsWithAttempts(packageName, options = {}) {
  const attempts = [];
  const { onProgress } = options;
  onProgress?.("npm");
  const pkg = await fetchNpmPackage(packageName);
  if (!pkg) {
    attempts.push({
      source: "npm",
      url: `https://registry.npmjs.org/${packageName}/latest`,
      status: "not-found",
      message: "Package not found on npm registry"
    });
    return {
      package: null,
      attempts
    };
  }
  attempts.push({
    source: "npm",
    url: `https://registry.npmjs.org/${packageName}/latest`,
    status: "success",
    message: `Found ${pkg.name}@${pkg.version}`
  });
  const registryMeta = pkg.version ? await fetchNpmRegistryMeta(packageName, pkg.version) : {};
  const result = {
    name: pkg.name,
    version: pkg.version,
    releasedAt: registryMeta.releasedAt,
    description: pkg.description,
    dependencies: pkg.dependencies,
    distTags: registryMeta.distTags
  };
  // `subdir` is the monorepo sub-path from repository.directory; it only
  // exists for the object form of the repository field.
  let subdir;
  if (typeof pkg.repository === "object" && pkg.repository?.url) {
    result.repoUrl = normalizeRepoUrl(pkg.repository.url);
    subdir = pkg.repository.directory;
  } else if (typeof pkg.repository === "string") {
    // Shorthand form, e.g. "github:owner/repo" or "owner/repo".
    const repo = pkg.repository.replace(/^github:/, "");
    if (repo.includes("/") && !repo.includes(":")) result.repoUrl = `https://github.com/${repo}`;
  }
  if (result.repoUrl?.includes("github.com")) {
    // Branch 1: the manifest pointed at a GitHub repo.
    const gh = parseGitHubUrl(result.repoUrl);
    if (gh) {
      const targetVersion = options.version || pkg.version;
      if (targetVersion) {
        onProgress?.("github-docs");
        const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name);
        if (gitDocs) {
          result.gitDocsUrl = gitDocs.baseUrl;
          result.gitRef = gitDocs.ref;
          attempts.push({
            source: "github-docs",
            url: gitDocs.baseUrl,
            status: "success",
            message: `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`
          });
        } else attempts.push({
          source: "github-docs",
          url: `${result.repoUrl}/tree/v${targetVersion}/docs`,
          status: "not-found",
          message: "No docs/ folder found at version tag"
        });
      }
      if (!result.docsUrl) {
        onProgress?.("github-meta");
        const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name);
        if (repoMeta?.homepage) {
          result.docsUrl = repoMeta.homepage;
          attempts.push({
            source: "github-meta",
            url: result.repoUrl,
            status: "success",
            message: `Found homepage: ${repoMeta.homepage}`
          });
        } else attempts.push({
          source: "github-meta",
          url: result.repoUrl,
          status: "not-found",
          message: "No homepage in repo metadata"
        });
      }
      onProgress?.("readme");
      const readmeUrl = await fetchReadme(gh.owner, gh.repo, subdir);
      if (readmeUrl) {
        result.readmeUrl = readmeUrl;
        attempts.push({
          source: "readme",
          url: readmeUrl,
          status: "success"
        });
      } else attempts.push({
        source: "readme",
        url: `${result.repoUrl}/README.md`,
        status: "not-found",
        message: "No README found"
      });
    }
  } else if (!result.repoUrl) {
    // Branch 2: no repository in the manifest — try GitHub search, then
    // repeat the docs/homepage/README steps against the found repo.
    onProgress?.("github-search");
    const searchedUrl = await searchGitHubRepo(pkg.name);
    if (searchedUrl) {
      result.repoUrl = searchedUrl;
      attempts.push({
        source: "github-search",
        url: searchedUrl,
        status: "success",
        message: `Found via GitHub search: ${searchedUrl}`
      });
      const gh = parseGitHubUrl(searchedUrl);
      if (gh) {
        const targetVersion = options.version || pkg.version;
        if (targetVersion) {
          onProgress?.("github-docs");
          const gitDocs = await fetchGitDocs(gh.owner, gh.repo, targetVersion, pkg.name);
          if (gitDocs) {
            result.gitDocsUrl = gitDocs.baseUrl;
            result.gitRef = gitDocs.ref;
            attempts.push({
              source: "github-docs",
              url: gitDocs.baseUrl,
              status: "success",
              message: `Found ${gitDocs.files.length} docs at ${gitDocs.ref}`
            });
          }
        }
        if (!result.docsUrl) {
          onProgress?.("github-meta");
          const repoMeta = await fetchGitHubRepoMeta(gh.owner, gh.repo, pkg.name);
          if (repoMeta?.homepage) result.docsUrl = repoMeta.homepage;
        }
        onProgress?.("readme");
        const readmeUrl = await fetchReadme(gh.owner, gh.repo);
        if (readmeUrl) result.readmeUrl = readmeUrl;
      }
    } else attempts.push({
      source: "github-search",
      status: "not-found",
      message: "No repository URL in package.json and GitHub search found no match"
    });
  }
  // The manifest homepage (when it is not just the repo page) wins over
  // any homepage discovered via repo metadata above.
  if (pkg.homepage && !isGitHubRepoUrl(pkg.homepage)) result.docsUrl = pkg.homepage;
  if (result.docsUrl) {
    onProgress?.("llms.txt");
    const llmsUrl = await fetchLlmsUrl(result.docsUrl);
    if (llmsUrl) {
      result.llmsUrl = llmsUrl;
      attempts.push({
        source: "llms.txt",
        url: llmsUrl,
        status: "success"
      });
    } else attempts.push({
      source: "llms.txt",
      url: `${result.docsUrl}/llms.txt`,
      status: "not-found",
      message: "No llms.txt at docs URL"
    });
  }
  // Last resort: a README inside the locally installed copy.
  if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl && options.cwd) {
    onProgress?.("local");
    const pkgDir = join(options.cwd, "node_modules", packageName);
    for (const filename of ["README.md", "readme.md"]) {
      const readmePath = join(pkgDir, filename);
      if (existsSync(readmePath)) {
        result.readmeUrl = pathToFileURL(readmePath).href;
        attempts.push({
          source: "readme",
          url: readmePath,
          status: "success",
          message: "Found local readme in node_modules"
        });
        break;
      }
    }
  }
  // With no doc source at all, report failure (attempts explain why).
  if (!result.docsUrl && !result.llmsUrl && !result.readmeUrl && !result.gitDocsUrl) return {
    package: null,
    attempts
  };
  return {
    package: result,
    attempts
  };
}
715
/**
 * Resolves an npm dependency specifier to a concrete { name, version }:
 * - "link:<path>"     → read name/version from the linked package.json
 * - "workspace:<v>"   → strip the protocol and leading range char
 * - "npm:<name>@<v>"  → aliased package (scoped aliases supported)
 * - "file:"/"git:"/"git+" specifiers are unresolvable here → null
 * - plain ranges have their operator stripped ("^1.2.3" → "1.2.3")
 *
 * Fix: range stripping previously removed only a single character, so
 * two-character operators leaked through (">=1.2.3" → "=1.2.3"); the
 * operator prefix is now stripped greedily.
 */
function parseVersionSpecifier(name, version, cwd) {
  if (version.startsWith("link:")) {
    const linkedPkgPath = join(resolve(cwd, version.slice(5)), "package.json");
    if (existsSync(linkedPkgPath)) {
      const linkedPkg = JSON.parse(readFileSync(linkedPkgPath, "utf-8"));
      return {
        name: linkedPkg.name || name,
        version: linkedPkg.version || "0.0.0"
      };
    }
    return null;
  }
  if (version.startsWith("workspace:")) return {
    name,
    version: version.slice(10).replace(/^[\^~*]/, "") || "*"
  };
  if (version.startsWith("npm:")) {
    const specifier = version.slice(4);
    // For scoped aliases ("@scope/pkg@1.0.0") the version separator is
    // the second "@", not the leading one.
    const atIndex = specifier.startsWith("@") ? specifier.indexOf("@", 1) : specifier.indexOf("@");
    if (atIndex > 0) return {
      name: specifier.slice(0, atIndex),
      version: specifier.slice(atIndex + 1)
    };
    return {
      name: specifier,
      version: "*"
    };
  }
  if (version.startsWith("file:") || version.startsWith("git:") || version.startsWith("git+")) return null;
  return {
    name,
    version: version.replace(/^[\^~>=<]+/, "")
  };
}
749
/**
 * Reads direct dependencies (dependencies + devDependencies) from the
 * package.json in `cwd`, skipping @types/* stubs and common toolchain
 * packages, and resolves each specifier via parseVersionSpecifier.
 * Throws when no package.json exists in `cwd`.
 */
async function readLocalDependencies(cwd) {
  const pkgPath = join(cwd, "package.json");
  if (!existsSync(pkgPath)) throw new Error("No package.json found in current directory");
  const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
  const TOOLING = [
    "typescript",
    "eslint",
    "prettier",
    "vitest",
    "jest"
  ];
  const results = [];
  for (const [name, version] of Object.entries({ ...pkg.dependencies, ...pkg.devDependencies })) {
    if (name.startsWith("@types/")) continue;
    if (TOOLING.includes(name)) continue;
    const parsed = parseVersionSpecifier(name, version, cwd);
    if (parsed) results.push(parsed);
  }
  return results;
}
771
/**
 * Reads identifying metadata (name, version, description, normalized
 * repo URL) from a local package directory's package.json.
 * Returns null when no package.json is present.
 */
function readLocalPackageInfo(localPath) {
  const pkgPath = join(localPath, "package.json");
  if (!existsSync(pkgPath)) return null;
  const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
  let repoUrl;
  if (pkg.repository?.url) {
    repoUrl = normalizeRepoUrl(pkg.repository.url);
  } else if (typeof pkg.repository === "string") {
    repoUrl = normalizeRepoUrl(pkg.repository);
  }
  return {
    name: pkg.name,
    version: pkg.version || "0.0.0",
    description: pkg.description,
    repoUrl,
    localPath
  };
}
786
/**
 * Resolves documentation for a package on the local filesystem (e.g. a
 * linked workspace package): reads its package.json, tries versioned
 * git docs and a remote README when the repo is on GitHub, then falls
 * back to the package's own README.md. Returns null when no doc source
 * is found at all.
 */
async function resolveLocalPackageDocs(localPath) {
  const info = readLocalPackageInfo(localPath);
  if (!info) return null;
  const result = {
    name: info.name,
    version: info.version,
    description: info.description,
    repoUrl: info.repoUrl
  };
  if (info.repoUrl?.includes("github.com")) {
    const gh = parseGitHubUrl(info.repoUrl);
    if (gh) {
      const gitDocs = await fetchGitDocs(gh.owner, gh.repo, info.version, info.name);
      if (gitDocs) {
        result.gitDocsUrl = gitDocs.baseUrl;
        result.gitRef = gitDocs.ref;
      }
      const readmeUrl = await fetchReadme(gh.owner, gh.repo);
      if (readmeUrl) result.readmeUrl = readmeUrl;
    }
  }
  if (!result.readmeUrl && !result.gitDocsUrl) {
    // Remote lookups failed: use the on-disk README as a file:// URL.
    const localReadme = join(localPath, "README.md");
    if (existsSync(localReadme)) result.readmeUrl = pathToFileURL(localReadme).href;
  }
  if (!result.readmeUrl && !result.gitDocsUrl) return null;
  return result;
}
814
// Downloads a package's published npm tarball and extracts it into the
// skill cache at <cacheDir>/pkg, returning the extracted directory path,
// or null on any fetch/extract failure. Cached: if pkg/package.json
// already exists, the download is skipped entirely.
async function fetchPkgDist(name, version) {
	const cacheDir = getCacheDir(name, version);
	const pkgDir = join(cacheDir, "pkg");
	// Cache hit: tarball already extracted by a previous run.
	if (existsSync(join(pkgDir, "package.json"))) return pkgDir;
	// Registry metadata for this exact version; tells us the tarball URL.
	const res = await fetch(`https://registry.npmjs.org/${name}/${version}`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
	if (!res?.ok) return null;
	const tarballUrl = (await res.json()).dist?.tarball;
	if (!tarballUrl) return null;
	const tarballRes = await fetch(tarballUrl, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
	if (!tarballRes?.ok || !tarballRes.body) return null;
	mkdirSync(pkgDir, { recursive: true });
	const tmpTarball = join(cacheDir, "_pkg.tgz");
	const fileStream = createWriteStream(tmpTarball);
	// Manually pump the WHATWG ReadableStream from fetch into a Node write
	// stream. NOTE(review): `res` inside the Promise executor shadows the
	// outer fetch response — it is the promise's resolve function.
	const reader = tarballRes.body.getReader();
	await new Promise((res, reject) => {
		const writable = new Writable({ write(chunk, _encoding, callback) {
			fileStream.write(chunk, callback);
		} });
		writable.on("finish", () => {
			fileStream.end();
			res();
		});
		writable.on("error", reject);
		// Read one chunk at a time and recurse from the write callback so
		// chunks are written strictly in order with backpressure respected.
		function pump() {
			reader.read().then(({ done, value }) => {
				if (done) {
					writable.end();
					return;
				}
				writable.write(value, () => pump());
			}).catch(reject);
		}
		pump();
	});
	try {
		// System tar; --strip-components=1 drops the "package/" prefix npm
		// tarballs carry. Paths are quoted but not shell-escaped — assumes
		// cache paths contain no quote characters (TODO confirm).
		execSync(`tar xzf "${tmpTarball}" --strip-components=1 -C "${pkgDir}"`, { stdio: "ignore" });
	} catch {
		// Extraction failed: remove partial output and the tarball so a
		// later call retries from scratch instead of hitting the cache check.
		rmSync(pkgDir, {
			recursive: true,
			force: true
		});
		rmSync(tmpTarball, { force: true });
		return null;
	}
	unlinkSync(tmpTarball);
	return pkgDir;
}
861
// Looks up the latest published version of `packageName` via unpkg's
// package.json redirect; null on network failure or a missing field.
async function fetchLatestVersion(packageName) {
	const response = await fetch(`https://unpkg.com/${packageName}/package.json`, { headers: { "User-Agent": "skilld/1.0" } }).catch(() => null);
	if (!response?.ok) return null;
	const manifest = await response.json();
	return manifest.version || null;
}
866
// Extracts the `version:` field from an installed skill's SKILL.md
// frontmatter (quotes optional); null when the file or field is absent.
function getInstalledSkillVersion(skillDir) {
	const manifestPath = join(skillDir, "SKILL.md");
	if (!existsSync(manifestPath)) return null;
	const contents = readFileSync(manifestPath, "utf-8");
	const match = contents.match(/^version:\s*"?([^"\n]+)"?/m);
	return match ? match[1] : null;
}
871
// Parses a MAJOR.MINOR.PATCH prefix from a version string (optional
// leading "v"); null when the string does not begin with three dotted
// numbers. `raw` keeps the full de-prefixed string (pre-release and all).
function parseSemver(version) {
	const normalized = version.startsWith("v") ? version.slice(1) : version;
	const parts = normalized.match(/^(\d+)\.(\d+)\.(\d+)/);
	if (!parts) return null;
	const [, major, minor, patch] = parts;
	return {
		major: Number(major),
		minor: Number(minor),
		patch: Number(patch),
		raw: normalized
	};
}
882
// Recovers a bare version string from a git tag, stripping a monorepo
// package prefix ("pkg@X", "pkg-vX", "pkg-X") when `packageName` is given,
// otherwise just a leading "v".
function extractVersion(tag, packageName) {
	if (packageName) {
		const prefix = escapeRegex(packageName);
		for (const pattern of [new RegExp(`^${prefix}@(.+)$`), new RegExp(`^${prefix}-v?(.+)$`)]) {
			const m = tag.match(pattern);
			if (m) return m[1];
		}
	}
	return tag.replace(/^v/, "");
}
891
// Backslash-escapes every regex metacharacter in `str` so it can be
// embedded literally inside a RegExp source string.
function escapeRegex(str) {
	return str.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
894
// True when a git tag belongs to `packageName` under the common monorepo
// tag schemes: "pkg@1.0.0", "pkg-v1.0.0", or "pkg-1.0.0".
function tagMatchesPackage(tag, packageName) {
	const prefixes = [`${packageName}@`, `${packageName}-v`, `${packageName}-`];
	return prefixes.some((prefix) => tag.startsWith(prefix));
}
897
// Standard semver comparator: negative when a < b, positive when a > b,
// zero when equal. Each component difference short-circuits the next.
function compareSemver(a, b) {
	return a.major - b.major || a.minor - b.minor || a.patch - b.patch;
}
902
// Lists up to 100 releases for owner/repo via the authenticated `gh` CLI,
// using jq to map GitHub's payload onto the internal release shape.
// Returns [] on any failure (gh missing, timeout, bad JSON).
function fetchReleasesViaGh(owner, repo) {
	const jqMap = "[.[] | {id: .id, tag: .tag_name, name: .name, prerelease: .prerelease, createdAt: .created_at, publishedAt: .published_at, markdown: .body}]";
	try {
		const stdout = execSync(`gh api "repos/${owner}/${repo}/releases?per_page=100" --jq '${jqMap}'`, {
			encoding: "utf-8",
			timeout: 15e3,
			stdio: [
				"ignore",
				"pipe",
				"ignore"
			]
		});
		return JSON.parse(stdout);
	} catch {
		return [];
	}
}
918
// Fetches releases from the ungh.cc mirror (no auth required);
// [] on network error, non-2xx response, or malformed JSON.
async function fetchReleasesViaUngh(owner, repo) {
	const response = await fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`, {
		headers: { "User-Agent": "skilld/1.0" },
		signal: AbortSignal.timeout(15e3)
	}).catch(() => null);
	if (!response?.ok) return [];
	const payload = await response.json().catch(() => null);
	return payload?.releases ?? [];
}
926
// Prefers the gh CLI when installed (higher rate limits, private repos);
// falls back to the ungh.cc mirror when gh is absent or returns nothing.
async function fetchAllReleases(owner, repo) {
	if (isGhAvailable()) {
		const viaGh = fetchReleasesViaGh(owner, repo);
		if (viaGh.length > 0) return viaGh;
	}
	return fetchReleasesViaUngh(owner, repo);
}
933
// Narrows a repo's releases to the (at most 20) newest stable ones, sorted
// descending by semver. When the repo uses monorepo-style tags for
// `packageName` (pkg@x / pkg-vx / pkg-x), only that package's tags are
// considered; otherwise any plain semver tag qualifies.
function selectReleases(releases, packageName) {
	const monorepo = Boolean(packageName) && releases.some((release) => tagMatchesPackage(release.tag, packageName));
	const stable = releases.filter((release) => {
		if (release.prerelease) return false;
		if (monorepo && packageName) {
			if (!tagMatchesPackage(release.tag, packageName)) return false;
			const version = extractVersion(release.tag, packageName);
			return version && parseSemver(version);
		}
		return parseSemver(release.tag);
	});
	stable.sort((a, b) => {
		const versionA = extractVersion(a.tag, monorepo ? packageName : void 0);
		const versionB = extractVersion(b.tag, monorepo ? packageName : void 0);
		// Unparseable pair: keep relative order.
		if (!versionA || !versionB) return 0;
		return compareSemver(parseSemver(versionB), parseSemver(versionA));
	});
	return stable.slice(0, 20);
}
950
// Renders one release as standalone markdown: a title heading, a
// tag/publish-date metadata line, then the release body.
function formatRelease(release) {
	const isoDate = release.publishedAt || release.createdAt;
	const publishedDate = isoDate.split("T")[0];
	const title = release.name || release.tag;
	return `# ${title}\n\nTag: ${release.tag} | Published: ${publishedDate}\n\n${release.markdown}`;
}
954
// Fetches the repository changelog from raw.githubusercontent.com, trying
// common filenames in order; returns the first hit's text, or null when
// none exists at `ref`.
// Fix: the URL previously ended in the literal "$(unknown)" (a mangled
// build placeholder) instead of interpolating the candidate filename, so
// the loop variable was unused and every iteration fetched the same
// nonexistent path — the function could never find a changelog.
async function fetchChangelog(owner, repo, ref) {
	for (const filename of [
		"CHANGELOG.md",
		"changelog.md",
		"CHANGES.md"
	]) {
		const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${filename}`;
		const res = await fetch(url, {
			headers: { "User-Agent": "skilld/1.0" },
			signal: AbortSignal.timeout(1e4)
		}).catch(() => null);
		if (res?.ok) return res.text();
	}
	return null;
}
969
// Gathers release documentation for a package: one markdown file per
// GitHub release when any exist, otherwise the repo CHANGELOG as a single
// file. Returns [] when neither source is available.
// (`installedVersion` is currently unused but kept for interface parity.)
async function fetchReleaseNotes(owner, repo, installedVersion, gitRef, packageName) {
	const releases = selectReleases(await fetchAllReleases(owner, repo), packageName);
	if (releases.length > 0) {
		return releases.map((release) => {
			const tag = release.tag;
			// Tags already carrying a package prefix or "v" keep their name;
			// bare versions get a "v" prefix for the filename.
			const fileName = tag.includes("@") || tag.startsWith("v") ? tag : `v${tag}`;
			return {
				path: `releases/${fileName}.md`,
				content: formatRelease(release)
			};
		});
	}
	const changelog = await fetchChangelog(owner, repo, gitRef || "main");
	return changelog ? [{
		path: "CHANGELOG.md",
		content: changelog
	}] : [];
}
984
// Public chunk exports; single-letter binding names are assigned by the bundler.
export { resolveEntryFiles as A, fetchText as C, verifyUrl as D, parseGitHubUrl as E, isGhAvailable as F, formatDiscussionsAsMarkdown as M, fetchGitHubIssues as N, DOC_OVERRIDES as O, formatIssuesAsMarkdown as P, fetchReadmeContent as S, normalizeRepoUrl as T, normalizeLlmsLinks as _, fetchPkgDist as a, fetchGitHubRepoMeta as b, readLocalDependencies as c, resolvePackageDocs as d, resolvePackageDocsWithAttempts as f, fetchLlmsUrl as g, fetchLlmsTxt as h, fetchNpmRegistryMeta as i, fetchGitHubDiscussions as j, getDocOverride as k, readLocalPackageInfo as l, extractSections as m, fetchLatestVersion as n, getInstalledSkillVersion as o, downloadLlmsDocs as p, fetchNpmPackage as r, parseVersionSpecifier as s, fetchReleaseNotes as t, resolveLocalPackageDocs as u, parseMarkdownLinks as v, isGitHubRepoUrl as w, fetchReadme as x, fetchGitDocs as y };
985
+
986
+ //# sourceMappingURL=releases.mjs.map