@oh-my-pi/pi-coding-agent 3.25.0 → 3.31.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/CHANGELOG.md +90 -0
  2. package/package.json +5 -5
  3. package/src/cli/args.ts +4 -0
  4. package/src/core/agent-session.ts +29 -2
  5. package/src/core/bash-executor.ts +2 -1
  6. package/src/core/custom-commands/bundled/review/index.ts +369 -14
  7. package/src/core/custom-commands/bundled/wt/index.ts +1 -1
  8. package/src/core/session-manager.ts +158 -246
  9. package/src/core/session-storage.ts +379 -0
  10. package/src/core/settings-manager.ts +155 -4
  11. package/src/core/system-prompt.ts +62 -64
  12. package/src/core/tools/ask.ts +5 -4
  13. package/src/core/tools/bash-interceptor.ts +26 -61
  14. package/src/core/tools/bash.ts +13 -8
  15. package/src/core/tools/complete.ts +2 -4
  16. package/src/core/tools/edit-diff.ts +11 -4
  17. package/src/core/tools/edit.ts +7 -13
  18. package/src/core/tools/find.ts +111 -50
  19. package/src/core/tools/gemini-image.ts +128 -147
  20. package/src/core/tools/grep.ts +397 -415
  21. package/src/core/tools/index.test.ts +5 -1
  22. package/src/core/tools/index.ts +6 -8
  23. package/src/core/tools/jtd-to-json-schema.ts +174 -196
  24. package/src/core/tools/ls.ts +12 -10
  25. package/src/core/tools/lsp/client.ts +58 -9
  26. package/src/core/tools/lsp/config.ts +205 -656
  27. package/src/core/tools/lsp/defaults.json +465 -0
  28. package/src/core/tools/lsp/index.ts +55 -32
  29. package/src/core/tools/lsp/rust-analyzer.ts +49 -10
  30. package/src/core/tools/lsp/types.ts +1 -0
  31. package/src/core/tools/lsp/utils.ts +1 -1
  32. package/src/core/tools/read.ts +152 -76
  33. package/src/core/tools/render-utils.ts +70 -10
  34. package/src/core/tools/review.ts +38 -126
  35. package/src/core/tools/task/artifacts.ts +5 -4
  36. package/src/core/tools/task/executor.ts +204 -67
  37. package/src/core/tools/task/index.ts +129 -92
  38. package/src/core/tools/task/name-generator.ts +1544 -214
  39. package/src/core/tools/task/parallel.ts +30 -3
  40. package/src/core/tools/task/render.ts +85 -39
  41. package/src/core/tools/task/types.ts +34 -11
  42. package/src/core/tools/task/worker.ts +152 -27
  43. package/src/core/tools/web-fetch.ts +220 -1657
  44. package/src/core/tools/web-scrapers/academic.test.ts +239 -0
  45. package/src/core/tools/web-scrapers/artifacthub.ts +215 -0
  46. package/src/core/tools/web-scrapers/arxiv.ts +88 -0
  47. package/src/core/tools/web-scrapers/aur.ts +175 -0
  48. package/src/core/tools/web-scrapers/biorxiv.ts +141 -0
  49. package/src/core/tools/web-scrapers/bluesky.ts +284 -0
  50. package/src/core/tools/web-scrapers/brew.ts +177 -0
  51. package/src/core/tools/web-scrapers/business.test.ts +82 -0
  52. package/src/core/tools/web-scrapers/cheatsh.ts +78 -0
  53. package/src/core/tools/web-scrapers/chocolatey.ts +158 -0
  54. package/src/core/tools/web-scrapers/choosealicense.ts +110 -0
  55. package/src/core/tools/web-scrapers/cisa-kev.ts +100 -0
  56. package/src/core/tools/web-scrapers/clojars.ts +180 -0
  57. package/src/core/tools/web-scrapers/coingecko.ts +184 -0
  58. package/src/core/tools/web-scrapers/crates-io.ts +128 -0
  59. package/src/core/tools/web-scrapers/crossref.ts +149 -0
  60. package/src/core/tools/web-scrapers/dev-platforms.test.ts +254 -0
  61. package/src/core/tools/web-scrapers/devto.ts +177 -0
  62. package/src/core/tools/web-scrapers/discogs.ts +308 -0
  63. package/src/core/tools/web-scrapers/discourse.ts +221 -0
  64. package/src/core/tools/web-scrapers/dockerhub.ts +160 -0
  65. package/src/core/tools/web-scrapers/documentation.test.ts +85 -0
  66. package/src/core/tools/web-scrapers/fdroid.ts +158 -0
  67. package/src/core/tools/web-scrapers/finance-media.test.ts +144 -0
  68. package/src/core/tools/web-scrapers/firefox-addons.ts +214 -0
  69. package/src/core/tools/web-scrapers/flathub.ts +239 -0
  70. package/src/core/tools/web-scrapers/git-hosting.test.ts +272 -0
  71. package/src/core/tools/web-scrapers/github-gist.ts +68 -0
  72. package/src/core/tools/web-scrapers/github.ts +455 -0
  73. package/src/core/tools/web-scrapers/gitlab.ts +456 -0
  74. package/src/core/tools/web-scrapers/go-pkg.ts +275 -0
  75. package/src/core/tools/web-scrapers/hackage.ts +94 -0
  76. package/src/core/tools/web-scrapers/hackernews.ts +208 -0
  77. package/src/core/tools/web-scrapers/hex.ts +121 -0
  78. package/src/core/tools/web-scrapers/huggingface.ts +385 -0
  79. package/src/core/tools/web-scrapers/iacr.ts +86 -0
  80. package/src/core/tools/web-scrapers/index.ts +250 -0
  81. package/src/core/tools/web-scrapers/jetbrains-marketplace.ts +169 -0
  82. package/src/core/tools/web-scrapers/lemmy.ts +220 -0
  83. package/src/core/tools/web-scrapers/lobsters.ts +186 -0
  84. package/src/core/tools/web-scrapers/mastodon.ts +310 -0
  85. package/src/core/tools/web-scrapers/maven.ts +152 -0
  86. package/src/core/tools/web-scrapers/mdn.ts +174 -0
  87. package/src/core/tools/web-scrapers/media.test.ts +138 -0
  88. package/src/core/tools/web-scrapers/metacpan.ts +253 -0
  89. package/src/core/tools/web-scrapers/musicbrainz.ts +273 -0
  90. package/src/core/tools/web-scrapers/npm.ts +114 -0
  91. package/src/core/tools/web-scrapers/nuget.ts +205 -0
  92. package/src/core/tools/web-scrapers/nvd.ts +243 -0
  93. package/src/core/tools/web-scrapers/ollama.ts +267 -0
  94. package/src/core/tools/web-scrapers/open-vsx.ts +119 -0
  95. package/src/core/tools/web-scrapers/opencorporates.ts +275 -0
  96. package/src/core/tools/web-scrapers/openlibrary.ts +319 -0
  97. package/src/core/tools/web-scrapers/orcid.ts +299 -0
  98. package/src/core/tools/web-scrapers/osv.ts +189 -0
  99. package/src/core/tools/web-scrapers/package-managers-2.test.ts +199 -0
  100. package/src/core/tools/web-scrapers/package-managers.test.ts +171 -0
  101. package/src/core/tools/web-scrapers/package-registries.test.ts +259 -0
  102. package/src/core/tools/web-scrapers/packagist.ts +174 -0
  103. package/src/core/tools/web-scrapers/pub-dev.ts +185 -0
  104. package/src/core/tools/web-scrapers/pubmed.ts +178 -0
  105. package/src/core/tools/web-scrapers/pypi.ts +129 -0
  106. package/src/core/tools/web-scrapers/rawg.ts +124 -0
  107. package/src/core/tools/web-scrapers/readthedocs.ts +126 -0
  108. package/src/core/tools/web-scrapers/reddit.ts +104 -0
  109. package/src/core/tools/web-scrapers/repology.ts +262 -0
  110. package/src/core/tools/web-scrapers/research.test.ts +107 -0
  111. package/src/core/tools/web-scrapers/rfc.ts +209 -0
  112. package/src/core/tools/web-scrapers/rubygems.ts +117 -0
  113. package/src/core/tools/web-scrapers/searchcode.ts +217 -0
  114. package/src/core/tools/web-scrapers/sec-edgar.ts +274 -0
  115. package/src/core/tools/web-scrapers/security.test.ts +103 -0
  116. package/src/core/tools/web-scrapers/semantic-scholar.ts +190 -0
  117. package/src/core/tools/web-scrapers/snapcraft.ts +200 -0
  118. package/src/core/tools/web-scrapers/social-extended.test.ts +192 -0
  119. package/src/core/tools/web-scrapers/social.test.ts +259 -0
  120. package/src/core/tools/web-scrapers/sourcegraph.ts +373 -0
  121. package/src/core/tools/web-scrapers/spdx.ts +121 -0
  122. package/src/core/tools/web-scrapers/spotify.ts +218 -0
  123. package/src/core/tools/web-scrapers/stackexchange.test.ts +120 -0
  124. package/src/core/tools/web-scrapers/stackoverflow.ts +124 -0
  125. package/src/core/tools/web-scrapers/standards.test.ts +122 -0
  126. package/src/core/tools/web-scrapers/terraform.ts +304 -0
  127. package/src/core/tools/web-scrapers/tldr.ts +51 -0
  128. package/src/core/tools/web-scrapers/twitter.ts +96 -0
  129. package/src/core/tools/web-scrapers/types.ts +234 -0
  130. package/src/core/tools/web-scrapers/utils.ts +162 -0
  131. package/src/core/tools/web-scrapers/vimeo.ts +152 -0
  132. package/src/core/tools/web-scrapers/vscode-marketplace.ts +195 -0
  133. package/src/core/tools/web-scrapers/w3c.ts +163 -0
  134. package/src/core/tools/web-scrapers/wikidata.ts +357 -0
  135. package/src/core/tools/web-scrapers/wikipedia.test.ts +73 -0
  136. package/src/core/tools/web-scrapers/wikipedia.ts +95 -0
  137. package/src/core/tools/web-scrapers/youtube.test.ts +198 -0
  138. package/src/core/tools/web-scrapers/youtube.ts +371 -0
  139. package/src/core/tools/write.ts +21 -18
  140. package/src/core/voice.ts +3 -2
  141. package/src/lib/worktree/collapse.ts +2 -1
  142. package/src/lib/worktree/git.ts +2 -18
  143. package/src/main.ts +59 -3
  144. package/src/modes/interactive/components/extensions/extension-dashboard.ts +33 -19
  145. package/src/modes/interactive/components/extensions/extension-list.ts +15 -8
  146. package/src/modes/interactive/components/hook-editor.ts +2 -1
  147. package/src/modes/interactive/components/model-selector.ts +19 -4
  148. package/src/modes/interactive/interactive-mode.ts +41 -38
  149. package/src/modes/interactive/theme/theme.ts +58 -58
  150. package/src/modes/rpc/rpc-mode.ts +10 -9
  151. package/src/prompts/review-request.md +27 -0
  152. package/src/prompts/reviewer.md +64 -68
  153. package/src/prompts/tools/output.md +22 -3
  154. package/src/prompts/tools/task.md +32 -33
  155. package/src/utils/clipboard.ts +2 -1
  156. package/src/utils/tools-manager.ts +110 -8
  157. package/examples/extensions/subagent/agents/reviewer.md +0 -35
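
The headline change in this release is the web-fetch rewrite: `web-fetch.ts` shrinks by roughly 1,400 lines while some ninety site-specific scrapers land under `web-scrapers/`, each exporting a handler with the shared `SpecialHandler` signature visible in the hunks below. A minimal sketch of the dispatch pattern that signature implies — the actual registry lives in `web-scrapers/index.ts` and may well differ:

```ts
// Hypothetical dispatcher; the real registry in web-scrapers/index.ts may differ.
import type { RenderResult, SpecialHandler } from "./types";
import { handleReadTheDocs } from "./readthedocs";
import { handleReddit } from "./reddit";
import { handleRepology } from "./repology";

const handlers: SpecialHandler[] = [handleReadTheDocs, handleReddit, handleRepology];

export async function runSpecialHandlers(
  url: string,
  timeout: number,
  signal?: AbortSignal,
): Promise<RenderResult | null> {
  for (const handler of handlers) {
    // Each handler returns null when the URL is not for its site;
    // the first non-null result short-circuits the chain.
    const result = await handler(url, timeout, signal);
    if (result) return result;
  }
  return null; // caller falls back to the generic HTML pipeline
}
```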
package/src/core/tools/web-scrapers/readthedocs.ts
@@ -0,0 +1,126 @@
+ /**
+  * Read the Docs handler for web-fetch
+  */
+
+ import { parse as parseHtml } from "node-html-parser";
+ import type { RenderResult, SpecialHandler } from "./types";
+ import { finalizeOutput, htmlToBasicMarkdown, loadPage } from "./types";
+
+ export const handleReadTheDocs: SpecialHandler = async (
+   url: string,
+   timeout: number,
+   signal?: AbortSignal,
+ ): Promise<RenderResult | null> => {
+   // Check if URL matches Read the Docs patterns
+   const urlObj = new URL(url);
+   const isReadTheDocs =
+     urlObj.hostname.endsWith(".readthedocs.io") ||
+     urlObj.hostname === "readthedocs.org" ||
+     urlObj.hostname === "www.readthedocs.org";
+
+   if (!isReadTheDocs) {
+     return null;
+   }
+
+   const notes: string[] = [];
+   const fetchedAt = new Date().toISOString();
+
+   // Fetch the page
+   const result = await loadPage(url, { timeout, signal });
+   if (!result.ok) {
+     return {
+       url,
+       finalUrl: result.finalUrl,
+       contentType: result.contentType,
+       method: "readthedocs",
+       content: `Failed to fetch Read the Docs page (status: ${result.status ?? "unknown"})`,
+       fetchedAt,
+       truncated: false,
+       notes,
+     };
+   }
+
+   // Parse HTML
+   const root = parseHtml(result.content);
+
+   // Extract main content from common Read the Docs selectors
+   let mainContent =
+     root.querySelector(".document") ||
+     root.querySelector('[role="main"]') ||
+     root.querySelector("main") ||
+     root.querySelector(".rst-content") ||
+     root.querySelector(".body");
+
+   if (!mainContent) {
+     // Fallback to body if no main content found
+     mainContent = root.querySelector("body");
+     notes.push("Using full body content (no main content div found)");
+   }
+
+   // Remove navigation, sidebar, footer elements
+   mainContent
+     ?.querySelectorAll(
+       ".headerlink, .viewcode-link, nav, .sidebar, footer, .related, .sphinxsidebar, .toctree-wrapper",
+     )
+     .forEach((el) => {
+       el.remove();
+     });
+
+   // Try to find Edit on GitHub/GitLab links for raw source
+   const editLinks = root.querySelectorAll('a[href*="github.com"], a[href*="gitlab.com"]');
+   let sourceUrl: string | null = null;
+
+   for (const link of editLinks) {
+     const href = link.getAttribute("href");
+     const text = link.textContent?.toLowerCase() || "";
+
+     if (href && (text.includes("edit") || text.includes("source"))) {
+       // Convert edit URL to raw URL
+       if (href.includes("github.com")) {
+         sourceUrl = href.replace("/blob/", "/raw/").replace("/edit/", "/raw/");
+       } else if (href.includes("gitlab.com")) {
+         sourceUrl = href.replace("/blob/", "/raw/").replace("/edit/", "/raw/");
+       }
+       break;
+     }
+   }
+
+   let content = "";
+
+   // Try to fetch raw source if available
+   if (sourceUrl) {
+     try {
+       const sourceResult = await loadPage(sourceUrl, { timeout: Math.min(timeout, 10), signal });
+       if (sourceResult.ok && sourceResult.content.length > 0 && sourceResult.content.length < 1_000_000) {
+         content = sourceResult.content;
+         notes.push(`Fetched raw source from ${sourceUrl}`);
+       }
+     } catch (_err) {
+       // Ignore errors, fall back to HTML
+     }
+   }
+
+   // If no raw source, convert HTML to markdown
+   if (!content && mainContent) {
+     const html = mainContent.innerHTML;
+     content = htmlToBasicMarkdown(html);
+   }
+
+   if (!content) {
+     content = "No content extracted from Read the Docs page";
+     notes.push("Failed to extract content");
+   }
+
+   const { content: finalContent, truncated } = finalizeOutput(content);
+
+   return {
+     url,
+     finalUrl: result.finalUrl,
+     contentType: sourceUrl ? "text/plain" : "text/html",
+     method: "readthedocs",
+     content: finalContent,
+     fetchedAt,
+     truncated,
+     notes,
+   };
+ };
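
One detail worth noting in the handler above: the edit-link rewrite assumes that swapping `/blob/` or `/edit/` for `/raw/` yields a fetchable raw URL on both GitHub and GitLab. A worked example with a hypothetical URL:

```ts
// Hypothetical "Edit on GitHub" href, as found in a Read the Docs theme footer.
const href = "https://github.com/example/project/blob/main/docs/index.rst";
const raw = href.replace("/blob/", "/raw/").replace("/edit/", "/raw/");
// raw === "https://github.com/example/project/raw/main/docs/index.rst"
// GitHub answers /raw/ paths with a redirect to raw.githubusercontent.com,
// which loadPage presumably follows (it reports a finalUrl), so the .rst
// source comes back as plain text rather than rendered HTML.
```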
package/src/core/tools/web-scrapers/reddit.ts
@@ -0,0 +1,104 @@
+ import type { RenderResult, SpecialHandler } from "./types";
+ import { finalizeOutput, loadPage } from "./types";
+
+ interface RedditPost {
+   title: string;
+   selftext: string;
+   author: string;
+   score: number;
+   num_comments: number;
+   created_utc: number;
+   subreddit: string;
+   url: string;
+   is_self: boolean;
+ }
+
+ interface RedditComment {
+   body: string;
+   author: string;
+   score: number;
+   created_utc: number;
+   replies?: { data: { children: Array<{ data: RedditComment }> } };
+ }
+
+ /**
+  * Handle Reddit URLs via JSON API
+  */
+ export const handleReddit: SpecialHandler = async (
+   url: string,
+   timeout: number,
+   signal?: AbortSignal,
+ ): Promise<RenderResult | null> => {
+   try {
+     const parsed = new URL(url);
+     if (!parsed.hostname.includes("reddit.com")) return null;
+
+     const fetchedAt = new Date().toISOString();
+
+     // Append .json to get JSON response
+     let jsonUrl = `${url.replace(/\/$/, "")}.json`;
+     if (parsed.search) {
+       jsonUrl = `${url.replace(/\/$/, "").replace(parsed.search, "")}.json${parsed.search}`;
+     }
+
+     const result = await loadPage(jsonUrl, { timeout, signal });
+     if (!result.ok) return null;
+
+     const data = JSON.parse(result.content);
+     let md = "";
+
+     // Handle different Reddit URL types
+     if (Array.isArray(data) && data.length >= 1) {
+       // Post page (with comments)
+       const postData = data[0]?.data?.children?.[0]?.data as RedditPost | undefined;
+       if (postData) {
+         md = `# ${postData.title}\n\n`;
+         md += `**r/${postData.subreddit}** · u/${postData.author} · ${postData.score} points · ${postData.num_comments} comments\n`;
+         md += `*${new Date(postData.created_utc * 1000).toISOString().split("T")[0]}*\n\n`;
+
+         if (postData.is_self && postData.selftext) {
+           md += `---\n\n${postData.selftext}\n\n`;
+         } else if (!postData.is_self) {
+           md += `**Link:** ${postData.url}\n\n`;
+         }
+
+         // Add comments if available
+         if (data.length >= 2 && data[1]?.data?.children) {
+           md += `---\n\n## Top Comments\n\n`;
+           const comments = data[1].data.children.filter((c: { kind: string }) => c.kind === "t1").slice(0, 10);
+
+           for (const { data: comment } of comments as Array<{ data: RedditComment }>) {
+             md += `### u/${comment.author} · ${comment.score} points\n\n`;
+             md += `${comment.body}\n\n---\n\n`;
+           }
+         }
+       }
+     } else if (data?.data?.children) {
+       // Subreddit or listing page
+       const posts = data.data.children.slice(0, 20) as Array<{ data: RedditPost }>;
+       const subreddit = posts[0]?.data?.subreddit;
+
+       md = `# r/${subreddit || "Reddit"}\n\n`;
+       for (const { data: post } of posts) {
+         md += `- **${post.title}** (${post.score} pts, ${post.num_comments} comments)\n`;
+         md += `  by u/${post.author}\n\n`;
+       }
+     }
+
+     if (!md) return null;
+
+     const output = finalizeOutput(md);
+     return {
+       url,
+       finalUrl: url,
+       contentType: "text/markdown",
+       method: "reddit",
+       content: output.content,
+       fetchedAt,
+       truncated: output.truncated,
+       notes: ["Fetched via Reddit JSON API"],
+     };
+   } catch {}
+
+   return null;
+ };
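
The `.json` rewrite above relies on Reddit's convention that appending `.json` to almost any canonical URL returns the same listing as JSON; the query-string branch splices `.json` in before the query rather than after it. Tracing the code on hypothetical inputs:

```ts
// How the rewrite behaves on example (hypothetical) URLs:
//   https://www.reddit.com/r/typescript/comments/abc123/post_title/
//     -> https://www.reddit.com/r/typescript/comments/abc123/post_title.json
//   https://www.reddit.com/r/typescript/?sort=top
//     -> https://www.reddit.com/r/typescript/.json?sort=top
// (In the second case the trailing slash survives because the /\/$/ strip
// runs before the query is removed; Reddit appears to accept the "/.json"
// form, so the leftover slash is harmless.)
```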
package/src/core/tools/web-scrapers/repology.ts
@@ -0,0 +1,262 @@
+ import type { RenderResult, SpecialHandler } from "./types";
+ import { finalizeOutput, loadPage } from "./types";
+
+ interface RepologyPackage {
+   repo: string;
+   subrepo?: string;
+   srcname?: string;
+   binname?: string;
+   visiblename?: string;
+   version: string;
+   origversion?: string;
+   status:
+     | "newest"
+     | "devel"
+     | "unique"
+     | "outdated"
+     | "legacy"
+     | "rolling"
+     | "noscheme"
+     | "incorrect"
+     | "untrusted"
+     | "ignored";
+   summary?: string;
+   categories?: string[];
+   licenses?: string[];
+   maintainers?: string[];
+ }
+
+ /**
+  * Get emoji indicator for version status
+  */
+ function statusIndicator(status: string): string {
+   switch (status) {
+     case "newest":
+       return "\u2705"; // green check
+     case "devel":
+       return "\uD83D\uDEA7"; // construction
+     case "unique":
+       return "\uD83D\uDD35"; // blue circle
+     case "outdated":
+       return "\uD83D\uDD34"; // red circle
+     case "legacy":
+       return "\u26A0\uFE0F"; // warning
+     case "rolling":
+       return "\uD83D\uDD04"; // arrows
+     default:
+       return "\u2796"; // minus
+   }
+ }
+
+ /**
+  * Prettify repository name
+  */
+ function prettifyRepo(repo: string): string {
+   const mapping: Record<string, string> = {
+     arch: "Arch Linux",
+     aur: "AUR",
+     debian_unstable: "Debian Unstable",
+     debian_stable: "Debian Stable",
+     ubuntu_24_04: "Ubuntu 24.04",
+     ubuntu_22_04: "Ubuntu 22.04",
+     fedora_rawhide: "Fedora Rawhide",
+     fedora_40: "Fedora 40",
+     gentoo: "Gentoo",
+     nix_unstable: "Nixpkgs Unstable",
+     nix_stable: "Nixpkgs Stable",
+     homebrew: "Homebrew",
+     macports: "MacPorts",
+     alpine_edge: "Alpine Edge",
+     freebsd: "FreeBSD",
+     openbsd: "OpenBSD",
+     void_x86_64: "Void Linux",
+     opensuse_tumbleweed: "openSUSE Tumbleweed",
+     msys2_mingw: "MSYS2",
+     chocolatey: "Chocolatey",
+     winget: "Winget",
+     scoop: "Scoop",
+     conda_main: "Conda",
+     pypi: "PyPI",
+     crates_io: "Crates.io",
+     npm: "npm",
+     rubygems: "RubyGems",
+     cpan: "CPAN",
+     hackage: "Hackage",
+   };
+
+   // Check exact match first
+   if (mapping[repo]) return mapping[repo];
+
+   // Check partial matches
+   for (const [key, value] of Object.entries(mapping)) {
+     if (repo.startsWith(key)) return value;
+   }
+
+   // Fallback: titlecase with underscores replaced
+   return repo
+     .split("_")
+     .map((w) => w.charAt(0).toUpperCase() + w.slice(1))
+     .join(" ");
+ }
+
+ /**
+  * Handle Repology URLs via API
+  */
+ export const handleRepology: SpecialHandler = async (
+   url: string,
+   timeout: number,
+   signal?: AbortSignal,
+ ): Promise<RenderResult | null> => {
+   try {
+     const parsed = new URL(url);
+     if (parsed.hostname !== "repology.org" && parsed.hostname !== "www.repology.org") return null;
+
+     // Extract package name from /project/{name}/versions or /project/{name}/information
+     const match = parsed.pathname.match(/^\/project\/([^/]+)/);
+     if (!match) return null;
+
+     const packageName = decodeURIComponent(match[1]);
+     const fetchedAt = new Date().toISOString();
+
+     // Fetch from Repology API
+     const apiUrl = `https://repology.org/api/v1/project/${encodeURIComponent(packageName)}`;
+     const result = await loadPage(apiUrl, {
+       timeout,
+       headers: { Accept: "application/json" },
+       signal,
+     });
+
+     if (!result.ok) return null;
+
+     let packages: RepologyPackage[];
+     try {
+       packages = JSON.parse(result.content);
+     } catch {
+       return null;
+     }
+
+     // Empty response means package not found
+     if (!Array.isArray(packages) || packages.length === 0) return null;
+
+     // Find newest version(s) and extract metadata
+     const newestVersions = new Set<string>();
+     let summary: string | undefined;
+     let licenses: string[] = [];
+     const categories = new Set<string>();
+
+     for (const pkg of packages) {
+       if (pkg.status === "newest" || pkg.status === "unique") {
+         newestVersions.add(pkg.version);
+       }
+       if (!summary && pkg.summary) summary = pkg.summary;
+       if (pkg.licenses?.length && !licenses.length) licenses = pkg.licenses;
+       if (pkg.categories) {
+         for (const cat of pkg.categories) categories.add(cat);
+       }
+     }
+
+     // If no newest found, fall back to the first listed version
+     if (newestVersions.size === 0) {
+       const versions = packages.map((p) => p.version);
+       if (versions.length > 0) newestVersions.add(versions[0]);
+     }
+
+     // Group packages by status for counting
+     const statusCounts: Record<string, number> = {};
+     for (const pkg of packages) {
+       statusCounts[pkg.status] = (statusCounts[pkg.status] || 0) + 1;
+     }
+
+     // Build markdown
+     let md = `# ${packageName}\n\n`;
+     if (summary) md += `${summary}\n\n`;
+
+     md += `**Newest Version:** ${Array.from(newestVersions).join(", ") || "unknown"}\n`;
+     md += `**Repositories:** ${packages.length}\n`;
+     if (licenses.length) md += `**License:** ${licenses.join(", ")}\n`;
+     if (categories.size) md += `**Categories:** ${Array.from(categories).join(", ")}\n`;
+     md += "\n";
+
+     // Status summary
+     md += "## Version Status Summary\n\n";
+     const statusOrder = [
+       "newest",
+       "unique",
+       "devel",
+       "rolling",
+       "outdated",
+       "legacy",
+       "noscheme",
+       "incorrect",
+       "untrusted",
+       "ignored",
+     ];
+     for (const status of statusOrder) {
+       if (statusCounts[status]) {
+         md += `- ${statusIndicator(status)} **${status}**: ${statusCounts[status]} repos\n`;
+       }
+     }
+     md += "\n";
+
+     // Sort packages: newest first, then by repo name
+     const sortedPackages = [...packages].sort((a, b) => {
+       const statusPriority: Record<string, number> = {
+         newest: 0,
+         unique: 1,
+         devel: 2,
+         rolling: 3,
+         outdated: 4,
+         legacy: 5,
+         noscheme: 6,
+         incorrect: 7,
+         untrusted: 8,
+         ignored: 9,
+       };
+       const aPriority = statusPriority[a.status] ?? 10;
+       const bPriority = statusPriority[b.status] ?? 10;
+       if (aPriority !== bPriority) return aPriority - bPriority;
+       return a.repo.localeCompare(b.repo);
+     });
+
+     // Show top repositories (up to 15)
+     md += "## Package Versions by Repository\n\n";
+     md += "| Repository | Version | Status |\n";
+     md += "|------------|---------|--------|\n";
+
+     const shownRepos = new Set<string>();
+     let count = 0;
+     for (const pkg of sortedPackages) {
+       // Skip duplicate repos (some have multiple entries)
+       const repoKey = pkg.subrepo ? `${pkg.repo}/${pkg.subrepo}` : pkg.repo;
+       if (shownRepos.has(repoKey)) continue;
+       shownRepos.add(repoKey);
+
+       const repoName = prettifyRepo(pkg.repo);
+       const version = pkg.origversion || pkg.version;
+       md += `| ${repoName} | \`${version}\` | ${statusIndicator(pkg.status)} ${pkg.status} |\n`;
+
+       count++;
+       if (count >= 15) break;
+     }
+
+     if (packages.length > 15) {
+       md += `\n*...and ${packages.length - 15} more repositories*\n`;
+     }
+
+     md += `\n---\n\n[View on Repology](${url})\n`;
+
+     const output = finalizeOutput(md);
+     return {
+       url,
+       finalUrl: url,
+       contentType: "text/markdown",
+       method: "repology",
+       content: output.content,
+       fetchedAt,
+       truncated: output.truncated,
+       notes: ["Fetched via Repology API"],
+     };
+   } catch {}
+
+   return null;
+ };
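
For context, the endpoint used above (`/api/v1/project/{name}`) returns a flat JSON array with one entry per repository packaging the project. A trimmed, hypothetical payload matching the `RepologyPackage` shape, with the behavior it would produce:

```ts
// Trimmed, hypothetical API payload; real responses carry many more entries.
const sample: RepologyPackage[] = [
  { repo: "arch", version: "2.1.0", status: "newest", summary: "An example tool" },
  { repo: "debian_stable", version: "1.9.4", status: "outdated" },
  { repo: "homebrew", version: "2.1.0", status: "newest", licenses: ["MIT"] },
];
// With this input the handler would report Newest Version 2.1.0, three
// repositories, and license MIT, and the table would sort Arch Linux and
// Homebrew (status priority 0) ahead of Debian Stable (priority 4).
```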
package/src/core/tools/web-scrapers/research.test.ts
@@ -0,0 +1,107 @@
+ import { describe, expect, it } from "bun:test";
+ import { handleBiorxiv } from "./biorxiv";
+ import { handleOpenLibrary } from "./openlibrary";
+ import { handleWikidata } from "./wikidata";
+
+ const SKIP = !process.env.WEB_FETCH_INTEGRATION;
+
+ describe.skipIf(SKIP)("handleWikidata", () => {
+   it("returns null for non-matching URLs", async () => {
+     const result = await handleWikidata("https://example.com", 20);
+     expect(result).toBeNull();
+   });
+
+   it("returns null for non-wikidata URLs", async () => {
+     const result = await handleWikidata("https://wikipedia.org/wiki/Apple_Inc", 20);
+     expect(result).toBeNull();
+   });
+
+   it("fetches Q312 - Apple Inc", async () => {
+     const result = await handleWikidata("https://www.wikidata.org/wiki/Q312", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("wikidata");
+     expect(result?.content).toContain("Apple");
+     expect(result?.content).toContain("Q312");
+     expect(result?.contentType).toBe("text/markdown");
+     expect(result?.fetchedAt).toBeTruthy();
+     expect(result?.truncated).toBeDefined();
+   });
+
+   it("fetches Q5 - human (entity)", async () => {
+     const result = await handleWikidata("https://www.wikidata.org/entity/Q5", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("wikidata");
+     expect(result?.content).toContain("human");
+     expect(result?.content).toContain("Q5");
+     expect(result?.contentType).toBe("text/markdown");
+     expect(result?.fetchedAt).toBeTruthy();
+     expect(result?.truncated).toBeDefined();
+   });
+ });
+
+ describe.skipIf(SKIP)("handleOpenLibrary", () => {
+   it("returns null for non-matching URLs", async () => {
+     const result = await handleOpenLibrary("https://example.com", 20);
+     expect(result).toBeNull();
+   });
+
+   it("returns null for non-openlibrary URLs", async () => {
+     const result = await handleOpenLibrary("https://amazon.com/books/123", 20);
+     expect(result).toBeNull();
+   });
+
+   it("fetches by ISBN - Fantastic Mr Fox", async () => {
+     const result = await handleOpenLibrary("https://openlibrary.org/isbn/9780140328721", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("openlibrary");
+     expect(result?.content).toContain("Fantastic Mr");
+     expect(result?.content).toContain("Roald Dahl");
+     expect(result?.contentType).toBe("text/markdown");
+     expect(result?.fetchedAt).toBeTruthy();
+     expect(result?.truncated).toBeDefined();
+   });
+
+   it("fetches work OL45804W - The Lord of the Rings", async () => {
+     const result = await handleOpenLibrary("https://openlibrary.org/works/OL45804W", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("openlibrary");
+     expect(result?.content).toContain("Lord of the Rings");
+     expect(result?.content).toContain("Tolkien");
+     expect(result?.contentType).toBe("text/markdown");
+     expect(result?.fetchedAt).toBeTruthy();
+     expect(result?.truncated).toBeDefined();
+   });
+ });
+
+ describe.skipIf(SKIP)("handleBiorxiv", () => {
+   it("returns null for non-matching URLs", async () => {
+     const result = await handleBiorxiv("https://example.com", 20);
+     expect(result).toBeNull();
+   });
+
+   it("returns null for non-biorxiv URLs", async () => {
+     const result = await handleBiorxiv("https://nature.com/articles/123", 20);
+     expect(result).toBeNull();
+   });
+
+   // Using the AlphaFold Protein Structure Database paper - highly cited and stable
+   it("fetches bioRxiv preprint - AlphaFold database", async () => {
+     const result = await handleBiorxiv("https://www.biorxiv.org/content/10.1101/2021.10.04.463034", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("biorxiv");
+     expect(result?.content).toContain("AlphaFold");
+     expect(result?.content).toContain("Abstract");
+     expect(result?.contentType).toBe("text/markdown");
+     expect(result?.fetchedAt).toBeTruthy();
+     expect(result?.truncated).toBeDefined();
+   });
+
+   // Testing with version suffix handling
+   it("fetches bioRxiv preprint with version suffix", async () => {
+     const result = await handleBiorxiv("https://www.biorxiv.org/content/10.1101/2021.10.04.463034v1", 20);
+     expect(result).not.toBeNull();
+     expect(result?.method).toBe("biorxiv");
+     expect(result?.content).toContain("AlphaFold");
+     expect(result?.contentType).toBe("text/markdown");
+   });
+ });
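
Note that all three suites above are gated behind `describe.skipIf(SKIP)`, so the live-network assertions only execute when the `WEB_FETCH_INTEGRATION` environment variable is set (presumably something like `WEB_FETCH_INTEGRATION=1 bun test`); in an ordinary CI run they are skipped, keeping the test pass hermetic.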