@oh-my-pi/pi-coding-agent 3.30.0 → 3.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +71 -0
- package/package.json +5 -5
- package/src/cli/args.ts +4 -0
- package/src/core/agent-session.ts +29 -2
- package/src/core/bash-executor.ts +2 -1
- package/src/core/custom-commands/bundled/review/index.ts +369 -14
- package/src/core/custom-commands/bundled/wt/index.ts +1 -1
- package/src/core/session-manager.ts +158 -246
- package/src/core/session-storage.ts +379 -0
- package/src/core/settings-manager.ts +155 -4
- package/src/core/system-prompt.ts +62 -64
- package/src/core/tools/ask.ts +5 -4
- package/src/core/tools/bash-interceptor.ts +26 -61
- package/src/core/tools/bash.ts +13 -8
- package/src/core/tools/edit-diff.ts +11 -4
- package/src/core/tools/edit.ts +7 -13
- package/src/core/tools/find.ts +111 -50
- package/src/core/tools/gemini-image.ts +128 -147
- package/src/core/tools/grep.ts +397 -415
- package/src/core/tools/index.test.ts +5 -1
- package/src/core/tools/index.ts +6 -8
- package/src/core/tools/ls.ts +12 -10
- package/src/core/tools/lsp/client.ts +58 -9
- package/src/core/tools/lsp/config.ts +205 -656
- package/src/core/tools/lsp/defaults.json +465 -0
- package/src/core/tools/lsp/index.ts +55 -32
- package/src/core/tools/lsp/rust-analyzer.ts +49 -10
- package/src/core/tools/lsp/types.ts +1 -0
- package/src/core/tools/lsp/utils.ts +1 -1
- package/src/core/tools/read.ts +150 -74
- package/src/core/tools/render-utils.ts +70 -10
- package/src/core/tools/review.ts +38 -126
- package/src/core/tools/task/artifacts.ts +5 -4
- package/src/core/tools/task/executor.ts +94 -83
- package/src/core/tools/task/index.ts +129 -92
- package/src/core/tools/task/parallel.ts +30 -3
- package/src/core/tools/task/render.ts +85 -39
- package/src/core/tools/task/types.ts +15 -6
- package/src/core/tools/task/worker.ts +124 -89
- package/src/core/tools/web-fetch.ts +112 -377
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/artifacthub.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/arxiv.ts +8 -4
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/aur.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/biorxiv.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/bluesky.ts +10 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/brew.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/cheatsh.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/chocolatey.ts +6 -1
- package/src/core/tools/web-scrapers/choosealicense.ts +110 -0
- package/src/core/tools/web-scrapers/cisa-kev.ts +100 -0
- package/src/core/tools/web-scrapers/clojars.ts +180 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/coingecko.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/crates-io.ts +7 -2
- package/src/core/tools/web-scrapers/crossref.ts +149 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/devto.ts +8 -4
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/discogs.ts +6 -1
- package/src/core/tools/web-scrapers/discourse.ts +221 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/dockerhub.ts +7 -3
- package/src/core/tools/web-scrapers/fdroid.ts +158 -0
- package/src/core/tools/web-scrapers/firefox-addons.ts +214 -0
- package/src/core/tools/web-scrapers/flathub.ts +239 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/github-gist.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/github.ts +63 -32
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/gitlab.ts +31 -19
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/go-pkg.ts +8 -4
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/hackage.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/hackernews.ts +18 -18
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/hex.ts +3 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/huggingface.ts +10 -10
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/iacr.ts +8 -4
- package/src/core/tools/web-scrapers/index.ts +250 -0
- package/src/core/tools/web-scrapers/jetbrains-marketplace.ts +169 -0
- package/src/core/tools/web-scrapers/lemmy.ts +220 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/lobsters.ts +3 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/mastodon.ts +11 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/maven.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/mdn.ts +2 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/metacpan.ts +13 -7
- package/src/core/tools/web-scrapers/musicbrainz.ts +273 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/npm.ts +12 -5
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/nuget.ts +9 -5
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/nvd.ts +6 -1
- package/src/core/tools/web-scrapers/ollama.ts +267 -0
- package/src/core/tools/web-scrapers/open-vsx.ts +119 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/opencorporates.ts +2 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/openlibrary.ts +18 -12
- package/src/core/tools/web-scrapers/orcid.ts +299 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/osv.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/packagist.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/pub-dev.ts +3 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/pubmed.ts +8 -4
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/pypi.ts +7 -3
- package/src/core/tools/web-scrapers/rawg.ts +124 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/readthedocs.ts +7 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/reddit.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/repology.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/rfc.ts +7 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/rubygems.ts +6 -1
- package/src/core/tools/web-scrapers/searchcode.ts +217 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/sec-edgar.ts +6 -1
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/semantic-scholar.ts +2 -2
- package/src/core/tools/web-scrapers/snapcraft.ts +200 -0
- package/src/core/tools/web-scrapers/sourcegraph.ts +373 -0
- package/src/core/tools/web-scrapers/spdx.ts +121 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/spotify.ts +3 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/stackoverflow.ts +3 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/terraform.ts +11 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/tldr.ts +6 -2
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/twitter.ts +15 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/types.ts +98 -27
- package/src/core/tools/web-scrapers/utils.ts +162 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/vimeo.ts +3 -3
- package/src/core/tools/web-scrapers/vscode-marketplace.ts +195 -0
- package/src/core/tools/web-scrapers/w3c.ts +163 -0
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/wikidata.ts +13 -5
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/wikipedia.ts +7 -3
- package/src/core/tools/{web-fetch-handlers → web-scrapers}/youtube.ts +72 -20
- package/src/core/tools/write.ts +21 -18
- package/src/core/voice.ts +3 -2
- package/src/lib/worktree/collapse.ts +2 -1
- package/src/lib/worktree/git.ts +2 -18
- package/src/main.ts +59 -3
- package/src/modes/interactive/components/extensions/extension-dashboard.ts +33 -19
- package/src/modes/interactive/components/extensions/extension-list.ts +15 -8
- package/src/modes/interactive/components/hook-editor.ts +2 -1
- package/src/modes/interactive/components/model-selector.ts +19 -4
- package/src/modes/interactive/interactive-mode.ts +41 -38
- package/src/modes/interactive/theme/theme.ts +58 -58
- package/src/modes/rpc/rpc-mode.ts +10 -9
- package/src/prompts/review-request.md +27 -0
- package/src/prompts/reviewer.md +64 -68
- package/src/prompts/tools/output.md +22 -3
- package/src/prompts/tools/task.md +32 -33
- package/src/utils/clipboard.ts +2 -1
- package/examples/extensions/subagent/agents/reviewer.md +0 -35
- package/src/core/tools/web-fetch-handlers/index.ts +0 -69
- package/src/core/tools/web-fetch-handlers/utils.ts +0 -91
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/academic.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/business.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/dev-platforms.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/documentation.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/finance-media.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/git-hosting.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/media.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/package-managers-2.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/package-managers.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/package-registries.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/research.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/security.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/social-extended.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/social.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/stackexchange.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/standards.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/wikipedia.test.ts +0 -0
- /package/src/core/tools/{web-fetch-handlers → web-scrapers}/youtube.test.ts +0 -0
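
The scraper modules move from `web-fetch-handlers/` to `web-scrapers/`, and the handlers in the hunks below gain an optional `AbortSignal` parameter that is threaded into every `loadPage` call. As a rough sketch of what that enables on the caller side (the `AbortController` wiring, import path, and 30-second cap here are illustrative assumptions, not code from the package):

```ts
// Illustrative only: cancel a scraper handler from the outside.
// The (url, timeout, signal) parameter order matches the handlers below;
// the import path and timeout value are assumptions.
import { handleSourcegraph } from "./web-scrapers/sourcegraph";

async function fetchWithCancel(url: string): Promise<string | null> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 30_000); // hypothetical 30s cap
  try {
    // Handlers return null when the URL is not theirs or the fetch fails.
    const result = await handleSourcegraph(url, 30, controller.signal);
    return result ? result.content : null;
  } finally {
    clearTimeout(timer);
  }
}
```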

package/src/core/tools/web-scrapers/sourcegraph.ts

@@ -0,0 +1,373 @@
+import type { RenderResult, SpecialHandler } from "./types";
+import { finalizeOutput, loadPage } from "./types";
+
+const GRAPHQL_ENDPOINT = "https://sourcegraph.com/.api/graphql";
+const GRAPHQL_HEADERS = {
+  Accept: "application/json",
+  "Content-Type": "application/json",
+};
+
+type SourcegraphTarget =
+  | { type: "search"; query: string }
+  | { type: "repo"; repoName: string; rev?: string }
+  | { type: "file"; repoName: string; rev?: string; filePath: string };
+
+interface SourcegraphRepository {
+  name: string;
+  url: string;
+  description?: string | null;
+  defaultBranch?: { name: string } | null;
+}
+
+interface RepoQueryData {
+  repository?: SourcegraphRepository | null;
+}
+
+interface RepoFileQueryData {
+  repository?:
+    | (SourcegraphRepository & {
+        commit?: {
+          blob?: { content?: string | null } | null;
+        } | null;
+      })
+    | null;
+}
+
+interface SearchQueryData {
+  search?: {
+    results?: {
+      results?: SearchResultItem[] | null;
+      matchCount?: number | null;
+      limitHit?: boolean | null;
+    } | null;
+  } | null;
+}
+
+interface FileMatchResult {
+  __typename: "FileMatch";
+  repository?: { name?: string | null; url?: string | null } | null;
+  file?: { path?: string | null; url?: string | null } | null;
+  lineMatches?: Array<{ preview?: string | null; lineNumber?: number | null }> | null;
+}
+
+interface RepositoryResult {
+  __typename: "Repository";
+  name?: string | null;
+  url?: string | null;
+}
+
+type SearchResultItem = FileMatchResult | RepositoryResult | { __typename: string };
+
+const REPO_QUERY = `query Repo($name: String!) {
+  repository(name: $name) {
+    name
+    url
+    description
+    defaultBranch {
+      name
+    }
+  }
+}`;
+
+const REPO_FILE_QUERY = `query RepoFile($name: String!, $path: String!, $rev: String!) {
+  repository(name: $name) {
+    name
+    url
+    description
+    defaultBranch {
+      name
+    }
+    commit(rev: $rev) {
+      blob(path: $path) {
+        content
+      }
+    }
+  }
+}`;
+
+const SEARCH_QUERY = `query Search($query: String!) {
+  search(query: $query, version: V2) {
+    results {
+      results {
+        __typename
+        ... on FileMatch {
+          repository {
+            name
+            url
+          }
+          file {
+            path
+            url
+          }
+          lineMatches {
+            preview
+            lineNumber
+          }
+        }
+        ... on Repository {
+          name
+          url
+        }
+      }
+      matchCount
+      limitHit
+    }
+  }
+}`;
+
+function parseSourcegraphUrl(url: string): SourcegraphTarget | null {
+  try {
+    const parsed = new URL(url);
+    if (parsed.hostname !== "sourcegraph.com" && parsed.hostname !== "www.sourcegraph.com") return null;
+
+    if (parsed.pathname.startsWith("/search")) {
+      const query = parsed.searchParams.get("q")?.trim();
+      if (!query) return null;
+      return { type: "search", query };
+    }
+
+    const parts = parsed.pathname
+      .split("/")
+      .filter(Boolean)
+      .map((part) => decodeURIComponent(part));
+    if (parts.length < 3) return null;
+
+    const hyphenIndex = parts.indexOf("-");
+    const repoParts = hyphenIndex === -1 ? parts : parts.slice(0, hyphenIndex);
+    if (repoParts.length < 3) return null;
+
+    const lastRepoPart = repoParts[repoParts.length - 1];
+    const atIndex = lastRepoPart.indexOf("@");
+    let rev: string | undefined;
+    let repoTail = lastRepoPart;
+    if (atIndex > 0) {
+      repoTail = lastRepoPart.slice(0, atIndex);
+      rev = lastRepoPart.slice(atIndex + 1) || undefined;
+    }
+
+    repoParts[repoParts.length - 1] = repoTail;
+    const repoName = repoParts.join("/");
+
+    if (hyphenIndex !== -1 && parts[hyphenIndex + 1] === "blob") {
+      const filePath = parts.slice(hyphenIndex + 2).join("/");
+      if (!filePath) return null;
+      return { type: "file", repoName, rev, filePath };
+    }
+
+    return { type: "repo", repoName, rev };
+  } catch {
+    return null;
+  }
+}
+
+function safeParseJson<T>(content: string): T | null {
+  try {
+    return JSON.parse(content) as T;
+  } catch {
+    return null;
+  }
+}
+
+async function fetchGraphql<T>(
+  query: string,
+  variables: Record<string, unknown>,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<T | null> {
+  const body = JSON.stringify({ query, variables });
+  const result = await loadPage(GRAPHQL_ENDPOINT, {
+    timeout,
+    headers: GRAPHQL_HEADERS,
+    method: "POST",
+    body,
+    signal,
+  });
+  if (!result.ok) return null;
+
+  const parsed = safeParseJson<{ data?: T; errors?: unknown }>(result.content);
+  if (!parsed?.data) return null;
+  if (Array.isArray(parsed.errors) && parsed.errors.length > 0) return null;
+  return parsed.data;
+}
+
+function isFileMatchResult(result: SearchResultItem): result is FileMatchResult {
+  return result.__typename === "FileMatch";
+}
+
+function isRepositoryResult(result: SearchResultItem): result is RepositoryResult {
+  return result.__typename === "Repository";
+}
+
+function formatRepoMarkdown(repo: SourcegraphRepository): string {
+  let md = `# ${repo.name}\n\n`;
+  if (repo.description) md += `${repo.description}\n\n`;
+  md += `**URL:** ${repo.url}\n`;
+  if (repo.defaultBranch?.name) md += `**Default branch:** ${repo.defaultBranch.name}\n`;
+  return md;
+}
+
+async function renderRepo(
+  repoName: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<{ content: string; ok: boolean }> {
+  const data = await fetchGraphql<RepoQueryData>(REPO_QUERY, { name: repoName }, timeout, signal);
+  if (!data?.repository) return { content: "", ok: false };
+
+  return { content: formatRepoMarkdown(data.repository), ok: true };
+}
+
+async function renderFile(
+  repoName: string,
+  filePath: string,
+  rev: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<{ content: string; ok: boolean }> {
+  const data = await fetchGraphql<RepoFileQueryData>(
+    REPO_FILE_QUERY,
+    { name: repoName, path: filePath, rev },
+    timeout,
+    signal,
+  );
+  const repo = data?.repository;
+  const content = repo?.commit?.blob?.content ?? null;
+  if (!repo || content === null) return { content: "", ok: false };
+
+  let md = `${formatRepoMarkdown(repo)}\n`;
+  md += `**Path:** ${filePath}\n`;
+  md += `**Revision:** ${rev}\n\n`;
+  md += `---\n\n## File\n\n`;
+  md += "```text\n";
+  md += `${content}\n`;
+  md += "```\n";
+  return { content: md, ok: true };
+}
+
+async function renderSearch(
+  query: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<{ content: string; ok: boolean }> {
+  const data = await fetchGraphql<SearchQueryData>(SEARCH_QUERY, { query }, timeout, signal);
+  const resultsData = data?.search?.results;
+  if (!resultsData) return { content: "", ok: false };
+  const results = resultsData.results ?? [];
+
+  let md = "# Sourcegraph Search\n\n";
+  md += `**Query:** \`${query}\`\n`;
+  if (typeof resultsData?.matchCount === "number") {
+    md += `**Matches:** ${resultsData.matchCount}\n`;
+  }
+  if (typeof resultsData?.limitHit === "boolean") {
+    md += `**Limit hit:** ${resultsData.limitHit ? "yes" : "no"}\n`;
+  }
+  md += "\n";
+
+  if (!results || results.length === 0) {
+    md += "_No results._\n";
+    return { content: md, ok: true };
+  }
+
+  const maxResults = 10;
+  md += "## Results\n\n";
+  for (const result of results.slice(0, maxResults)) {
+    if (isFileMatchResult(result)) {
+      const repoName = result.repository?.name ?? "unknown";
+      const filePath = result.file?.path ?? "unknown";
+      md += `### ${repoName}/${filePath}\n\n`;
+      if (result.repository?.url) md += `**Repository:** ${result.repository.url}\n`;
+      if (result.file?.url) md += `**File:** ${result.file.url}\n`;
+
+      const lineMatches = result.lineMatches ?? [];
+      if (lineMatches.length > 0) {
+        md += "\n```text\n";
+        for (const line of lineMatches.slice(0, 5)) {
+          const preview = (line.preview ?? "").replace(/\n/g, " ").trim();
+          const lineNumber = line.lineNumber ?? 0;
+          md += `L${lineNumber}: ${preview}\n`;
+        }
+        md += "```\n\n";
+      }
+      continue;
+    }
+
+    if (isRepositoryResult(result)) {
+      const name = result.name ?? "unknown";
+      md += `### ${name}\n\n`;
+      if (result.url) md += `**Repository:** ${result.url}\n`;
+      md += "\n";
+    }
+  }
+
+  if (results.length > maxResults) {
+    md += `... and ${results.length - maxResults} more results\n`;
+  }
+
+  return { content: md, ok: true };
+}
+
+export const handleSourcegraph: SpecialHandler = async (
+  url: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<RenderResult | null> => {
+  try {
+    const target = parseSourcegraphUrl(url);
+    if (!target) return null;
+
+    const fetchedAt = new Date().toISOString();
+    const notes = ["Fetched via Sourcegraph GraphQL API"];
+
+    switch (target.type) {
+      case "search": {
+        const result = await renderSearch(target.query, timeout, signal);
+        if (!result.ok) return null;
+        const output = finalizeOutput(result.content);
+        return {
+          url,
+          finalUrl: url,
+          contentType: "text/markdown",
+          method: "sourcegraph-search",
+          content: output.content,
+          fetchedAt,
+          truncated: output.truncated,
+          notes,
+        };
+      }
+      case "file": {
+        const rev = target.rev ?? "HEAD";
+        const result = await renderFile(target.repoName, target.filePath, rev, timeout, signal);
+        if (!result.ok) return null;
+        const output = finalizeOutput(result.content);
+        return {
+          url,
+          finalUrl: url,
+          contentType: "text/markdown",
+          method: "sourcegraph-file",
+          content: output.content,
+          fetchedAt,
+          truncated: output.truncated,
+          notes,
+        };
+      }
+      case "repo": {
+        const result = await renderRepo(target.repoName, timeout, signal);
+        if (!result.ok) return null;
+        const output = finalizeOutput(result.content);
+        return {
+          url,
+          finalUrl: url,
+          contentType: "text/markdown",
+          method: "sourcegraph-repo",
+          content: output.content,
+          fetchedAt,
+          truncated: output.truncated,
+          notes,
+        };
+      }
+    }
+  } catch {}
+
+  return null;
+};
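
For orientation, the three URL shapes `parseSourcegraphUrl` above distinguishes, with invented repo, rev, and path values; the function is module-private, so this would only run inside the file or a colocated test:

```ts
parseSourcegraphUrl("https://sourcegraph.com/search?q=fmt.Println");
// → { type: "search", query: "fmt.Println" }

parseSourcegraphUrl("https://sourcegraph.com/github.com/golang/go@go1.22.0");
// → { type: "repo", repoName: "github.com/golang/go", rev: "go1.22.0" }

parseSourcegraphUrl("https://sourcegraph.com/github.com/golang/go@go1.22.0/-/blob/src/fmt/print.go");
// → { type: "file", repoName: "github.com/golang/go", rev: "go1.22.0", filePath: "src/fmt/print.go" }
```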

package/src/core/tools/web-scrapers/spdx.ts

@@ -0,0 +1,121 @@
+import type { RenderResult, SpecialHandler } from "./types";
+import { finalizeOutput, htmlToBasicMarkdown, loadPage } from "./types";
+
+interface SpdxCrossRef {
+  url?: string;
+  isValid?: boolean;
+  isLive?: boolean;
+  match?: string;
+  order?: number;
+}
+
+interface SpdxLicense {
+  licenseId: string;
+  name: string;
+  isOsiApproved?: boolean;
+  isFsfLibre?: boolean;
+  licenseText?: string;
+  licenseTextHtml?: string;
+  seeAlso?: string[];
+  crossRef?: SpdxCrossRef[];
+  comment?: string;
+  licenseComments?: string;
+}
+
+function formatYesNo(value?: boolean): string {
+  if (value === true) return "Yes";
+  if (value === false) return "No";
+  return "Unknown";
+}
+
+function collectCrossReferences(license: SpdxLicense): string[] {
+  const ordered = (license.crossRef ?? [])
+    .filter((ref) => ref.url)
+    .sort((a, b) => (a.order ?? 0) - (b.order ?? 0))
+    .map((ref) => ref.url as string);
+
+  const seeAlso = (license.seeAlso ?? []).filter((url) => url);
+  const combined = [...ordered, ...seeAlso];
+  return combined.filter((url, index) => combined.indexOf(url) === index);
+}
+
+/**
+ * Handle SPDX license URLs via SPDX JSON API
+ */
+export const handleSpdx: SpecialHandler = async (
+  url: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<RenderResult | null> => {
+  try {
+    const parsed = new URL(url);
+    if (parsed.hostname !== "spdx.org" && parsed.hostname !== "www.spdx.org") return null;
+
+    const match = parsed.pathname.match(/^\/licenses\/([^/]+?)(?:\.html)?\/?$/i);
+    if (!match) return null;
+
+    const licenseId = decodeURIComponent(match[1]);
+    if (!licenseId) return null;
+
+    const fetchedAt = new Date().toISOString();
+    const apiUrl = `https://spdx.org/licenses/${encodeURIComponent(licenseId)}.json`;
+    const result = await loadPage(apiUrl, {
+      timeout,
+      headers: { Accept: "application/json" },
+      signal,
+    });
+
+    if (!result.ok) return null;
+
+    let license: SpdxLicense;
+    try {
+      license = JSON.parse(result.content);
+    } catch {
+      return null;
+    }
+
+    const title = license.name || license.licenseId || licenseId;
+    let md = `# ${title}\n\n`;
+
+    md += `**License ID:** ${license.licenseId ? `\`${license.licenseId}\`` : `\`${licenseId}\``}\n`;
+    md += `**OSI Approved:** ${formatYesNo(license.isOsiApproved)}\n`;
+    md += `**FSF Libre:** ${formatYesNo(license.isFsfLibre)}\n`;
+
+    const description = license.licenseComments ?? license.comment;
+    if (description) {
+      md += `\n## Description\n\n${description}\n`;
+    }
+
+    const crossReferences = collectCrossReferences(license);
+    if (crossReferences.length) {
+      md += `\n## Cross References\n\n`;
+      for (const ref of crossReferences) {
+        md += `- ${ref}\n`;
+      }
+    }
+
+    const licenseText = license.licenseText
+      ? license.licenseText
+      : license.licenseTextHtml
+        ? htmlToBasicMarkdown(license.licenseTextHtml)
+        : null;
+
+    if (licenseText) {
+      md += `\n## License Text\n\n\`\`\`\n${licenseText}\n\`\`\`\n`;
+    }
+
+    const output = finalizeOutput(md);
+    return {
+      url,
+      finalUrl: url,
+      contentType: "text/markdown",
+      method: "spdx-api",
+      content: output.content,
+      fetchedAt,
+      truncated: output.truncated,
+      notes: ["Fetched via SPDX license API"],
+    };
+  } catch {}
+
+  return null;
+};
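
The SPDX handler above rewrites a license page URL to the registry's JSON endpoint and renders the result as markdown. A hedged usage sketch; the import path and the expected MIT values are assumptions based on the code and public SPDX data:

```ts
import { handleSpdx } from "./web-scrapers/spdx";

// https://spdx.org/licenses/MIT.html is fetched as https://spdx.org/licenses/MIT.json
const result = await handleSpdx("https://spdx.org/licenses/MIT.html", 30);
if (result) {
  console.log(result.method);      // "spdx-api"
  console.log(result.contentType); // "text/markdown"
  // result.content is expected to start with "# MIT License" followed by the
  // License ID / OSI Approved / FSF Libre lines built above.
}
```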

package/src/core/tools/{web-fetch-handlers → web-scrapers}/spotify.ts

@@ -157,7 +157,7 @@ function formatOutput(contentType: string, oEmbed: SpotifyOEmbedResponse, og: Op
   return sections.join("\n");
 }
 
-export const handleSpotify: SpecialHandler = async (url: string, timeout: number) => {
+export const handleSpotify: SpecialHandler = async (url: string, timeout: number, signal?: AbortSignal) => {
   // Check if this is a Spotify URL
   if (!url.includes("open.spotify.com/")) {
     return null;
@@ -175,7 +175,7 @@ export const handleSpotify: SpecialHandler = async (url: string, timeout: number
   // Fetch oEmbed data
   try {
     const oEmbedUrl = `https://open.spotify.com/oembed?url=${encodeURIComponent(url)}`;
-    const response = await loadPage(oEmbedUrl, { timeout });
+    const response = await loadPage(oEmbedUrl, { timeout, signal });
 
     if (response.ok) {
       oEmbedData = JSON.parse(response.content) as SpotifyOEmbedResponse;
@@ -189,7 +189,7 @@ export const handleSpotify: SpecialHandler = async (url: string, timeout: number
 
   // Fetch page HTML for Open Graph metadata
   try {
-    const pageResponse = await loadPage(url, { timeout });
+    const pageResponse = await loadPage(url, { timeout, signal });
 
     if (pageResponse.ok) {
       ogData = parseOpenGraph(pageResponse.content);

package/src/core/tools/{web-fetch-handlers → web-scrapers}/stackoverflow.ts

@@ -59,6 +59,7 @@ function getSiteParam(hostname: string): string | null {
 export const handleStackOverflow: SpecialHandler = async (
   url: string,
   timeout: number,
+  signal?: AbortSignal,
 ): Promise<RenderResult | null> => {
   try {
     const parsed = new URL(url);
@@ -74,7 +75,7 @@ export const handleStackOverflow: SpecialHandler = async (
 
     // Fetch question with answers
     const apiUrl = `https://api.stackexchange.com/2.3/questions/${questionId}?order=desc&sort=votes&site=${site}&filter=withbody`;
-    const qResult = await loadPage(apiUrl, { timeout });
+    const qResult = await loadPage(apiUrl, { timeout, signal });
 
     if (!qResult.ok) return null;
 
@@ -92,7 +93,7 @@ export const handleStackOverflow: SpecialHandler = async (
 
     // Fetch answers
     const aUrl = `https://api.stackexchange.com/2.3/questions/${questionId}/answers?order=desc&sort=votes&site=${site}&filter=withbody`;
-    const aResult = await loadPage(aUrl, { timeout });
+    const aResult = await loadPage(aUrl, { timeout, signal });
 
     if (aResult.ok) {
       const aData = JSON.parse(aResult.content) as { items: SOAnswer[] };

package/src/core/tools/{web-fetch-handlers → web-scrapers}/terraform.ts

@@ -64,7 +64,11 @@ interface TerraformProvider {
 /**
  * Handle Terraform Registry URLs via API
  */
-export const handleTerraform: SpecialHandler = async (
+export const handleTerraform: SpecialHandler = async (
+  url: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<RenderResult | null> => {
   try {
     const parsed = new URL(url);
     if (!parsed.hostname.includes("registry.terraform.io")) return null;
@@ -75,14 +79,14 @@ export const handleTerraform: SpecialHandler = async (url: string, timeout: numb
     const moduleMatch = parsed.pathname.match(/^\/modules\/([^/]+)\/([^/]+)\/([^/]+)/);
     if (moduleMatch) {
       const [, namespace, name, provider] = moduleMatch;
-      return await handleModuleUrl(url, namespace, name, provider, timeout, fetchedAt);
+      return await handleModuleUrl(url, namespace, name, provider, timeout, signal, fetchedAt);
     }
 
     // Match provider URL: /providers/{namespace}/{type}
     const providerMatch = parsed.pathname.match(/^\/providers\/([^/]+)\/([^/]+)/);
     if (providerMatch) {
       const [, namespace, type] = providerMatch;
-      return await handleProviderUrl(url, namespace, type, timeout, fetchedAt);
+      return await handleProviderUrl(url, namespace, type, timeout, signal, fetchedAt);
     }
 
     return null;
@@ -97,11 +101,13 @@ async function handleModuleUrl(
   name: string,
   provider: string,
   timeout: number,
+  signal: AbortSignal | undefined,
   fetchedAt: string,
 ): Promise<RenderResult | null> {
   const apiUrl = `https://registry.terraform.io/v1/modules/${namespace}/${name}/${provider}`;
   const result = await loadPage(apiUrl, {
     timeout,
+    signal,
     headers: { Accept: "application/json" },
   });
 
@@ -224,11 +230,13 @@ async function handleProviderUrl(
   namespace: string,
   type: string,
   timeout: number,
+  signal: AbortSignal | undefined,
   fetchedAt: string,
 ): Promise<RenderResult | null> {
   const apiUrl = `https://registry.terraform.io/v1/providers/${namespace}/${type}`;
   const result = await loadPage(apiUrl, {
     timeout,
+    signal,
     headers: { Accept: "application/json" },
   });
 

package/src/core/tools/{web-fetch-handlers → web-scrapers}/tldr.ts

@@ -9,7 +9,11 @@ const PLATFORMS = ["common", "linux", "osx"] as const;
  * - https://tldr.sh/{command}
  * - https://tldr.ostera.io/{command}
  */
-export const handleTldr: SpecialHandler = async (
+export const handleTldr: SpecialHandler = async (
+  url: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<RenderResult | null> => {
   try {
     const parsed = new URL(url);
     if (parsed.hostname !== "tldr.sh" && parsed.hostname !== "tldr.ostera.io") return null;
@@ -23,7 +27,7 @@ export const handleTldr: SpecialHandler = async (url: string, timeout: number):
     // Try platforms in order: common, linux, osx
     for (const platform of PLATFORMS) {
       const rawUrl = `${TLDR_BASE}/${platform}/${command}.md`;
-      const result = await loadPage(rawUrl, { timeout });
+      const result = await loadPage(rawUrl, { timeout, signal });
 
       if (result.ok && result.content.trim()) {
         const output = finalizeOutput(result.content);

package/src/core/tools/{web-fetch-handlers → web-scrapers}/twitter.ts

@@ -12,7 +12,11 @@ const NITTER_INSTANCES = [
 /**
  * Handle Twitter/X URLs via Nitter
  */
-export const handleTwitter: SpecialHandler = async (
+export const handleTwitter: SpecialHandler = async (
+  url: string,
+  timeout: number,
+  signal?: AbortSignal,
+): Promise<RenderResult | null> => {
   try {
     const parsed = new URL(url);
     if (!["twitter.com", "x.com", "www.twitter.com", "www.x.com"].includes(parsed.hostname)) {
@@ -24,7 +28,7 @@ export const handleTwitter: SpecialHandler = async (url: string, timeout: number
     // Try Nitter instances
     for (const instance of NITTER_INSTANCES) {
       const nitterUrl = `https://${instance}${parsed.pathname}`;
-      const result = await loadPage(nitterUrl, { timeout: Math.min(timeout, 10) });
+      const result = await loadPage(nitterUrl, { timeout: Math.min(timeout, 10), signal });
 
       if (result.ok && result.content.length > 500) {
         // Parse the Nitter HTML
@@ -67,7 +71,15 @@ export const handleTwitter: SpecialHandler = async (url: string, timeout: number
         }
       }
     }
-  } catch {
+  } catch {
+    if (signal?.aborted) {
+      return null;
+    }
+  }
+
+  if (signal?.aborted) {
+    return null;
+  }
 
   // X.com blocks all bots - return a helpful error instead of falling through
   return {