@oh-my-pi/pi-coding-agent 1.337.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1228 -0
- package/README.md +1041 -0
- package/docs/compaction.md +403 -0
- package/docs/custom-tools.md +541 -0
- package/docs/extension-loading.md +1004 -0
- package/docs/hooks.md +867 -0
- package/docs/rpc.md +1040 -0
- package/docs/sdk.md +994 -0
- package/docs/session-tree-plan.md +441 -0
- package/docs/session.md +240 -0
- package/docs/skills.md +290 -0
- package/docs/theme.md +637 -0
- package/docs/tree.md +197 -0
- package/docs/tui.md +341 -0
- package/examples/README.md +21 -0
- package/examples/custom-tools/README.md +124 -0
- package/examples/custom-tools/hello/index.ts +20 -0
- package/examples/custom-tools/question/index.ts +84 -0
- package/examples/custom-tools/subagent/README.md +172 -0
- package/examples/custom-tools/subagent/agents/planner.md +37 -0
- package/examples/custom-tools/subagent/agents/reviewer.md +35 -0
- package/examples/custom-tools/subagent/agents/scout.md +50 -0
- package/examples/custom-tools/subagent/agents/worker.md +24 -0
- package/examples/custom-tools/subagent/agents.ts +156 -0
- package/examples/custom-tools/subagent/commands/implement-and-review.md +10 -0
- package/examples/custom-tools/subagent/commands/implement.md +10 -0
- package/examples/custom-tools/subagent/commands/scout-and-plan.md +9 -0
- package/examples/custom-tools/subagent/index.ts +1002 -0
- package/examples/custom-tools/todo/index.ts +212 -0
- package/examples/hooks/README.md +56 -0
- package/examples/hooks/auto-commit-on-exit.ts +49 -0
- package/examples/hooks/confirm-destructive.ts +59 -0
- package/examples/hooks/custom-compaction.ts +116 -0
- package/examples/hooks/dirty-repo-guard.ts +52 -0
- package/examples/hooks/file-trigger.ts +41 -0
- package/examples/hooks/git-checkpoint.ts +53 -0
- package/examples/hooks/handoff.ts +150 -0
- package/examples/hooks/permission-gate.ts +34 -0
- package/examples/hooks/protected-paths.ts +30 -0
- package/examples/hooks/qna.ts +119 -0
- package/examples/hooks/snake.ts +343 -0
- package/examples/hooks/status-line.ts +40 -0
- package/examples/sdk/01-minimal.ts +22 -0
- package/examples/sdk/02-custom-model.ts +49 -0
- package/examples/sdk/03-custom-prompt.ts +44 -0
- package/examples/sdk/04-skills.ts +44 -0
- package/examples/sdk/05-tools.ts +90 -0
- package/examples/sdk/06-hooks.ts +61 -0
- package/examples/sdk/07-context-files.ts +36 -0
- package/examples/sdk/08-slash-commands.ts +42 -0
- package/examples/sdk/09-api-keys-and-oauth.ts +55 -0
- package/examples/sdk/10-settings.ts +38 -0
- package/examples/sdk/11-sessions.ts +48 -0
- package/examples/sdk/12-full-control.ts +95 -0
- package/examples/sdk/README.md +154 -0
- package/package.json +81 -0
- package/src/cli/args.ts +246 -0
- package/src/cli/file-processor.ts +72 -0
- package/src/cli/list-models.ts +104 -0
- package/src/cli/plugin-cli.ts +650 -0
- package/src/cli/session-picker.ts +41 -0
- package/src/cli.ts +10 -0
- package/src/commands/init.md +20 -0
- package/src/config.ts +159 -0
- package/src/core/agent-session.ts +1900 -0
- package/src/core/auth-storage.ts +236 -0
- package/src/core/bash-executor.ts +196 -0
- package/src/core/compaction/branch-summarization.ts +343 -0
- package/src/core/compaction/compaction.ts +742 -0
- package/src/core/compaction/index.ts +7 -0
- package/src/core/compaction/utils.ts +154 -0
- package/src/core/custom-tools/index.ts +21 -0
- package/src/core/custom-tools/loader.ts +248 -0
- package/src/core/custom-tools/types.ts +169 -0
- package/src/core/custom-tools/wrapper.ts +28 -0
- package/src/core/exec.ts +129 -0
- package/src/core/export-html/index.ts +211 -0
- package/src/core/export-html/template.css +781 -0
- package/src/core/export-html/template.html +54 -0
- package/src/core/export-html/template.js +1185 -0
- package/src/core/export-html/vendor/highlight.min.js +1213 -0
- package/src/core/export-html/vendor/marked.min.js +6 -0
- package/src/core/hooks/index.ts +16 -0
- package/src/core/hooks/loader.ts +312 -0
- package/src/core/hooks/runner.ts +434 -0
- package/src/core/hooks/tool-wrapper.ts +99 -0
- package/src/core/hooks/types.ts +773 -0
- package/src/core/index.ts +52 -0
- package/src/core/mcp/client.ts +158 -0
- package/src/core/mcp/config.ts +154 -0
- package/src/core/mcp/index.ts +45 -0
- package/src/core/mcp/loader.ts +68 -0
- package/src/core/mcp/manager.ts +181 -0
- package/src/core/mcp/tool-bridge.ts +148 -0
- package/src/core/mcp/transports/http.ts +316 -0
- package/src/core/mcp/transports/index.ts +6 -0
- package/src/core/mcp/transports/stdio.ts +252 -0
- package/src/core/mcp/types.ts +220 -0
- package/src/core/messages.ts +189 -0
- package/src/core/model-registry.ts +317 -0
- package/src/core/model-resolver.ts +393 -0
- package/src/core/plugins/doctor.ts +59 -0
- package/src/core/plugins/index.ts +38 -0
- package/src/core/plugins/installer.ts +189 -0
- package/src/core/plugins/loader.ts +338 -0
- package/src/core/plugins/manager.ts +672 -0
- package/src/core/plugins/parser.ts +105 -0
- package/src/core/plugins/paths.ts +32 -0
- package/src/core/plugins/types.ts +190 -0
- package/src/core/sdk.ts +760 -0
- package/src/core/session-manager.ts +1128 -0
- package/src/core/settings-manager.ts +443 -0
- package/src/core/skills.ts +437 -0
- package/src/core/slash-commands.ts +248 -0
- package/src/core/system-prompt.ts +439 -0
- package/src/core/timings.ts +25 -0
- package/src/core/tools/ask.ts +211 -0
- package/src/core/tools/bash-interceptor.ts +120 -0
- package/src/core/tools/bash.ts +250 -0
- package/src/core/tools/context.ts +32 -0
- package/src/core/tools/edit-diff.ts +475 -0
- package/src/core/tools/edit.ts +208 -0
- package/src/core/tools/exa/company.ts +59 -0
- package/src/core/tools/exa/index.ts +64 -0
- package/src/core/tools/exa/linkedin.ts +59 -0
- package/src/core/tools/exa/logger.ts +56 -0
- package/src/core/tools/exa/mcp-client.ts +368 -0
- package/src/core/tools/exa/render.ts +196 -0
- package/src/core/tools/exa/researcher.ts +90 -0
- package/src/core/tools/exa/search.ts +337 -0
- package/src/core/tools/exa/types.ts +168 -0
- package/src/core/tools/exa/websets.ts +248 -0
- package/src/core/tools/find.ts +261 -0
- package/src/core/tools/grep.ts +555 -0
- package/src/core/tools/index.ts +202 -0
- package/src/core/tools/ls.ts +140 -0
- package/src/core/tools/lsp/client.ts +605 -0
- package/src/core/tools/lsp/config.ts +147 -0
- package/src/core/tools/lsp/edits.ts +101 -0
- package/src/core/tools/lsp/index.ts +804 -0
- package/src/core/tools/lsp/render.ts +447 -0
- package/src/core/tools/lsp/rust-analyzer.ts +145 -0
- package/src/core/tools/lsp/types.ts +463 -0
- package/src/core/tools/lsp/utils.ts +486 -0
- package/src/core/tools/notebook.ts +229 -0
- package/src/core/tools/path-utils.ts +61 -0
- package/src/core/tools/read.ts +240 -0
- package/src/core/tools/renderers.ts +540 -0
- package/src/core/tools/task/agents.ts +153 -0
- package/src/core/tools/task/artifacts.ts +114 -0
- package/src/core/tools/task/bundled-agents/browser.md +71 -0
- package/src/core/tools/task/bundled-agents/explore.md +82 -0
- package/src/core/tools/task/bundled-agents/plan.md +54 -0
- package/src/core/tools/task/bundled-agents/reviewer.md +59 -0
- package/src/core/tools/task/bundled-agents/task.md +53 -0
- package/src/core/tools/task/bundled-commands/architect-plan.md +10 -0
- package/src/core/tools/task/bundled-commands/implement-with-critic.md +11 -0
- package/src/core/tools/task/bundled-commands/implement.md +11 -0
- package/src/core/tools/task/commands.ts +213 -0
- package/src/core/tools/task/discovery.ts +208 -0
- package/src/core/tools/task/executor.ts +367 -0
- package/src/core/tools/task/index.ts +388 -0
- package/src/core/tools/task/model-resolver.ts +115 -0
- package/src/core/tools/task/parallel.ts +38 -0
- package/src/core/tools/task/render.ts +232 -0
- package/src/core/tools/task/types.ts +99 -0
- package/src/core/tools/truncate.ts +265 -0
- package/src/core/tools/web-fetch.ts +2370 -0
- package/src/core/tools/web-search/auth.ts +193 -0
- package/src/core/tools/web-search/index.ts +537 -0
- package/src/core/tools/web-search/providers/anthropic.ts +198 -0
- package/src/core/tools/web-search/providers/exa.ts +302 -0
- package/src/core/tools/web-search/providers/perplexity.ts +195 -0
- package/src/core/tools/web-search/render.ts +182 -0
- package/src/core/tools/web-search/types.ts +180 -0
- package/src/core/tools/write.ts +99 -0
- package/src/index.ts +176 -0
- package/src/main.ts +464 -0
- package/src/migrations.ts +135 -0
- package/src/modes/index.ts +43 -0
- package/src/modes/interactive/components/armin.ts +382 -0
- package/src/modes/interactive/components/assistant-message.ts +86 -0
- package/src/modes/interactive/components/bash-execution.ts +196 -0
- package/src/modes/interactive/components/bordered-loader.ts +41 -0
- package/src/modes/interactive/components/branch-summary-message.ts +42 -0
- package/src/modes/interactive/components/compaction-summary-message.ts +45 -0
- package/src/modes/interactive/components/custom-editor.ts +122 -0
- package/src/modes/interactive/components/diff.ts +147 -0
- package/src/modes/interactive/components/dynamic-border.ts +25 -0
- package/src/modes/interactive/components/footer.ts +381 -0
- package/src/modes/interactive/components/hook-editor.ts +117 -0
- package/src/modes/interactive/components/hook-input.ts +64 -0
- package/src/modes/interactive/components/hook-message.ts +96 -0
- package/src/modes/interactive/components/hook-selector.ts +91 -0
- package/src/modes/interactive/components/model-selector.ts +247 -0
- package/src/modes/interactive/components/oauth-selector.ts +120 -0
- package/src/modes/interactive/components/plugin-settings.ts +479 -0
- package/src/modes/interactive/components/queue-mode-selector.ts +56 -0
- package/src/modes/interactive/components/session-selector.ts +204 -0
- package/src/modes/interactive/components/settings-selector.ts +453 -0
- package/src/modes/interactive/components/show-images-selector.ts +45 -0
- package/src/modes/interactive/components/theme-selector.ts +62 -0
- package/src/modes/interactive/components/thinking-selector.ts +64 -0
- package/src/modes/interactive/components/tool-execution.ts +675 -0
- package/src/modes/interactive/components/tree-selector.ts +866 -0
- package/src/modes/interactive/components/user-message-selector.ts +159 -0
- package/src/modes/interactive/components/user-message.ts +18 -0
- package/src/modes/interactive/components/visual-truncate.ts +50 -0
- package/src/modes/interactive/components/welcome.ts +183 -0
- package/src/modes/interactive/interactive-mode.ts +2516 -0
- package/src/modes/interactive/theme/dark.json +101 -0
- package/src/modes/interactive/theme/light.json +98 -0
- package/src/modes/interactive/theme/theme-schema.json +308 -0
- package/src/modes/interactive/theme/theme.ts +998 -0
- package/src/modes/print-mode.ts +128 -0
- package/src/modes/rpc/rpc-client.ts +527 -0
- package/src/modes/rpc/rpc-mode.ts +483 -0
- package/src/modes/rpc/rpc-types.ts +203 -0
- package/src/utils/changelog.ts +99 -0
- package/src/utils/clipboard.ts +265 -0
- package/src/utils/fuzzy.ts +108 -0
- package/src/utils/mime.ts +30 -0
- package/src/utils/shell.ts +276 -0
- package/src/utils/tools-manager.ts +274 -0
|
@@ -0,0 +1,2370 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import * as fs from "node:fs";
|
|
3
|
+
import * as os from "node:os";
|
|
4
|
+
import * as path from "node:path";
|
|
5
|
+
import type { AgentTool } from "@oh-my-pi/pi-agent-core";
|
|
6
|
+
import { Type } from "@sinclair/typebox";
|
|
7
|
+
import { parse as parseHtml } from "node-html-parser";
|
|
8
|
+
|
|
9
|
+
// =============================================================================
|
|
10
|
+
// Types and Constants
|
|
11
|
+
// =============================================================================
|
|
12
|
+
|
|
13
|
+
/**
 * Result of fetching a URL and rendering it into text for the model.
 */
interface RenderResult {
  /** URL as originally requested. */
  url: string;
  /** URL after redirects were followed. */
  finalUrl: string;
  /** Content-Type reported by the server (may be empty). */
  contentType: string;
  /** Identifier of the strategy that produced `content` (set by the caller). */
  method: string;
  /** Rendered text content. */
  content: string;
  /** Timestamp of the fetch. */
  fetchedAt: string;
  /** True when content was cut to the output limit. */
  truncated: boolean;
  /** Informational notes accumulated while rendering. */
  notes: string[];
}

// Default per-request timeout, in seconds.
const DEFAULT_TIMEOUT = 20;
const MAX_BYTES = 50 * 1024 * 1024; // 50MB for binary files
// Hard cap on characters returned to the caller.
const MAX_OUTPUT_CHARS = 500_000;

// Convertible document types (markitdown supported)
const CONVERTIBLE_MIMES = new Set([
  "application/pdf",
  "application/msword",
  "application/vnd.ms-powerpoint",
  "application/vnd.ms-excel",
  "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  "application/vnd.openxmlformats-officedocument.presentationml.presentation",
  "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  "application/rtf",
  "application/epub+zip",
  "application/zip",
  "image/png",
  "image/jpeg",
  "image/gif",
  "image/webp",
  "audio/mpeg",
  "audio/wav",
  "audio/ogg",
]);

// File extensions accepted for conversion (lowercase, dot included).
const CONVERTIBLE_EXTENSIONS = new Set([
  ".pdf",
  ".doc",
  ".docx",
  ".ppt",
  ".pptx",
  ".xls",
  ".xlsx",
  ".rtf",
  ".epub",
  ".png",
  ".jpg",
  ".jpeg",
  ".gif",
  ".webp",
  ".mp3",
  ".wav",
  ".ogg",
]);

const isWindows = process.platform === "win32";

// User agents tried in order; later entries look progressively more like a
// real browser and are used as fallbacks when bot blocking is detected.
const USER_AGENTS = [
  "curl/8.0",
  "Mozilla/5.0 (compatible; TextBot/1.0)",
  "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
];

// =============================================================================
// Utilities
// =============================================================================

/** Raw result of a single page-load attempt. */
interface LoadPageResult {
  content: string;
  contentType: string;
  finalUrl: string;
  ok: boolean;
  /** HTTP status; absent when the request failed before any response. */
  status?: number;
}

/** Options for loadPage. */
interface LoadPageOptions {
  /** Timeout in seconds. */
  timeout?: number;
  /** Extra request headers, merged over the defaults. */
  headers?: Record<string, string>;
  /** Response size cap in bytes. */
  maxBytes?: number;
}
|
|
94
|
+
|
|
95
|
+
/**
|
|
96
|
+
* Check if response indicates bot blocking (Cloudflare, etc.)
|
|
97
|
+
*/
|
|
98
|
+
function isBotBlocked(status: number, content: string): boolean {
|
|
99
|
+
if (status === 403 || status === 503) {
|
|
100
|
+
const lower = content.toLowerCase();
|
|
101
|
+
return (
|
|
102
|
+
lower.includes("cloudflare") ||
|
|
103
|
+
lower.includes("captcha") ||
|
|
104
|
+
lower.includes("challenge") ||
|
|
105
|
+
lower.includes("blocked") ||
|
|
106
|
+
lower.includes("access denied") ||
|
|
107
|
+
lower.includes("bot detection")
|
|
108
|
+
);
|
|
109
|
+
}
|
|
110
|
+
return false;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
/**
|
|
114
|
+
* Fetch a page with timeout, size limit, and automatic retry with browser UA if blocked
|
|
115
|
+
*/
|
|
116
|
+
async function loadPage(url: string, options: LoadPageOptions = {}): Promise<LoadPageResult> {
|
|
117
|
+
const { timeout = 20, headers = {}, maxBytes = MAX_BYTES } = options;
|
|
118
|
+
|
|
119
|
+
for (let attempt = 0; attempt < USER_AGENTS.length; attempt++) {
|
|
120
|
+
const userAgent = USER_AGENTS[attempt];
|
|
121
|
+
|
|
122
|
+
try {
|
|
123
|
+
const controller = new AbortController();
|
|
124
|
+
const timeoutId = setTimeout(() => controller.abort(), timeout * 1000);
|
|
125
|
+
|
|
126
|
+
const response = await fetch(url, {
|
|
127
|
+
signal: controller.signal,
|
|
128
|
+
headers: {
|
|
129
|
+
"User-Agent": userAgent,
|
|
130
|
+
Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
|
|
131
|
+
"Accept-Language": "en-US,en;q=0.5",
|
|
132
|
+
...headers,
|
|
133
|
+
},
|
|
134
|
+
redirect: "follow",
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
clearTimeout(timeoutId);
|
|
138
|
+
|
|
139
|
+
const contentType = response.headers.get("content-type")?.split(";")[0]?.trim().toLowerCase() ?? "";
|
|
140
|
+
const finalUrl = response.url;
|
|
141
|
+
|
|
142
|
+
// Read with size limit
|
|
143
|
+
const reader = response.body?.getReader();
|
|
144
|
+
if (!reader) {
|
|
145
|
+
return { content: "", contentType, finalUrl, ok: false, status: response.status };
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const chunks: Uint8Array[] = [];
|
|
149
|
+
let totalSize = 0;
|
|
150
|
+
|
|
151
|
+
while (true) {
|
|
152
|
+
const { done, value } = await reader.read();
|
|
153
|
+
if (done) break;
|
|
154
|
+
|
|
155
|
+
chunks.push(value);
|
|
156
|
+
totalSize += value.length;
|
|
157
|
+
|
|
158
|
+
if (totalSize > maxBytes) {
|
|
159
|
+
reader.cancel();
|
|
160
|
+
break;
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
const decoder = new TextDecoder();
|
|
165
|
+
const content = decoder.decode(Buffer.concat(chunks));
|
|
166
|
+
|
|
167
|
+
// Check if we got blocked and should retry with browser UA
|
|
168
|
+
if (isBotBlocked(response.status, content) && attempt < USER_AGENTS.length - 1) {
|
|
169
|
+
continue;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
if (!response.ok) {
|
|
173
|
+
return { content, contentType, finalUrl, ok: false, status: response.status };
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
return { content, contentType, finalUrl, ok: true, status: response.status };
|
|
177
|
+
} catch (_err) {
|
|
178
|
+
// On last attempt, return failure
|
|
179
|
+
if (attempt === USER_AGENTS.length - 1) {
|
|
180
|
+
return { content: "", contentType: "", finalUrl: url, ok: false };
|
|
181
|
+
}
|
|
182
|
+
// Otherwise retry with next UA
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
return { content: "", contentType: "", finalUrl: url, ok: false };
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
/**
|
|
190
|
+
* Execute a command and return stdout
|
|
191
|
+
*/
|
|
192
|
+
function exec(
|
|
193
|
+
cmd: string,
|
|
194
|
+
args: string[],
|
|
195
|
+
options?: { timeout?: number; input?: string | Buffer },
|
|
196
|
+
): { stdout: string; stderr: string; ok: boolean } {
|
|
197
|
+
const timeout = (options?.timeout ?? DEFAULT_TIMEOUT) * 1000;
|
|
198
|
+
const result = spawnSync(cmd, args, {
|
|
199
|
+
encoding: options?.input instanceof Buffer ? "buffer" : "utf-8",
|
|
200
|
+
timeout,
|
|
201
|
+
maxBuffer: MAX_BYTES,
|
|
202
|
+
input: options?.input,
|
|
203
|
+
shell: true,
|
|
204
|
+
});
|
|
205
|
+
return {
|
|
206
|
+
stdout: result.stdout?.toString() ?? "",
|
|
207
|
+
stderr: result.stderr?.toString() ?? "",
|
|
208
|
+
ok: result.status === 0,
|
|
209
|
+
};
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
/**
|
|
213
|
+
* Check if a command exists (cross-platform)
|
|
214
|
+
*/
|
|
215
|
+
function hasCommand(cmd: string): boolean {
|
|
216
|
+
const checkCmd = isWindows ? "where" : "which";
|
|
217
|
+
const result = spawnSync(checkCmd, [cmd], { encoding: "utf-8", shell: true });
|
|
218
|
+
return result.status === 0;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
/**
|
|
222
|
+
* Extract origin from URL
|
|
223
|
+
*/
|
|
224
|
+
function getOrigin(url: string): string {
|
|
225
|
+
try {
|
|
226
|
+
const parsed = new URL(url);
|
|
227
|
+
return `${parsed.protocol}//${parsed.host}`;
|
|
228
|
+
} catch {
|
|
229
|
+
return "";
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
/**
|
|
234
|
+
* Normalize URL (add scheme if missing)
|
|
235
|
+
*/
|
|
236
|
+
function normalizeUrl(url: string): string {
|
|
237
|
+
if (!url.match(/^https?:\/\//i)) {
|
|
238
|
+
return `https://${url}`;
|
|
239
|
+
}
|
|
240
|
+
return url;
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
/**
|
|
244
|
+
* Normalize MIME type (lowercase, strip charset/params)
|
|
245
|
+
*/
|
|
246
|
+
function normalizeMime(contentType: string): string {
|
|
247
|
+
return contentType.split(";")[0].trim().toLowerCase();
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
/**
|
|
251
|
+
* Get extension from URL or Content-Disposition
|
|
252
|
+
*/
|
|
253
|
+
function getExtensionHint(url: string, contentDisposition?: string): string {
|
|
254
|
+
// Try Content-Disposition filename first
|
|
255
|
+
if (contentDisposition) {
|
|
256
|
+
const match = contentDisposition.match(/filename[*]?=["']?([^"';\n]+)/i);
|
|
257
|
+
if (match) {
|
|
258
|
+
const ext = path.extname(match[1]).toLowerCase();
|
|
259
|
+
if (ext) return ext;
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
// Fall back to URL path
|
|
264
|
+
try {
|
|
265
|
+
const pathname = new URL(url).pathname;
|
|
266
|
+
const ext = path.extname(pathname).toLowerCase();
|
|
267
|
+
if (ext) return ext;
|
|
268
|
+
} catch {}
|
|
269
|
+
|
|
270
|
+
return "";
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
/**
|
|
274
|
+
* Check if content type is convertible via markitdown
|
|
275
|
+
*/
|
|
276
|
+
function isConvertible(mime: string, extensionHint: string): boolean {
|
|
277
|
+
if (CONVERTIBLE_MIMES.has(mime)) return true;
|
|
278
|
+
if (mime === "application/octet-stream" && CONVERTIBLE_EXTENSIONS.has(extensionHint)) return true;
|
|
279
|
+
if (CONVERTIBLE_EXTENSIONS.has(extensionHint)) return true;
|
|
280
|
+
return false;
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
/**
|
|
284
|
+
* Check if content looks like HTML
|
|
285
|
+
*/
|
|
286
|
+
function looksLikeHtml(content: string): boolean {
|
|
287
|
+
const trimmed = content.trim().toLowerCase();
|
|
288
|
+
return (
|
|
289
|
+
trimmed.startsWith("<!doctype") ||
|
|
290
|
+
trimmed.startsWith("<html") ||
|
|
291
|
+
trimmed.startsWith("<head") ||
|
|
292
|
+
trimmed.startsWith("<body")
|
|
293
|
+
);
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
/**
|
|
297
|
+
* Convert binary file to markdown using markitdown
|
|
298
|
+
*/
|
|
299
|
+
function convertWithMarkitdown(
|
|
300
|
+
content: Buffer,
|
|
301
|
+
extensionHint: string,
|
|
302
|
+
timeout: number,
|
|
303
|
+
): { content: string; ok: boolean } {
|
|
304
|
+
if (!hasCommand("markitdown")) {
|
|
305
|
+
return { content: "", ok: false };
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
// Write to temp file with extension hint
|
|
309
|
+
const ext = extensionHint || ".bin";
|
|
310
|
+
const tmpFile = path.join(os.tmpdir(), `pi-convert-${Date.now()}${ext}`);
|
|
311
|
+
|
|
312
|
+
try {
|
|
313
|
+
fs.writeFileSync(tmpFile, content);
|
|
314
|
+
const result = exec("markitdown", [tmpFile], { timeout });
|
|
315
|
+
return { content: result.stdout, ok: result.ok };
|
|
316
|
+
} finally {
|
|
317
|
+
try {
|
|
318
|
+
fs.unlinkSync(tmpFile);
|
|
319
|
+
} catch {}
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
/**
|
|
324
|
+
* Try fetching URL with .md appended (llms.txt convention)
|
|
325
|
+
*/
|
|
326
|
+
async function tryMdSuffix(url: string, timeout: number): Promise<string | null> {
|
|
327
|
+
const candidates: string[] = [];
|
|
328
|
+
|
|
329
|
+
try {
|
|
330
|
+
const parsed = new URL(url);
|
|
331
|
+
const pathname = parsed.pathname;
|
|
332
|
+
|
|
333
|
+
if (pathname.endsWith("/")) {
|
|
334
|
+
// /foo/bar/ -> /foo/bar/index.html.md
|
|
335
|
+
candidates.push(`${parsed.origin}${pathname}index.html.md`);
|
|
336
|
+
} else if (pathname.includes(".")) {
|
|
337
|
+
// /foo/bar.html -> /foo/bar.html.md
|
|
338
|
+
candidates.push(`${parsed.origin}${pathname}.md`);
|
|
339
|
+
} else {
|
|
340
|
+
// /foo/bar -> /foo/bar.md
|
|
341
|
+
candidates.push(`${parsed.origin}${pathname}.md`);
|
|
342
|
+
}
|
|
343
|
+
} catch {
|
|
344
|
+
return null;
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
for (const candidate of candidates) {
|
|
348
|
+
const result = await loadPage(candidate, { timeout: Math.min(timeout, 5) });
|
|
349
|
+
if (result.ok && result.content.trim().length > 100 && !looksLikeHtml(result.content)) {
|
|
350
|
+
return result.content;
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
return null;
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
* Try to fetch LLM-friendly endpoints
|
|
359
|
+
*/
|
|
360
|
+
async function tryLlmEndpoints(origin: string, timeout: number): Promise<string | null> {
|
|
361
|
+
const endpoints = [`${origin}/.well-known/llms.txt`, `${origin}/llms.txt`, `${origin}/llms.md`];
|
|
362
|
+
|
|
363
|
+
for (const endpoint of endpoints) {
|
|
364
|
+
const result = await loadPage(endpoint, { timeout: Math.min(timeout, 5) });
|
|
365
|
+
if (result.ok && result.content.trim().length > 100 && !looksLikeHtml(result.content)) {
|
|
366
|
+
return result.content;
|
|
367
|
+
}
|
|
368
|
+
}
|
|
369
|
+
return null;
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
/**
|
|
373
|
+
* Try content negotiation for markdown/plain
|
|
374
|
+
*/
|
|
375
|
+
async function tryContentNegotiation(url: string, timeout: number): Promise<{ content: string; type: string } | null> {
|
|
376
|
+
const result = await loadPage(url, {
|
|
377
|
+
timeout,
|
|
378
|
+
headers: { Accept: "text/markdown, text/plain;q=0.9, text/html;q=0.8" },
|
|
379
|
+
});
|
|
380
|
+
|
|
381
|
+
if (!result.ok) return null;
|
|
382
|
+
|
|
383
|
+
const mime = normalizeMime(result.contentType);
|
|
384
|
+
if (mime.includes("markdown") || mime === "text/plain") {
|
|
385
|
+
return { content: result.content, type: result.contentType };
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
return null;
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
/**
|
|
392
|
+
* Parse alternate links from HTML head
|
|
393
|
+
*/
|
|
394
|
+
function parseAlternateLinks(html: string, pageUrl: string): string[] {
|
|
395
|
+
const links: string[] = [];
|
|
396
|
+
|
|
397
|
+
try {
|
|
398
|
+
const doc = parseHtml(html.slice(0, 262144));
|
|
399
|
+
const alternateLinks = doc.querySelectorAll('link[rel="alternate"]');
|
|
400
|
+
|
|
401
|
+
for (const link of alternateLinks) {
|
|
402
|
+
const href = link.getAttribute("href");
|
|
403
|
+
const type = link.getAttribute("type")?.toLowerCase() ?? "";
|
|
404
|
+
|
|
405
|
+
if (!href) continue;
|
|
406
|
+
|
|
407
|
+
// Skip site-wide feeds
|
|
408
|
+
if (
|
|
409
|
+
href.includes("RecentChanges") ||
|
|
410
|
+
href.includes("Special:") ||
|
|
411
|
+
href.includes("/feed/") ||
|
|
412
|
+
href.includes("action=feed")
|
|
413
|
+
) {
|
|
414
|
+
continue;
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
if (type.includes("markdown")) {
|
|
418
|
+
links.push(href);
|
|
419
|
+
} else if (
|
|
420
|
+
(type.includes("rss") || type.includes("atom") || type.includes("feed")) &&
|
|
421
|
+
(href.includes(new URL(pageUrl).pathname) || href.includes("comments"))
|
|
422
|
+
) {
|
|
423
|
+
links.push(href);
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
} catch {}
|
|
427
|
+
|
|
428
|
+
return links;
|
|
429
|
+
}
|
|
430
|
+
|
|
431
|
+
/**
|
|
432
|
+
* Extract document links from HTML (for PDF/DOCX wrapper pages)
|
|
433
|
+
*/
|
|
434
|
+
function extractDocumentLinks(html: string, baseUrl: string): string[] {
|
|
435
|
+
const links: string[] = [];
|
|
436
|
+
|
|
437
|
+
try {
|
|
438
|
+
const doc = parseHtml(html);
|
|
439
|
+
const anchors = doc.querySelectorAll("a[href]");
|
|
440
|
+
|
|
441
|
+
for (const anchor of anchors) {
|
|
442
|
+
const href = anchor.getAttribute("href");
|
|
443
|
+
if (!href) continue;
|
|
444
|
+
|
|
445
|
+
const ext = path.extname(href).toLowerCase();
|
|
446
|
+
if (CONVERTIBLE_EXTENSIONS.has(ext)) {
|
|
447
|
+
const resolved = href.startsWith("http") ? href : new URL(href, baseUrl).href;
|
|
448
|
+
links.push(resolved);
|
|
449
|
+
}
|
|
450
|
+
}
|
|
451
|
+
} catch {}
|
|
452
|
+
|
|
453
|
+
return links;
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
/**
|
|
457
|
+
* Strip CDATA wrapper and clean text
|
|
458
|
+
*/
|
|
459
|
+
function cleanFeedText(text: string): string {
|
|
460
|
+
return text
|
|
461
|
+
.replace(/<!\[CDATA\[/g, "")
|
|
462
|
+
.replace(/\]\]>/g, "")
|
|
463
|
+
.replace(/</g, "<")
|
|
464
|
+
.replace(/>/g, ">")
|
|
465
|
+
.replace(/&/g, "&")
|
|
466
|
+
.replace(/"/g, '"')
|
|
467
|
+
.replace(/<[^>]+>/g, "") // Strip HTML tags
|
|
468
|
+
.trim();
|
|
469
|
+
}
|
|
470
|
+
|
|
471
|
+
/**
 * Render an RSS or Atom feed document as markdown: the feed title as an H1,
 * then up to `maxItems` entries, each with title, date, a summary truncated
 * to 500 chars, and a "Read more" link, separated by horizontal rules.
 *
 * Falls back to returning the raw content unchanged when the input parses
 * as neither RSS (<channel>) nor Atom (<feed>), or when parsing throws.
 */
function parseFeedToMarkdown(content: string, maxItems = 10): string {
  try {
    // parseNoneClosedTags: feed XML contains elements the HTML parser would
    // otherwise treat as improperly closed.
    const doc = parseHtml(content, { parseNoneClosedTags: true });

    // Try RSS
    const channel = doc.querySelector("channel");
    if (channel) {
      const title = cleanFeedText(channel.querySelector("title")?.text || "RSS Feed");
      const items = channel.querySelectorAll("item").slice(0, maxItems);

      let md = `# ${title}\n\n`;
      for (const item of items) {
        const itemTitle = cleanFeedText(item.querySelector("title")?.text || "Untitled");
        // RSS puts the link in element text (unlike Atom's href attribute).
        const link = cleanFeedText(item.querySelector("link")?.text || "");
        const pubDate = cleanFeedText(item.querySelector("pubDate")?.text || "");
        const desc = cleanFeedText(item.querySelector("description")?.text || "");

        md += `## ${itemTitle}\n`;
        if (pubDate) md += `*${pubDate}*\n\n`;
        // Cap descriptions at 500 chars, appending an ellipsis when cut.
        if (desc) md += `${desc.slice(0, 500)}${desc.length > 500 ? "..." : ""}\n\n`;
        if (link) md += `[Read more](${link})\n\n`;
        md += "---\n\n";
      }
      return md;
    }

    // Try Atom
    const feed = doc.querySelector("feed");
    if (feed) {
      const title = cleanFeedText(feed.querySelector("title")?.text || "Atom Feed");
      const entries = feed.querySelectorAll("entry").slice(0, maxItems);

      let md = `# ${title}\n\n`;
      for (const entry of entries) {
        const entryTitle = cleanFeedText(entry.querySelector("title")?.text || "Untitled");
        // Atom links live in the href attribute, not element text.
        const link = entry.querySelector("link")?.getAttribute("href") || "";
        const updated = cleanFeedText(entry.querySelector("updated")?.text || "");
        // Prefer <summary>, fall back to full <content>.
        const summary = cleanFeedText(
          entry.querySelector("summary")?.text || entry.querySelector("content")?.text || "",
        );

        md += `## ${entryTitle}\n`;
        if (updated) md += `*${updated}*\n\n`;
        if (summary) md += `${summary.slice(0, 500)}${summary.length > 500 ? "..." : ""}\n\n`;
        if (link) md += `[Read more](${link})\n\n`;
        md += "---\n\n";
      }
      return md;
    }
  } catch {}

  return content; // Fall back to raw content
}
|
|
527
|
+
|
|
528
|
+
/**
|
|
529
|
+
* Render HTML to text using lynx
|
|
530
|
+
*/
|
|
531
|
+
function renderWithLynx(html: string, timeout: number): { content: string; ok: boolean } {
|
|
532
|
+
const tmpFile = path.join(os.tmpdir(), `pi-render-${Date.now()}.html`);
|
|
533
|
+
try {
|
|
534
|
+
fs.writeFileSync(tmpFile, html);
|
|
535
|
+
// Convert path to file URL (handles Windows paths correctly)
|
|
536
|
+
const normalizedPath = tmpFile.replace(/\\/g, "/");
|
|
537
|
+
const fileUrl = normalizedPath.startsWith("/") ? `file://${normalizedPath}` : `file:///${normalizedPath}`;
|
|
538
|
+
const result = exec("lynx", ["-dump", "-nolist", "-width", "120", fileUrl], { timeout });
|
|
539
|
+
return { content: result.stdout, ok: result.ok };
|
|
540
|
+
} finally {
|
|
541
|
+
try {
|
|
542
|
+
fs.unlinkSync(tmpFile);
|
|
543
|
+
} catch {}
|
|
544
|
+
}
|
|
545
|
+
}
|
|
546
|
+
|
|
547
|
+
/**
|
|
548
|
+
* Check if lynx output looks JS-gated or mostly navigation
|
|
549
|
+
*/
|
|
550
|
+
function isLowQualityOutput(content: string): boolean {
|
|
551
|
+
const lower = content.toLowerCase();
|
|
552
|
+
|
|
553
|
+
// JS-gated indicators
|
|
554
|
+
const jsGated = [
|
|
555
|
+
"enable javascript",
|
|
556
|
+
"javascript required",
|
|
557
|
+
"turn on javascript",
|
|
558
|
+
"please enable javascript",
|
|
559
|
+
"browser not supported",
|
|
560
|
+
];
|
|
561
|
+
if (content.length < 1024 && jsGated.some((t) => lower.includes(t))) {
|
|
562
|
+
return true;
|
|
563
|
+
}
|
|
564
|
+
|
|
565
|
+
// Mostly navigation (high link/menu density)
|
|
566
|
+
const lines = content.split("\n").filter((l) => l.trim());
|
|
567
|
+
const shortLines = lines.filter((l) => l.trim().length < 40);
|
|
568
|
+
if (lines.length > 10 && shortLines.length / lines.length > 0.7) {
|
|
569
|
+
return true;
|
|
570
|
+
}
|
|
571
|
+
|
|
572
|
+
return false;
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
/**
|
|
576
|
+
* Format JSON
|
|
577
|
+
*/
|
|
578
|
+
function formatJson(content: string): string {
|
|
579
|
+
try {
|
|
580
|
+
return JSON.stringify(JSON.parse(content), null, 2);
|
|
581
|
+
} catch {
|
|
582
|
+
return content;
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
|
|
586
|
+
/**
|
|
587
|
+
* Truncate and cleanup output
|
|
588
|
+
*/
|
|
589
|
+
function finalizeOutput(content: string): { content: string; truncated: boolean } {
|
|
590
|
+
const cleaned = content.replace(/\n{3,}/g, "\n\n").trim();
|
|
591
|
+
const truncated = cleaned.length > MAX_OUTPUT_CHARS;
|
|
592
|
+
return {
|
|
593
|
+
content: cleaned.slice(0, MAX_OUTPUT_CHARS),
|
|
594
|
+
truncated,
|
|
595
|
+
};
|
|
596
|
+
}
|
|
597
|
+
|
|
598
|
+
/**
|
|
599
|
+
* Fetch page as binary buffer (for convertible files)
|
|
600
|
+
*/
|
|
601
|
+
async function fetchBinary(
|
|
602
|
+
url: string,
|
|
603
|
+
timeout: number,
|
|
604
|
+
): Promise<{ buffer: Buffer; contentType: string; contentDisposition?: string; ok: boolean }> {
|
|
605
|
+
try {
|
|
606
|
+
const controller = new AbortController();
|
|
607
|
+
const timeoutId = setTimeout(() => controller.abort(), timeout * 1000);
|
|
608
|
+
|
|
609
|
+
const response = await fetch(url, {
|
|
610
|
+
signal: controller.signal,
|
|
611
|
+
headers: {
|
|
612
|
+
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 Chrome/131.0.0.0",
|
|
613
|
+
},
|
|
614
|
+
redirect: "follow",
|
|
615
|
+
});
|
|
616
|
+
|
|
617
|
+
clearTimeout(timeoutId);
|
|
618
|
+
|
|
619
|
+
if (!response.ok) {
|
|
620
|
+
return { buffer: Buffer.alloc(0), contentType: "", ok: false };
|
|
621
|
+
}
|
|
622
|
+
|
|
623
|
+
const contentType = response.headers.get("content-type") ?? "";
|
|
624
|
+
const contentDisposition = response.headers.get("content-disposition") ?? undefined;
|
|
625
|
+
const buffer = Buffer.from(await response.arrayBuffer());
|
|
626
|
+
|
|
627
|
+
return { buffer, contentType, contentDisposition, ok: true };
|
|
628
|
+
} catch {
|
|
629
|
+
return { buffer: Buffer.alloc(0), contentType: "", ok: false };
|
|
630
|
+
}
|
|
631
|
+
}
|
|
632
|
+
|
|
633
|
+
// =============================================================================
|
|
634
|
+
// GitHub Special Handling
|
|
635
|
+
// =============================================================================
|
|
636
|
+
|
|
637
|
+
/** Parsed components of a github.com URL (produced by parseGitHubUrl). */
interface GitHubUrl {
  // Which GitHub page the URL addresses; "other" covers unrecognized sections.
  type: "blob" | "tree" | "repo" | "issue" | "issues" | "pull" | "pulls" | "discussion" | "discussions" | "other";
  owner: string;
  repo: string;
  // Ref segment (branch/tag/SHA) for blob and tree URLs.
  ref?: string;
  // In-repo path for blob and tree URLs ("" for the root).
  path?: string;
  // Item number when the URL addresses a single issue/PR/discussion.
  number?: number;
}
|
|
645
|
+
|
|
646
|
+
/**
|
|
647
|
+
* Parse GitHub URL into components
|
|
648
|
+
*/
|
|
649
|
+
function parseGitHubUrl(url: string): GitHubUrl | null {
|
|
650
|
+
try {
|
|
651
|
+
const parsed = new URL(url);
|
|
652
|
+
if (parsed.hostname !== "github.com") return null;
|
|
653
|
+
|
|
654
|
+
const parts = parsed.pathname.split("/").filter(Boolean);
|
|
655
|
+
if (parts.length < 2) return null;
|
|
656
|
+
|
|
657
|
+
const [owner, repo, ...rest] = parts;
|
|
658
|
+
|
|
659
|
+
if (rest.length === 0) {
|
|
660
|
+
return { type: "repo", owner, repo };
|
|
661
|
+
}
|
|
662
|
+
|
|
663
|
+
const [section, ...subParts] = rest;
|
|
664
|
+
|
|
665
|
+
switch (section) {
|
|
666
|
+
case "blob":
|
|
667
|
+
case "tree": {
|
|
668
|
+
const [ref, ...pathParts] = subParts;
|
|
669
|
+
return { type: section, owner, repo, ref, path: pathParts.join("/") };
|
|
670
|
+
}
|
|
671
|
+
case "issues":
|
|
672
|
+
if (subParts.length > 0 && /^\d+$/.test(subParts[0])) {
|
|
673
|
+
return { type: "issue", owner, repo, number: parseInt(subParts[0], 10) };
|
|
674
|
+
}
|
|
675
|
+
return { type: "issues", owner, repo };
|
|
676
|
+
case "pull":
|
|
677
|
+
if (subParts.length > 0 && /^\d+$/.test(subParts[0])) {
|
|
678
|
+
return { type: "pull", owner, repo, number: parseInt(subParts[0], 10) };
|
|
679
|
+
}
|
|
680
|
+
return { type: "pulls", owner, repo };
|
|
681
|
+
case "pulls":
|
|
682
|
+
return { type: "pulls", owner, repo };
|
|
683
|
+
case "discussions":
|
|
684
|
+
if (subParts.length > 0 && /^\d+$/.test(subParts[0])) {
|
|
685
|
+
return { type: "discussion", owner, repo, number: parseInt(subParts[0], 10) };
|
|
686
|
+
}
|
|
687
|
+
return { type: "discussions", owner, repo };
|
|
688
|
+
default:
|
|
689
|
+
return { type: "other", owner, repo };
|
|
690
|
+
}
|
|
691
|
+
} catch {
|
|
692
|
+
return null;
|
|
693
|
+
}
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
/**
|
|
697
|
+
* Convert GitHub blob URL to raw URL
|
|
698
|
+
*/
|
|
699
|
+
function toRawGitHubUrl(gh: GitHubUrl): string {
|
|
700
|
+
return `https://raw.githubusercontent.com/${gh.owner}/${gh.repo}/refs/heads/${gh.ref}/${gh.path}`;
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
/**
|
|
704
|
+
* Fetch from GitHub API
|
|
705
|
+
*/
|
|
706
|
+
async function fetchGitHubApi(endpoint: string, timeout: number): Promise<{ data: unknown; ok: boolean }> {
|
|
707
|
+
try {
|
|
708
|
+
const controller = new AbortController();
|
|
709
|
+
const timeoutId = setTimeout(() => controller.abort(), timeout * 1000);
|
|
710
|
+
|
|
711
|
+
const headers: Record<string, string> = {
|
|
712
|
+
Accept: "application/vnd.github.v3+json",
|
|
713
|
+
"User-Agent": "pi-web-fetch/1.0",
|
|
714
|
+
};
|
|
715
|
+
|
|
716
|
+
// Use GITHUB_TOKEN if available
|
|
717
|
+
const token = process.env.GITHUB_TOKEN || process.env.GH_TOKEN;
|
|
718
|
+
if (token) {
|
|
719
|
+
headers.Authorization = `Bearer ${token}`;
|
|
720
|
+
}
|
|
721
|
+
|
|
722
|
+
const response = await fetch(`https://api.github.com${endpoint}`, {
|
|
723
|
+
signal: controller.signal,
|
|
724
|
+
headers,
|
|
725
|
+
});
|
|
726
|
+
|
|
727
|
+
clearTimeout(timeoutId);
|
|
728
|
+
|
|
729
|
+
if (!response.ok) {
|
|
730
|
+
return { data: null, ok: false };
|
|
731
|
+
}
|
|
732
|
+
|
|
733
|
+
return { data: await response.json(), ok: true };
|
|
734
|
+
} catch {
|
|
735
|
+
return { data: null, ok: false };
|
|
736
|
+
}
|
|
737
|
+
}
|
|
738
|
+
|
|
739
|
+
/**
|
|
740
|
+
* Render GitHub issue/PR to markdown
|
|
741
|
+
*/
|
|
742
|
+
/**
 * Render a single GitHub issue or pull request as markdown.
 *
 * Fetches the item via the REST API (pulls endpoint for PRs, issues
 * endpoint otherwise), then appends up to 50 comments when the item has
 * any. Returns `{ ok: false }` when the API call fails.
 *
 * @param gh parsed GitHub URL with `number` set
 * @param timeout per-request timeout in seconds
 */
async function renderGitHubIssue(gh: GitHubUrl, timeout: number): Promise<{ content: string; ok: boolean }> {
  // PRs and issues live on different endpoints, though their payloads overlap.
  const endpoint =
    gh.type === "pull"
      ? `/repos/${gh.owner}/${gh.repo}/pulls/${gh.number}`
      : `/repos/${gh.owner}/${gh.repo}/issues/${gh.number}`;

  const result = await fetchGitHubApi(endpoint, timeout);
  if (!result.ok || !result.data) return { content: "", ok: false };

  // Only the fields consumed below; the API returns many more.
  const issue = result.data as {
    title: string;
    number: number;
    state: string;
    user: { login: string };
    created_at: string;
    updated_at: string;
    body: string | null;
    labels: Array<{ name: string }>;
    comments: number;
    html_url: string;
  };

  // Header block: title, state line, timestamps, labels.
  let md = `# ${issue.title}\n\n`;
  md += `**#${issue.number}** · ${issue.state} · opened by @${issue.user.login}\n`;
  md += `Created: ${issue.created_at} · Updated: ${issue.updated_at}\n`;
  if (issue.labels.length > 0) {
    md += `Labels: ${issue.labels.map((l) => l.name).join(", ")}\n`;
  }
  md += `\n---\n\n`;
  md += issue.body || "*No description provided.*";
  md += `\n\n---\n\n`;

  // Fetch comments if any
  if (issue.comments > 0) {
    // Comments for both issues and PRs come from the issues comments endpoint.
    const commentsResult = await fetchGitHubApi(
      `/repos/${gh.owner}/${gh.repo}/issues/${gh.number}/comments?per_page=50`,
      timeout,
    );
    if (commentsResult.ok && Array.isArray(commentsResult.data)) {
      md += `## Comments (${issue.comments})\n\n`;
      for (const comment of commentsResult.data as Array<{
        user: { login: string };
        created_at: string;
        body: string;
      }>) {
        md += `### @${comment.user.login} · ${comment.created_at}\n\n`;
        md += `${comment.body}\n\n---\n\n`;
      }
    }
  }

  return { content: md, ok: true };
}
|
|
795
|
+
|
|
796
|
+
/**
|
|
797
|
+
* Render GitHub issues list to markdown
|
|
798
|
+
*/
|
|
799
|
+
async function renderGitHubIssuesList(gh: GitHubUrl, timeout: number): Promise<{ content: string; ok: boolean }> {
|
|
800
|
+
const result = await fetchGitHubApi(`/repos/${gh.owner}/${gh.repo}/issues?state=open&per_page=30`, timeout);
|
|
801
|
+
if (!result.ok || !Array.isArray(result.data)) return { content: "", ok: false };
|
|
802
|
+
|
|
803
|
+
const issues = result.data as Array<{
|
|
804
|
+
number: number;
|
|
805
|
+
title: string;
|
|
806
|
+
state: string;
|
|
807
|
+
user: { login: string };
|
|
808
|
+
created_at: string;
|
|
809
|
+
comments: number;
|
|
810
|
+
labels: Array<{ name: string }>;
|
|
811
|
+
pull_request?: unknown;
|
|
812
|
+
}>;
|
|
813
|
+
|
|
814
|
+
let md = `# ${gh.owner}/${gh.repo} - Open Issues\n\n`;
|
|
815
|
+
|
|
816
|
+
for (const issue of issues) {
|
|
817
|
+
if (issue.pull_request) continue; // Skip PRs in issues list
|
|
818
|
+
const labels = issue.labels.length > 0 ? ` [${issue.labels.map((l) => l.name).join(", ")}]` : "";
|
|
819
|
+
md += `- **#${issue.number}** ${issue.title}${labels}\n`;
|
|
820
|
+
md += ` by @${issue.user.login} · ${issue.comments} comments · ${issue.created_at}\n\n`;
|
|
821
|
+
}
|
|
822
|
+
|
|
823
|
+
return { content: md, ok: true };
|
|
824
|
+
}
|
|
825
|
+
|
|
826
|
+
/**
|
|
827
|
+
* Render GitHub tree (directory) to markdown
|
|
828
|
+
*/
|
|
829
|
+
/**
 * Render a GitHub directory (tree URL) as markdown: a sorted listing of
 * its contents, followed by the directory's README when one exists.
 *
 * @param gh parsed GitHub tree URL (ref/path may be empty)
 * @param timeout per-request timeout in seconds
 */
async function renderGitHubTree(gh: GitHubUrl, timeout: number): Promise<{ content: string; ok: boolean }> {
  // Fetch repo info first to get default branch if ref not specified
  const repoResult = await fetchGitHubApi(`/repos/${gh.owner}/${gh.repo}`, timeout);
  if (!repoResult.ok) return { content: "", ok: false };

  const repo = repoResult.data as {
    full_name: string;
    default_branch: string;
  };

  const ref = gh.ref || repo.default_branch;
  const dirPath = gh.path || "";

  let md = `# ${repo.full_name}/${dirPath || "(root)"}\n\n`;
  md += `**Branch:** ${ref}\n\n`;

  // Fetch directory contents
  const contentsResult = await fetchGitHubApi(`/repos/${gh.owner}/${gh.repo}/contents/${dirPath}?ref=${ref}`, timeout);

  if (contentsResult.ok && Array.isArray(contentsResult.data)) {
    const items = contentsResult.data as Array<{
      name: string;
      type: "file" | "dir" | "symlink" | "submodule";
      size?: number;
      path: string;
    }>;

    // Sort: directories first, then files, alphabetically
    items.sort((a, b) => {
      if (a.type === "dir" && b.type !== "dir") return -1;
      if (a.type !== "dir" && b.type === "dir") return 1;
      return a.name.localeCompare(b.name);
    });

    md += `## Contents\n\n`;
    md += "```\n";
    for (const item of items) {
      const prefix = item.type === "dir" ? "[dir] " : " ";
      // Size is only meaningful for files; omitted for dirs below.
      const size = item.size ? ` (${item.size} bytes)` : "";
      md += `${prefix}${item.name}${item.type === "file" ? size : ""}\n`;
    }
    md += "```\n\n";

    // Look for README in this directory
    const readmeFile = items.find((item) => item.type === "file" && /^readme\.md$/i.test(item.name));
    if (readmeFile) {
      const readmePath = dirPath ? `${dirPath}/${readmeFile.name}` : readmeFile.name;
      // NOTE(review): refs/heads/ only resolves branch refs — a tag or SHA
      // in `ref` would 404 here; confirm against toRawGitHubUrl's usage.
      const rawUrl = `https://raw.githubusercontent.com/${gh.owner}/${gh.repo}/refs/heads/${ref}/${readmePath}`;
      const readmeResult = await loadPage(rawUrl, { timeout });
      if (readmeResult.ok) {
        md += `---\n\n## README\n\n${readmeResult.content}`;
      }
    }
  }

  return { content: md, ok: true };
}
|
|
886
|
+
|
|
887
|
+
/**
|
|
888
|
+
* Render GitHub repo to markdown (file list + README)
|
|
889
|
+
*/
|
|
890
|
+
/**
 * Render a repository front page as markdown: description and stats,
 * the first 100 entries of the recursive file tree, and the decoded
 * README. Returns `{ ok: false }` only if the initial repo fetch fails;
 * tree/README failures degrade gracefully to a shorter document.
 */
async function renderGitHubRepo(gh: GitHubUrl, timeout: number): Promise<{ content: string; ok: boolean }> {
  // Fetch repo info
  const repoResult = await fetchGitHubApi(`/repos/${gh.owner}/${gh.repo}`, timeout);
  if (!repoResult.ok) return { content: "", ok: false };

  // Only the fields consumed below.
  const repo = repoResult.data as {
    full_name: string;
    description: string | null;
    stargazers_count: number;
    forks_count: number;
    open_issues_count: number;
    default_branch: string;
    language: string | null;
    license: { name: string } | null;
  };

  let md = `# ${repo.full_name}\n\n`;
  if (repo.description) md += `${repo.description}\n\n`;
  md += `Stars: ${repo.stargazers_count} · Forks: ${repo.forks_count} · Issues: ${repo.open_issues_count}\n`;
  if (repo.language) md += `Language: ${repo.language}\n`;
  if (repo.license) md += `License: ${repo.license.name}\n`;
  md += `\n---\n\n`;

  // Fetch file tree
  const treeResult = await fetchGitHubApi(
    `/repos/${gh.owner}/${gh.repo}/git/trees/${repo.default_branch}?recursive=1`,
    timeout,
  );
  if (treeResult.ok && treeResult.data) {
    const tree = (treeResult.data as { tree: Array<{ path: string; type: string }> }).tree;
    md += `## Files\n\n`;
    md += "```\n";
    // Cap the listing at 100 paths to keep output bounded.
    for (const item of tree.slice(0, 100)) {
      const prefix = item.type === "tree" ? "[dir] " : " ";
      md += `${prefix}${item.path}\n`;
    }
    if (tree.length > 100) {
      md += `... and ${tree.length - 100} more files\n`;
    }
    md += "```\n\n";
  }

  // Fetch README
  const readmeResult = await fetchGitHubApi(`/repos/${gh.owner}/${gh.repo}/readme`, timeout);
  if (readmeResult.ok && readmeResult.data) {
    const readme = readmeResult.data as { content: string; encoding: string };
    // The readme endpoint delivers content base64-encoded; skip anything else.
    if (readme.encoding === "base64") {
      const decoded = Buffer.from(readme.content, "base64").toString("utf-8");
      md += `## README\n\n${decoded}`;
    }
  }

  return { content: md, ok: true };
}
|
|
944
|
+
|
|
945
|
+
/**
|
|
946
|
+
* Handle GitHub URLs specially
|
|
947
|
+
*/
|
|
948
|
+
async function handleGitHub(url: string, timeout: number): Promise<RenderResult | null> {
|
|
949
|
+
const gh = parseGitHubUrl(url);
|
|
950
|
+
if (!gh) return null;
|
|
951
|
+
|
|
952
|
+
const fetchedAt = new Date().toISOString();
|
|
953
|
+
const notes: string[] = [];
|
|
954
|
+
|
|
955
|
+
switch (gh.type) {
|
|
956
|
+
case "blob": {
|
|
957
|
+
// Convert to raw URL and fetch
|
|
958
|
+
const rawUrl = toRawGitHubUrl(gh);
|
|
959
|
+
notes.push(`Fetched raw: ${rawUrl}`);
|
|
960
|
+
const result = await loadPage(rawUrl, { timeout });
|
|
961
|
+
if (result.ok) {
|
|
962
|
+
const output = finalizeOutput(result.content);
|
|
963
|
+
return {
|
|
964
|
+
url,
|
|
965
|
+
finalUrl: rawUrl,
|
|
966
|
+
contentType: "text/plain",
|
|
967
|
+
method: "github-raw",
|
|
968
|
+
content: output.content,
|
|
969
|
+
fetchedAt,
|
|
970
|
+
truncated: output.truncated,
|
|
971
|
+
notes,
|
|
972
|
+
};
|
|
973
|
+
}
|
|
974
|
+
break;
|
|
975
|
+
}
|
|
976
|
+
|
|
977
|
+
case "tree": {
|
|
978
|
+
notes.push(`Fetched via GitHub API`);
|
|
979
|
+
const result = await renderGitHubTree(gh, timeout);
|
|
980
|
+
if (result.ok) {
|
|
981
|
+
const output = finalizeOutput(result.content);
|
|
982
|
+
return {
|
|
983
|
+
url,
|
|
984
|
+
finalUrl: url,
|
|
985
|
+
contentType: "text/markdown",
|
|
986
|
+
method: "github-tree",
|
|
987
|
+
content: output.content,
|
|
988
|
+
fetchedAt,
|
|
989
|
+
truncated: output.truncated,
|
|
990
|
+
notes,
|
|
991
|
+
};
|
|
992
|
+
}
|
|
993
|
+
break;
|
|
994
|
+
}
|
|
995
|
+
|
|
996
|
+
case "issue":
|
|
997
|
+
case "pull": {
|
|
998
|
+
notes.push(`Fetched via GitHub API`);
|
|
999
|
+
const result = await renderGitHubIssue(gh, timeout);
|
|
1000
|
+
if (result.ok) {
|
|
1001
|
+
const output = finalizeOutput(result.content);
|
|
1002
|
+
return {
|
|
1003
|
+
url,
|
|
1004
|
+
finalUrl: url,
|
|
1005
|
+
contentType: "text/markdown",
|
|
1006
|
+
method: gh.type === "pull" ? "github-pr" : "github-issue",
|
|
1007
|
+
content: output.content,
|
|
1008
|
+
fetchedAt,
|
|
1009
|
+
truncated: output.truncated,
|
|
1010
|
+
notes,
|
|
1011
|
+
};
|
|
1012
|
+
}
|
|
1013
|
+
break;
|
|
1014
|
+
}
|
|
1015
|
+
|
|
1016
|
+
case "issues": {
|
|
1017
|
+
notes.push(`Fetched via GitHub API`);
|
|
1018
|
+
const result = await renderGitHubIssuesList(gh, timeout);
|
|
1019
|
+
if (result.ok) {
|
|
1020
|
+
const output = finalizeOutput(result.content);
|
|
1021
|
+
return {
|
|
1022
|
+
url,
|
|
1023
|
+
finalUrl: url,
|
|
1024
|
+
contentType: "text/markdown",
|
|
1025
|
+
method: "github-issues",
|
|
1026
|
+
content: output.content,
|
|
1027
|
+
fetchedAt,
|
|
1028
|
+
truncated: output.truncated,
|
|
1029
|
+
notes,
|
|
1030
|
+
};
|
|
1031
|
+
}
|
|
1032
|
+
break;
|
|
1033
|
+
}
|
|
1034
|
+
|
|
1035
|
+
case "repo": {
|
|
1036
|
+
notes.push(`Fetched via GitHub API`);
|
|
1037
|
+
const result = await renderGitHubRepo(gh, timeout);
|
|
1038
|
+
if (result.ok) {
|
|
1039
|
+
const output = finalizeOutput(result.content);
|
|
1040
|
+
return {
|
|
1041
|
+
url,
|
|
1042
|
+
finalUrl: url,
|
|
1043
|
+
contentType: "text/markdown",
|
|
1044
|
+
method: "github-repo",
|
|
1045
|
+
content: output.content,
|
|
1046
|
+
fetchedAt,
|
|
1047
|
+
truncated: output.truncated,
|
|
1048
|
+
notes,
|
|
1049
|
+
};
|
|
1050
|
+
}
|
|
1051
|
+
break;
|
|
1052
|
+
}
|
|
1053
|
+
}
|
|
1054
|
+
|
|
1055
|
+
// Fall back to null (let normal rendering handle it)
|
|
1056
|
+
return null;
|
|
1057
|
+
}
|
|
1058
|
+
|
|
1059
|
+
// =============================================================================
|
|
1060
|
+
// Twitter/X Special Handling (via Nitter)
|
|
1061
|
+
// =============================================================================
|
|
1062
|
+
|
|
1063
|
+
// Active Nitter instances - check https://status.d420.de/instances for current status
|
|
1064
|
+
// Tried in order by handleTwitter; the first instance that returns parseable
// tweet HTML wins. These are public mirrors and may go down at any time.
const NITTER_INSTANCES = [
  "nitter.privacyredirect.com",
  "nitter.tiekoetter.com",
  "nitter.poast.org",
  "nitter.woodland.cafe",
];
|
|
1070
|
+
|
|
1071
|
+
/**
|
|
1072
|
+
* Handle Twitter/X URLs via Nitter
|
|
1073
|
+
*/
|
|
1074
|
+
/**
 * Special-case renderer for twitter.com / x.com URLs.
 *
 * X blocks unauthenticated scraping, so this tries each public Nitter
 * mirror in NITTER_INSTANCES and scrapes the tweet out of Nitter's HTML
 * via its CSS classes. If every instance fails, returns an explanatory
 * "blocked" result (never null) so the generic pipeline does not waste
 * a doomed fetch on x.com itself.
 */
async function handleTwitter(url: string, timeout: number): Promise<RenderResult | null> {
  try {
    const parsed = new URL(url);
    // Non-Twitter hosts fall through to normal rendering.
    if (!["twitter.com", "x.com", "www.twitter.com", "www.x.com"].includes(parsed.hostname)) {
      return null;
    }

    const fetchedAt = new Date().toISOString();

    // Try Nitter instances
    for (const instance of NITTER_INSTANCES) {
      const nitterUrl = `https://${instance}${parsed.pathname}`;
      // Cap per-instance wait at 10s so one dead mirror can't eat the whole budget.
      const result = await loadPage(nitterUrl, { timeout: Math.min(timeout, 10) });

      // Tiny responses are error/placeholder pages; require some substance.
      if (result.ok && result.content.length > 500) {
        // Parse the Nitter HTML
        const doc = parseHtml(result.content);

        // Extract tweet content via Nitter's CSS classes.
        const tweetContent = doc.querySelector(".tweet-content")?.text?.trim();
        const fullname = doc.querySelector(".fullname")?.text?.trim();
        const username = doc.querySelector(".username")?.text?.trim();
        const date = doc.querySelector(".tweet-date a")?.text?.trim();
        const stats = doc.querySelector(".tweet-stats")?.text?.trim();

        if (tweetContent) {
          let md = `# Tweet by ${fullname || "Unknown"} (${username || "@?"})\n\n`;
          if (date) md += `*${date}*\n\n`;
          md += `${tweetContent}\n\n`;
          if (stats) md += `---\n${stats.replace(/\s+/g, " ")}\n`;

          // Check for replies/thread — the first timeline item is the tweet
          // itself, so replies start at index 1; include at most 9.
          const replies = doc.querySelectorAll(".timeline-item .tweet-content");
          if (replies.length > 1) {
            md += `\n---\n\n## Thread/Replies\n\n`;
            for (const reply of Array.from(replies).slice(1, 10)) {
              const replyUser = reply.parentNode?.querySelector(".username")?.text?.trim();
              md += `**${replyUser || "@?"}**: ${reply.text?.trim()}\n\n`;
            }
          }

          const output = finalizeOutput(md);
          return {
            url,
            finalUrl: nitterUrl,
            contentType: "text/markdown",
            method: "twitter-nitter",
            content: output.content,
            fetchedAt,
            truncated: output.truncated,
            notes: [`Via Nitter: ${instance}`],
          };
        }
      }
    }
  } catch {}

  // X.com blocks all bots - return a helpful error instead of falling through
  return {
    url,
    finalUrl: url,
    contentType: "text/plain",
    method: "twitter-blocked",
    content:
      "Twitter/X blocks automated access. Nitter instances were unavailable.\n\nTry:\n- Opening the link in a browser\n- Using a different Nitter instance manually\n- Checking if the tweet is available via an archive service",
    fetchedAt: new Date().toISOString(),
    truncated: false,
    notes: ["X.com blocks bots; Nitter instances unavailable"],
  };
}
|
|
1144
|
+
|
|
1145
|
+
// =============================================================================
|
|
1146
|
+
// Stack Overflow Special Handling
|
|
1147
|
+
// =============================================================================
|
|
1148
|
+
|
|
1149
|
+
/** Subset of a Stack Exchange API question object consumed by handleStackOverflow. */
interface SOQuestion {
  title: string;
  // HTML body (requires filter=withbody); converted with htmlToBasicMarkdown.
  body: string;
  score: number;
  owner: { display_name: string };
  // Unix timestamp in seconds (multiplied by 1000 before Date construction).
  creation_date: number;
  tags: string[];
  answer_count: number;
  is_answered: boolean;
}
|
|
1159
|
+
|
|
1160
|
+
/** Subset of a Stack Exchange API answer object consumed by handleStackOverflow. */
interface SOAnswer {
  // HTML body (requires filter=withbody); converted with htmlToBasicMarkdown.
  body: string;
  score: number;
  is_accepted: boolean;
  owner: { display_name: string };
  // Unix timestamp in seconds.
  creation_date: number;
}
|
|
1167
|
+
|
|
1168
|
+
/**
|
|
1169
|
+
* Convert basic HTML to markdown (for SO bodies)
|
|
1170
|
+
*/
|
|
1171
|
+
function htmlToBasicMarkdown(html: string): string {
|
|
1172
|
+
return html
|
|
1173
|
+
.replace(/<pre><code[^>]*>/g, "\n```\n")
|
|
1174
|
+
.replace(/<\/code><\/pre>/g, "\n```\n")
|
|
1175
|
+
.replace(/<code>/g, "`")
|
|
1176
|
+
.replace(/<\/code>/g, "`")
|
|
1177
|
+
.replace(/<strong>/g, "**")
|
|
1178
|
+
.replace(/<\/strong>/g, "**")
|
|
1179
|
+
.replace(/<em>/g, "*")
|
|
1180
|
+
.replace(/<\/em>/g, "*")
|
|
1181
|
+
.replace(/<a href="([^"]+)"[^>]*>([^<]+)<\/a>/g, "[$2]($1)")
|
|
1182
|
+
.replace(/<p>/g, "\n\n")
|
|
1183
|
+
.replace(/<\/p>/g, "")
|
|
1184
|
+
.replace(/<br\s*\/?>/g, "\n")
|
|
1185
|
+
.replace(/<li>/g, "- ")
|
|
1186
|
+
.replace(/<\/li>/g, "\n")
|
|
1187
|
+
.replace(/<\/?[uo]l>/g, "\n")
|
|
1188
|
+
.replace(/<h(\d)>/g, (_, n) => `\n${"#".repeat(parseInt(n, 10))} `)
|
|
1189
|
+
.replace(/<\/h\d>/g, "\n")
|
|
1190
|
+
.replace(/<blockquote>/g, "\n> ")
|
|
1191
|
+
.replace(/<\/blockquote>/g, "\n")
|
|
1192
|
+
.replace(/<[^>]+>/g, "") // Strip remaining tags
|
|
1193
|
+
.replace(/</g, "<")
|
|
1194
|
+
.replace(/>/g, ">")
|
|
1195
|
+
.replace(/&/g, "&")
|
|
1196
|
+
.replace(/"/g, '"')
|
|
1197
|
+
.replace(/'/g, "'")
|
|
1198
|
+
.replace(/\n{3,}/g, "\n\n")
|
|
1199
|
+
.trim();
|
|
1200
|
+
}
|
|
1201
|
+
|
|
1202
|
+
/**
|
|
1203
|
+
* Handle Stack Overflow URLs via API
|
|
1204
|
+
*/
|
|
1205
|
+
/**
 * Special-case renderer for Stack Overflow / Stack Exchange question URLs.
 *
 * Uses the public Stack Exchange REST API (no key) instead of scraping
 * HTML: fetches the question, then its answers sorted by votes, and
 * renders both as markdown. Returns null for unrecognized URLs or any
 * failure, letting the generic pipeline take over.
 */
async function handleStackOverflow(url: string, timeout: number): Promise<RenderResult | null> {
  try {
    const parsed = new URL(url);
    if (!parsed.hostname.includes("stackoverflow.com") && !parsed.hostname.includes("stackexchange.com")) {
      return null;
    }

    // Extract question ID from URL patterns like /questions/12345/...
    const match = parsed.pathname.match(/\/questions\/(\d+)/);
    if (!match) return null;

    const questionId = match[1];
    // API `site` parameter: "stackoverflow", or the subdomain for *.stackexchange.com sites.
    const site = parsed.hostname.includes("stackoverflow") ? "stackoverflow" : parsed.hostname.split(".")[0];
    const fetchedAt = new Date().toISOString();

    // Fetch question with answers
    // filter=withbody is required for the API to include the HTML body.
    const apiUrl = `https://api.stackexchange.com/2.3/questions/${questionId}?order=desc&sort=votes&site=${site}&filter=withbody`;
    const qResult = await loadPage(apiUrl, { timeout });

    if (!qResult.ok) return null;

    const qData = JSON.parse(qResult.content) as { items: SOQuestion[] };
    if (!qData.items?.length) return null;

    const question = qData.items[0];

    // Header: title, score/answer stats, tags, asker, date (YYYY-MM-DD).
    let md = `# ${question.title}\n\n`;
    md += `**Score:** ${question.score} · **Answers:** ${question.answer_count}`;
    md += question.is_answered ? " (Answered)" : "";
    md += `\n**Tags:** ${question.tags.join(", ")}\n`;
    md += `**Asked by:** ${question.owner.display_name} · ${
      new Date(question.creation_date * 1000).toISOString().split("T")[0]
    }\n\n`;
    md += `---\n\n## Question\n\n${htmlToBasicMarkdown(question.body)}\n\n`;

    // Fetch answers
    const aUrl = `https://api.stackexchange.com/2.3/questions/${questionId}/answers?order=desc&sort=votes&site=${site}&filter=withbody`;
    const aResult = await loadPage(aUrl, { timeout });

    if (aResult.ok) {
      const aData = JSON.parse(aResult.content) as { items: SOAnswer[] };
      if (aData.items?.length) {
        md += `---\n\n## Answers\n\n`;
        // Only the top 5 answers (already sorted by votes) to bound output.
        for (const answer of aData.items.slice(0, 5)) {
          const accepted = answer.is_accepted ? " (Accepted)" : "";
          md += `### Score: ${answer.score}${accepted} · by ${answer.owner.display_name}\n\n`;
          md += `${htmlToBasicMarkdown(answer.body)}\n\n---\n\n`;
        }
      }
    }

    const output = finalizeOutput(md);
    return {
      url,
      finalUrl: url,
      contentType: "text/markdown",
      method: "stackoverflow",
      content: output.content,
      fetchedAt,
      truncated: output.truncated,
      notes: ["Fetched via Stack Exchange API"],
    };
  } catch {}

  return null;
}
|
|
1271
|
+
|
|
1272
|
+
// =============================================================================
|
|
1273
|
+
// Wikipedia Special Handling
|
|
1274
|
+
// =============================================================================
|
|
1275
|
+
|
|
1276
|
+
/**
|
|
1277
|
+
* Handle Wikipedia URLs via API
|
|
1278
|
+
*/
|
|
1279
|
+
/**
 * Special-case renderer for *.wikipedia.org article URLs.
 *
 * Combines two REST endpoints into one markdown document: the summary
 * endpoint for the lede (title, description, extract) and the
 * mobile-html endpoint for the full article sections. Boilerplate
 * sections (References, External links, …) are dropped. Returns null
 * for non-article URLs or when both endpoints yield nothing.
 */
async function handleWikipedia(url: string, timeout: number): Promise<RenderResult | null> {
  try {
    const parsed = new URL(url);
    // Match *.wikipedia.org
    const wikiMatch = parsed.hostname.match(/^(\w+)\.wikipedia\.org$/);
    if (!wikiMatch) return null;

    // Language subdomain, e.g. "en" — used to build API hosts below.
    const lang = wikiMatch[1];
    const titleMatch = parsed.pathname.match(/\/wiki\/(.+)/);
    if (!titleMatch) return null;

    const title = decodeURIComponent(titleMatch[1]);
    const fetchedAt = new Date().toISOString();

    // Use Wikipedia API to get plain text extract
    const apiUrl = `https://${lang}.wikipedia.org/api/rest_v1/page/summary/${encodeURIComponent(title)}`;
    const summaryResult = await loadPage(apiUrl, { timeout });

    let md = "";

    if (summaryResult.ok) {
      const summary = JSON.parse(summaryResult.content) as {
        title: string;
        description?: string;
        extract: string;
      };
      md = `# ${summary.title}\n\n`;
      if (summary.description) md += `*${summary.description}*\n\n`;
      md += `${summary.extract}\n\n---\n\n`;
    }

    // Get full article content via mobile-html or parse API
    const contentUrl = `https://${lang}.wikipedia.org/api/rest_v1/page/mobile-html/${encodeURIComponent(title)}`;
    const contentResult = await loadPage(contentUrl, { timeout });

    if (contentResult.ok) {
      const doc = parseHtml(contentResult.content);

      // Extract main content sections
      const sections = doc.querySelectorAll("section");
      for (const section of sections) {
        const heading = section.querySelector("h2, h3, h4");
        const headingText = heading?.text?.trim();

        // Skip certain sections
        if (
          headingText &&
          ["References", "External links", "See also", "Notes", "Further reading"].includes(headingText)
        ) {
          continue;
        }

        if (headingText) {
          // h2 → "##", anything deeper → "###".
          const level = heading?.tagName === "H2" ? "##" : "###";
          md += `${level} ${headingText}\n\n`;
        }

        const paragraphs = section.querySelectorAll("p");
        for (const p of paragraphs) {
          const text = p.text?.trim();
          // Drop very short fragments (captions, coordinates, etc.).
          if (text && text.length > 20) {
            md += `${text}\n\n`;
          }
        }
      }
    }

    // Neither endpoint produced anything → let normal rendering try.
    if (!md) return null;

    const output = finalizeOutput(md);
    return {
      url,
      finalUrl: url,
      contentType: "text/markdown",
      method: "wikipedia",
      content: output.content,
      fetchedAt,
      truncated: output.truncated,
      notes: ["Fetched via Wikipedia API"],
    };
  } catch {}

  return null;
}
|
|
1363
|
+
|
|
1364
|
+
// =============================================================================
|
|
1365
|
+
// Reddit Special Handling
|
|
1366
|
+
// =============================================================================
|
|
1367
|
+
|
|
1368
|
+
// Shape of a Reddit post as consumed from the public .json listing API.
// Only the fields this file actually reads are declared here.
interface RedditPost {
  // Post headline.
  title: string;
  // Body text for self posts; empty string for link posts.
  selftext: string;
  // Poster's username (without the "u/" prefix).
  author: string;
  // Net upvote score at fetch time.
  score: number;
  // Total comment count reported by Reddit.
  num_comments: number;
  // Creation time as a Unix timestamp in seconds (multiplied by 1000 before use).
  created_utc: number;
  // Subreddit name (without the "r/" prefix).
  subreddit: string;
  // For link posts, the external target URL; for self posts, the permalink.
  url: string;
  // True for text ("self") posts, false for link posts.
  is_self: boolean;
}
|
|
1379
|
+
|
|
1380
|
+
// Shape of a Reddit comment node from the .json API.
interface RedditComment {
  // Comment body in Reddit-flavored markdown.
  body: string;
  // Commenter's username.
  author: string;
  // Net upvote score at fetch time.
  score: number;
  // Creation time as a Unix timestamp in seconds.
  created_utc: number;
  // Nested replies use the same listing wrapper shape as top-level comments;
  // absent (or an empty string in the raw API) when there are none.
  replies?: { data: { children: Array<{ data: RedditComment }> } };
}
|
|
1387
|
+
|
|
1388
|
+
/**
|
|
1389
|
+
* Handle Reddit URLs via JSON API
|
|
1390
|
+
*/
|
|
1391
|
+
async function handleReddit(url: string, timeout: number): Promise<RenderResult | null> {
|
|
1392
|
+
try {
|
|
1393
|
+
const parsed = new URL(url);
|
|
1394
|
+
if (!parsed.hostname.includes("reddit.com")) return null;
|
|
1395
|
+
|
|
1396
|
+
const fetchedAt = new Date().toISOString();
|
|
1397
|
+
|
|
1398
|
+
// Append .json to get JSON response
|
|
1399
|
+
let jsonUrl = `${url.replace(/\/$/, "")}.json`;
|
|
1400
|
+
if (parsed.search) {
|
|
1401
|
+
jsonUrl = `${url.replace(/\/$/, "").replace(parsed.search, "")}.json${parsed.search}`;
|
|
1402
|
+
}
|
|
1403
|
+
|
|
1404
|
+
const result = await loadPage(jsonUrl, { timeout });
|
|
1405
|
+
if (!result.ok) return null;
|
|
1406
|
+
|
|
1407
|
+
const data = JSON.parse(result.content);
|
|
1408
|
+
let md = "";
|
|
1409
|
+
|
|
1410
|
+
// Handle different Reddit URL types
|
|
1411
|
+
if (Array.isArray(data) && data.length >= 1) {
|
|
1412
|
+
// Post page (with comments)
|
|
1413
|
+
const postData = data[0]?.data?.children?.[0]?.data as RedditPost | undefined;
|
|
1414
|
+
if (postData) {
|
|
1415
|
+
md = `# ${postData.title}\n\n`;
|
|
1416
|
+
md += `**r/${postData.subreddit}** · u/${postData.author} · ${postData.score} points · ${postData.num_comments} comments\n`;
|
|
1417
|
+
md += `*${new Date(postData.created_utc * 1000).toISOString().split("T")[0]}*\n\n`;
|
|
1418
|
+
|
|
1419
|
+
if (postData.is_self && postData.selftext) {
|
|
1420
|
+
md += `---\n\n${postData.selftext}\n\n`;
|
|
1421
|
+
} else if (!postData.is_self) {
|
|
1422
|
+
md += `**Link:** ${postData.url}\n\n`;
|
|
1423
|
+
}
|
|
1424
|
+
|
|
1425
|
+
// Add comments if available
|
|
1426
|
+
if (data.length >= 2 && data[1]?.data?.children) {
|
|
1427
|
+
md += `---\n\n## Top Comments\n\n`;
|
|
1428
|
+
const comments = data[1].data.children.filter((c: { kind: string }) => c.kind === "t1").slice(0, 10);
|
|
1429
|
+
|
|
1430
|
+
for (const { data: comment } of comments as Array<{ data: RedditComment }>) {
|
|
1431
|
+
md += `### u/${comment.author} · ${comment.score} points\n\n`;
|
|
1432
|
+
md += `${comment.body}\n\n---\n\n`;
|
|
1433
|
+
}
|
|
1434
|
+
}
|
|
1435
|
+
}
|
|
1436
|
+
} else if (data?.data?.children) {
|
|
1437
|
+
// Subreddit or listing page
|
|
1438
|
+
const posts = data.data.children.slice(0, 20) as Array<{ data: RedditPost }>;
|
|
1439
|
+
const subreddit = posts[0]?.data?.subreddit;
|
|
1440
|
+
|
|
1441
|
+
md = `# r/${subreddit || "Reddit"}\n\n`;
|
|
1442
|
+
for (const { data: post } of posts) {
|
|
1443
|
+
md += `- **${post.title}** (${post.score} pts, ${post.num_comments} comments)\n`;
|
|
1444
|
+
md += ` by u/${post.author}\n\n`;
|
|
1445
|
+
}
|
|
1446
|
+
}
|
|
1447
|
+
|
|
1448
|
+
if (!md) return null;
|
|
1449
|
+
|
|
1450
|
+
const output = finalizeOutput(md);
|
|
1451
|
+
return {
|
|
1452
|
+
url,
|
|
1453
|
+
finalUrl: url,
|
|
1454
|
+
contentType: "text/markdown",
|
|
1455
|
+
method: "reddit",
|
|
1456
|
+
content: output.content,
|
|
1457
|
+
fetchedAt,
|
|
1458
|
+
truncated: output.truncated,
|
|
1459
|
+
notes: ["Fetched via Reddit JSON API"],
|
|
1460
|
+
};
|
|
1461
|
+
} catch {}
|
|
1462
|
+
|
|
1463
|
+
return null;
|
|
1464
|
+
}
|
|
1465
|
+
|
|
1466
|
+
// =============================================================================
|
|
1467
|
+
// NPM Special Handling
|
|
1468
|
+
// =============================================================================
|
|
1469
|
+
|
|
1470
|
+
/**
|
|
1471
|
+
* Handle NPM URLs via registry API
|
|
1472
|
+
*/
|
|
1473
|
+
async function handleNpm(url: string, timeout: number): Promise<RenderResult | null> {
|
|
1474
|
+
try {
|
|
1475
|
+
const parsed = new URL(url);
|
|
1476
|
+
if (parsed.hostname !== "www.npmjs.com" && parsed.hostname !== "npmjs.com") return null;
|
|
1477
|
+
|
|
1478
|
+
// Extract package name from /package/[scope/]name
|
|
1479
|
+
const match = parsed.pathname.match(/^\/package\/(.+?)(?:\/|$)/);
|
|
1480
|
+
if (!match) return null;
|
|
1481
|
+
|
|
1482
|
+
let packageName = decodeURIComponent(match[1]);
|
|
1483
|
+
// Handle scoped packages: /package/@scope/name
|
|
1484
|
+
if (packageName.startsWith("@")) {
|
|
1485
|
+
const scopeMatch = parsed.pathname.match(/^\/package\/(@[^/]+\/[^/]+)/);
|
|
1486
|
+
if (scopeMatch) packageName = decodeURIComponent(scopeMatch[1]);
|
|
1487
|
+
}
|
|
1488
|
+
|
|
1489
|
+
const fetchedAt = new Date().toISOString();
|
|
1490
|
+
|
|
1491
|
+
// Fetch from npm registry - use /latest endpoint for smaller response
|
|
1492
|
+
const latestUrl = `https://registry.npmjs.org/${packageName}/latest`;
|
|
1493
|
+
const result = await loadPage(latestUrl, { timeout });
|
|
1494
|
+
|
|
1495
|
+
if (!result.ok) return null;
|
|
1496
|
+
|
|
1497
|
+
let pkg: {
|
|
1498
|
+
name: string;
|
|
1499
|
+
version: string;
|
|
1500
|
+
description?: string;
|
|
1501
|
+
license?: string;
|
|
1502
|
+
homepage?: string;
|
|
1503
|
+
repository?: { url: string } | string;
|
|
1504
|
+
keywords?: string[];
|
|
1505
|
+
maintainers?: Array<{ name: string }>;
|
|
1506
|
+
dependencies?: Record<string, string>;
|
|
1507
|
+
readme?: string;
|
|
1508
|
+
};
|
|
1509
|
+
|
|
1510
|
+
try {
|
|
1511
|
+
pkg = JSON.parse(result.content);
|
|
1512
|
+
} catch {
|
|
1513
|
+
return null; // JSON parse failed (truncated response)
|
|
1514
|
+
}
|
|
1515
|
+
|
|
1516
|
+
let md = `# ${pkg.name}\n\n`;
|
|
1517
|
+
if (pkg.description) md += `${pkg.description}\n\n`;
|
|
1518
|
+
|
|
1519
|
+
md += `**Latest:** ${pkg.version || "unknown"}`;
|
|
1520
|
+
if (pkg.license) md += ` · **License:** ${typeof pkg.license === "string" ? pkg.license : pkg.license}`;
|
|
1521
|
+
md += "\n\n";
|
|
1522
|
+
|
|
1523
|
+
if (pkg.homepage) md += `**Homepage:** ${pkg.homepage}\n`;
|
|
1524
|
+
const repoUrl = typeof pkg.repository === "string" ? pkg.repository : pkg.repository?.url;
|
|
1525
|
+
if (repoUrl) md += `**Repository:** ${repoUrl.replace(/^git\+/, "").replace(/\.git$/, "")}\n`;
|
|
1526
|
+
if (pkg.keywords?.length) md += `**Keywords:** ${pkg.keywords.join(", ")}\n`;
|
|
1527
|
+
if (pkg.maintainers?.length) md += `**Maintainers:** ${pkg.maintainers.map((m) => m.name).join(", ")}\n`;
|
|
1528
|
+
|
|
1529
|
+
if (pkg.dependencies && Object.keys(pkg.dependencies).length > 0) {
|
|
1530
|
+
md += `\n## Dependencies\n\n`;
|
|
1531
|
+
for (const [dep, version] of Object.entries(pkg.dependencies)) {
|
|
1532
|
+
md += `- ${dep}: ${version}\n`;
|
|
1533
|
+
}
|
|
1534
|
+
}
|
|
1535
|
+
|
|
1536
|
+
if (pkg.readme) {
|
|
1537
|
+
md += `\n---\n\n## README\n\n${pkg.readme}\n`;
|
|
1538
|
+
}
|
|
1539
|
+
|
|
1540
|
+
const output = finalizeOutput(md);
|
|
1541
|
+
return {
|
|
1542
|
+
url,
|
|
1543
|
+
finalUrl: url,
|
|
1544
|
+
contentType: "text/markdown",
|
|
1545
|
+
method: "npm",
|
|
1546
|
+
content: output.content,
|
|
1547
|
+
fetchedAt,
|
|
1548
|
+
truncated: output.truncated,
|
|
1549
|
+
notes: ["Fetched via npm registry"],
|
|
1550
|
+
};
|
|
1551
|
+
} catch {}
|
|
1552
|
+
|
|
1553
|
+
return null;
|
|
1554
|
+
}
|
|
1555
|
+
|
|
1556
|
+
// =============================================================================
|
|
1557
|
+
// arXiv Special Handling
|
|
1558
|
+
// =============================================================================
|
|
1559
|
+
|
|
1560
|
+
/**
 * Handle arXiv URLs - fetch abstract + optionally PDF.
 *
 * Resolves the paper id from /abs/... or /pdf/... paths, pulls metadata from
 * the arXiv export API (an Atom feed), and — for PDF URLs — additionally
 * downloads and converts the PDF for full-text content.
 *
 * Returns null for non-arXiv URLs or on any failure, so callers fall back
 * to the generic rendering pipeline.
 */
async function handleArxiv(url: string, timeout: number): Promise<RenderResult | null> {
  try {
    const parsed = new URL(url);
    if (parsed.hostname !== "arxiv.org") return null;

    // Extract paper ID from various URL formats
    // /abs/1234.56789, /pdf/1234.56789, /abs/cs/0123456
    const match = parsed.pathname.match(/\/(abs|pdf)\/(.+?)(?:\.pdf)?$/);
    if (!match) return null;

    const paperId = match[2];
    const fetchedAt = new Date().toISOString();
    const notes: string[] = [];

    // Fetch metadata via arXiv API
    const apiUrl = `https://export.arxiv.org/api/query?id_list=${paperId}`;
    const result = await loadPage(apiUrl, { timeout });

    if (!result.ok) return null;

    // Parse the Atom feed response. parseNoneClosedTags relaxes the parser
    // for XML tags the HTML parser would otherwise mishandle.
    const doc = parseHtml(result.content, { parseNoneClosedTags: true });
    const entry = doc.querySelector("entry");

    // No <entry> means the API found no paper for this id.
    if (!entry) return null;

    // Titles in the feed may contain newlines/extra spaces; collapse them.
    const title = entry.querySelector("title")?.text?.trim()?.replace(/\s+/g, " ");
    const summary = entry.querySelector("summary")?.text?.trim();
    const authors = entry
      .querySelectorAll("author name")
      .map((n) => n.text?.trim())
      .filter(Boolean);
    // Keep only the date portion of the ISO timestamp.
    const published = entry.querySelector("published")?.text?.trim()?.split("T")[0];
    const categories = entry
      .querySelectorAll("category")
      .map((c) => c.getAttribute("term"))
      .filter(Boolean);
    // The Atom entry links its PDF rendition with title="pdf".
    const pdfLink = entry.querySelector('link[title="pdf"]')?.getAttribute("href");

    // Assemble a markdown summary; each metadata line is optional.
    let md = `# ${title || "arXiv Paper"}\n\n`;
    if (authors.length) md += `**Authors:** ${authors.join(", ")}\n`;
    if (published) md += `**Published:** ${published}\n`;
    if (categories.length) md += `**Categories:** ${categories.join(", ")}\n`;
    md += `**arXiv:** ${paperId}\n\n`;
    md += `---\n\n## Abstract\n\n${summary || "No abstract available."}\n\n`;

    // If it was a PDF link or we want full content, try to fetch and convert PDF
    if (match[1] === "pdf" || parsed.pathname.includes(".pdf")) {
      if (pdfLink) {
        notes.push("Fetching PDF for full content...");
        const pdfResult = await fetchBinary(pdfLink, timeout);
        if (pdfResult.ok) {
          const converted = convertWithMarkitdown(pdfResult.buffer, ".pdf", timeout);
          // Length threshold guards against near-empty conversion output.
          if (converted.ok && converted.content.length > 500) {
            md += `---\n\n## Full Paper\n\n${converted.content}\n`;
            notes.push("PDF converted via markitdown");
          }
        }
      }
    }

    const output = finalizeOutput(md);
    return {
      url,
      finalUrl: url,
      contentType: "text/markdown",
      method: "arxiv",
      content: output.content,
      fetchedAt,
      truncated: output.truncated,
      notes: notes.length ? notes : ["Fetched via arXiv API"],
    };
  } catch {}

  // Any thrown error (bad URL, JSON/XML surprises, network) falls through
  // to the generic pipeline via null.
  return null;
}
|
|
1639
|
+
|
|
1640
|
+
// =============================================================================
|
|
1641
|
+
// IACR ePrint Special Handling
|
|
1642
|
+
// =============================================================================
|
|
1643
|
+
|
|
1644
|
+
/**
 * Handle IACR Cryptology ePrint Archive URLs.
 *
 * Resolves the paper id from /YYYY/NNN(.pdf) paths, scrapes the paper's HTML
 * page for metadata (title, authors, abstract, keywords, date), and — for
 * .pdf URLs — additionally downloads and converts the PDF for full text.
 *
 * Returns null for non-ePrint URLs or on any failure so callers fall back
 * to generic rendering.
 */
async function handleIacr(url: string, timeout: number): Promise<RenderResult | null> {
  try {
    const parsed = new URL(url);
    if (parsed.hostname !== "eprint.iacr.org") return null;

    // Extract paper ID from /year/number or /year/number.pdf
    const match = parsed.pathname.match(/\/(\d{4})\/(\d+)(?:\.pdf)?$/);
    if (!match) return null;

    const [, year, number] = match;
    const paperId = `${year}/${number}`;
    const fetchedAt = new Date().toISOString();
    const notes: string[] = [];

    // Fetch the HTML page for metadata (even for .pdf URLs, the abstract
    // page is where the structured metadata lives).
    const pageUrl = `https://eprint.iacr.org/${paperId}`;
    const result = await loadPage(pageUrl, { timeout });

    if (!result.ok) return null;

    const doc = parseHtml(result.content);

    // Extract metadata from the page; each selector has a citation-meta
    // fallback since the page layout may change.
    const title =
      doc.querySelector("h3.mb-3")?.text?.trim() ||
      doc.querySelector('meta[name="citation_title"]')?.getAttribute("content");
    const authors = doc
      .querySelectorAll('meta[name="citation_author"]')
      .map((m) => m.getAttribute("content"))
      .filter(Boolean);
    // Abstract is in <p> after <h5>Abstract</h5>
    const abstractHeading = doc.querySelectorAll("h5").find((h) => h.text?.includes("Abstract"));
    const abstract =
      abstractHeading?.parentNode?.querySelector("p")?.text?.trim() ||
      doc.querySelector('meta[name="description"]')?.getAttribute("content");
    // The keywords element embeds its own "Keywords:" label; strip it.
    const keywords = doc.querySelector(".keywords")?.text?.replace("Keywords:", "").trim();
    const pubDate = doc.querySelector('meta[name="citation_publication_date"]')?.getAttribute("content");

    // Assemble the markdown summary; metadata lines are optional.
    let md = `# ${title || "IACR ePrint Paper"}\n\n`;
    if (authors.length) md += `**Authors:** ${authors.join(", ")}\n`;
    if (pubDate) md += `**Date:** ${pubDate}\n`;
    md += `**ePrint:** ${paperId}\n`;
    if (keywords) md += `**Keywords:** ${keywords}\n`;
    md += `\n---\n\n## Abstract\n\n${abstract || "No abstract available."}\n\n`;

    // If it was a PDF link, try to fetch and convert PDF
    if (parsed.pathname.endsWith(".pdf")) {
      const pdfUrl = `https://eprint.iacr.org/${paperId}.pdf`;
      notes.push("Fetching PDF for full content...");
      const pdfResult = await fetchBinary(pdfUrl, timeout);
      if (pdfResult.ok) {
        const converted = convertWithMarkitdown(pdfResult.buffer, ".pdf", timeout);
        // Length threshold guards against near-empty conversion output.
        if (converted.ok && converted.content.length > 500) {
          md += `---\n\n## Full Paper\n\n${converted.content}\n`;
          notes.push("PDF converted via markitdown");
        }
      }
    }

    const output = finalizeOutput(md);
    return {
      url,
      finalUrl: url,
      contentType: "text/markdown",
      method: "iacr",
      content: output.content,
      fetchedAt,
      truncated: output.truncated,
      notes: notes.length ? notes : ["Fetched from IACR ePrint Archive"],
    };
  } catch {}

  // Any thrown error falls through to the generic pipeline via null.
  return null;
}
|
|
1721
|
+
|
|
1722
|
+
// =============================================================================
|
|
1723
|
+
// GitHub Gist Special Handling
|
|
1724
|
+
// =============================================================================
|
|
1725
|
+
|
|
1726
|
+
/**
|
|
1727
|
+
* Handle GitHub Gist URLs via API
|
|
1728
|
+
*/
|
|
1729
|
+
async function handleGitHubGist(url: string, timeout: number): Promise<RenderResult | null> {
|
|
1730
|
+
try {
|
|
1731
|
+
const parsed = new URL(url);
|
|
1732
|
+
if (parsed.hostname !== "gist.github.com") return null;
|
|
1733
|
+
|
|
1734
|
+
// Extract gist ID from /username/gistId or just /gistId
|
|
1735
|
+
const parts = parsed.pathname.split("/").filter(Boolean);
|
|
1736
|
+
if (parts.length === 0) return null;
|
|
1737
|
+
|
|
1738
|
+
// Gist ID is always the last path segment (or only segment for anonymous gists)
|
|
1739
|
+
const gistId = parts[parts.length - 1];
|
|
1740
|
+
if (!gistId || !/^[a-f0-9]+$/i.test(gistId)) return null;
|
|
1741
|
+
|
|
1742
|
+
const fetchedAt = new Date().toISOString();
|
|
1743
|
+
|
|
1744
|
+
// Fetch via GitHub API
|
|
1745
|
+
const result = await fetchGitHubApi(`/gists/${gistId}`, timeout);
|
|
1746
|
+
if (!result.ok || !result.data) return null;
|
|
1747
|
+
|
|
1748
|
+
const gist = result.data as {
|
|
1749
|
+
description: string | null;
|
|
1750
|
+
owner?: { login: string };
|
|
1751
|
+
created_at: string;
|
|
1752
|
+
updated_at: string;
|
|
1753
|
+
files: Record<string, { filename: string; language: string | null; size: number; content: string }>;
|
|
1754
|
+
html_url: string;
|
|
1755
|
+
};
|
|
1756
|
+
|
|
1757
|
+
const files = Object.values(gist.files);
|
|
1758
|
+
const owner = gist.owner?.login || "anonymous";
|
|
1759
|
+
|
|
1760
|
+
let md = `# Gist by ${owner}\n\n`;
|
|
1761
|
+
if (gist.description) md += `${gist.description}\n\n`;
|
|
1762
|
+
md += `**Created:** ${gist.created_at} · **Updated:** ${gist.updated_at}\n`;
|
|
1763
|
+
md += `**Files:** ${files.length}\n\n`;
|
|
1764
|
+
|
|
1765
|
+
for (const file of files) {
|
|
1766
|
+
const lang = file.language?.toLowerCase() || "";
|
|
1767
|
+
md += `---\n\n## ${file.filename}\n\n`;
|
|
1768
|
+
md += `\`\`\`${lang}\n${file.content}\n\`\`\`\n\n`;
|
|
1769
|
+
}
|
|
1770
|
+
|
|
1771
|
+
const output = finalizeOutput(md);
|
|
1772
|
+
return {
|
|
1773
|
+
url,
|
|
1774
|
+
finalUrl: url,
|
|
1775
|
+
contentType: "text/markdown",
|
|
1776
|
+
method: "github-gist",
|
|
1777
|
+
content: output.content,
|
|
1778
|
+
fetchedAt,
|
|
1779
|
+
truncated: output.truncated,
|
|
1780
|
+
notes: ["Fetched via GitHub API"],
|
|
1781
|
+
};
|
|
1782
|
+
} catch {}
|
|
1783
|
+
|
|
1784
|
+
return null;
|
|
1785
|
+
}
|
|
1786
|
+
|
|
1787
|
+
// =============================================================================
|
|
1788
|
+
// Unified Special Handler Dispatch
|
|
1789
|
+
// =============================================================================
|
|
1790
|
+
|
|
1791
|
+
/**
|
|
1792
|
+
* Try all special handlers
|
|
1793
|
+
*/
|
|
1794
|
+
async function handleSpecialUrls(url: string, timeout: number): Promise<RenderResult | null> {
|
|
1795
|
+
// Order matters - more specific first
|
|
1796
|
+
return (
|
|
1797
|
+
(await handleGitHubGist(url, timeout)) ||
|
|
1798
|
+
(await handleGitHub(url, timeout)) ||
|
|
1799
|
+
(await handleTwitter(url, timeout)) ||
|
|
1800
|
+
(await handleStackOverflow(url, timeout)) ||
|
|
1801
|
+
(await handleWikipedia(url, timeout)) ||
|
|
1802
|
+
(await handleReddit(url, timeout)) ||
|
|
1803
|
+
(await handleNpm(url, timeout)) ||
|
|
1804
|
+
(await handleArxiv(url, timeout)) ||
|
|
1805
|
+
(await handleIacr(url, timeout))
|
|
1806
|
+
);
|
|
1807
|
+
}
|
|
1808
|
+
|
|
1809
|
+
// =============================================================================
|
|
1810
|
+
// Main Render Function
|
|
1811
|
+
// =============================================================================
|
|
1812
|
+
|
|
1813
|
+
/**
|
|
1814
|
+
* Main render function implementing the full pipeline
|
|
1815
|
+
*/
|
|
1816
|
+
async function renderUrl(url: string, timeout: number, raw: boolean = false): Promise<RenderResult> {
|
|
1817
|
+
const notes: string[] = [];
|
|
1818
|
+
const fetchedAt = new Date().toISOString();
|
|
1819
|
+
|
|
1820
|
+
// Step 0: Try special handlers for known sites (unless raw mode)
|
|
1821
|
+
if (!raw) {
|
|
1822
|
+
const specialResult = await handleSpecialUrls(url, timeout);
|
|
1823
|
+
if (specialResult) return specialResult;
|
|
1824
|
+
}
|
|
1825
|
+
|
|
1826
|
+
// Step 1: Normalize URL
|
|
1827
|
+
url = normalizeUrl(url);
|
|
1828
|
+
const origin = getOrigin(url);
|
|
1829
|
+
|
|
1830
|
+
// Step 2: Fetch page
|
|
1831
|
+
const response = await loadPage(url, { timeout });
|
|
1832
|
+
if (!response.ok) {
|
|
1833
|
+
return {
|
|
1834
|
+
url,
|
|
1835
|
+
finalUrl: url,
|
|
1836
|
+
contentType: "unknown",
|
|
1837
|
+
method: "failed",
|
|
1838
|
+
content: "",
|
|
1839
|
+
fetchedAt,
|
|
1840
|
+
truncated: false,
|
|
1841
|
+
notes: ["Failed to fetch URL"],
|
|
1842
|
+
};
|
|
1843
|
+
}
|
|
1844
|
+
|
|
1845
|
+
const { finalUrl, content: rawContent } = response;
|
|
1846
|
+
const mime = normalizeMime(response.contentType);
|
|
1847
|
+
const extHint = getExtensionHint(finalUrl);
|
|
1848
|
+
|
|
1849
|
+
// Step 3: Handle convertible binary files (PDF, DOCX, etc.)
|
|
1850
|
+
if (isConvertible(mime, extHint)) {
|
|
1851
|
+
const binary = await fetchBinary(finalUrl, timeout);
|
|
1852
|
+
if (binary.ok) {
|
|
1853
|
+
const ext = getExtensionHint(finalUrl, binary.contentDisposition) || extHint;
|
|
1854
|
+
const converted = convertWithMarkitdown(binary.buffer, ext, timeout);
|
|
1855
|
+
if (converted.ok && converted.content.trim().length > 50) {
|
|
1856
|
+
notes.push(`Converted with markitdown`);
|
|
1857
|
+
const output = finalizeOutput(converted.content);
|
|
1858
|
+
return {
|
|
1859
|
+
url,
|
|
1860
|
+
finalUrl,
|
|
1861
|
+
contentType: mime,
|
|
1862
|
+
method: "markitdown",
|
|
1863
|
+
content: output.content,
|
|
1864
|
+
fetchedAt,
|
|
1865
|
+
truncated: output.truncated,
|
|
1866
|
+
notes,
|
|
1867
|
+
};
|
|
1868
|
+
}
|
|
1869
|
+
}
|
|
1870
|
+
notes.push("markitdown conversion failed");
|
|
1871
|
+
}
|
|
1872
|
+
|
|
1873
|
+
// Step 4: Handle non-HTML text content
|
|
1874
|
+
const isHtml = mime.includes("html") || mime.includes("xhtml");
|
|
1875
|
+
const isJson = mime.includes("json");
|
|
1876
|
+
const isXml = mime.includes("xml") && !isHtml;
|
|
1877
|
+
const isText = mime.includes("text/plain") || mime.includes("text/markdown");
|
|
1878
|
+
const isFeed = mime.includes("rss") || mime.includes("atom") || mime.includes("feed");
|
|
1879
|
+
|
|
1880
|
+
if (isJson) {
|
|
1881
|
+
const output = finalizeOutput(formatJson(rawContent));
|
|
1882
|
+
return {
|
|
1883
|
+
url,
|
|
1884
|
+
finalUrl,
|
|
1885
|
+
contentType: mime,
|
|
1886
|
+
method: "json",
|
|
1887
|
+
content: output.content,
|
|
1888
|
+
fetchedAt,
|
|
1889
|
+
truncated: output.truncated,
|
|
1890
|
+
notes,
|
|
1891
|
+
};
|
|
1892
|
+
}
|
|
1893
|
+
|
|
1894
|
+
if (isFeed || (isXml && (rawContent.includes("<rss") || rawContent.includes("<feed")))) {
|
|
1895
|
+
const parsed = parseFeedToMarkdown(rawContent);
|
|
1896
|
+
const output = finalizeOutput(parsed);
|
|
1897
|
+
return {
|
|
1898
|
+
url,
|
|
1899
|
+
finalUrl,
|
|
1900
|
+
contentType: mime,
|
|
1901
|
+
method: "feed",
|
|
1902
|
+
content: output.content,
|
|
1903
|
+
fetchedAt,
|
|
1904
|
+
truncated: output.truncated,
|
|
1905
|
+
notes,
|
|
1906
|
+
};
|
|
1907
|
+
}
|
|
1908
|
+
|
|
1909
|
+
if (isText && !looksLikeHtml(rawContent)) {
|
|
1910
|
+
const output = finalizeOutput(rawContent);
|
|
1911
|
+
return {
|
|
1912
|
+
url,
|
|
1913
|
+
finalUrl,
|
|
1914
|
+
contentType: mime,
|
|
1915
|
+
method: "text",
|
|
1916
|
+
content: output.content,
|
|
1917
|
+
fetchedAt,
|
|
1918
|
+
truncated: output.truncated,
|
|
1919
|
+
notes,
|
|
1920
|
+
};
|
|
1921
|
+
}
|
|
1922
|
+
|
|
1923
|
+
// Step 5: For HTML, try digestible formats first (unless raw mode)
|
|
1924
|
+
if (isHtml && !raw) {
|
|
1925
|
+
// 5A: Check for page-specific markdown alternate
|
|
1926
|
+
const alternates = parseAlternateLinks(rawContent, finalUrl);
|
|
1927
|
+
const markdownAlt = alternates.find((alt) => alt.endsWith(".md") || alt.includes("markdown"));
|
|
1928
|
+
if (markdownAlt) {
|
|
1929
|
+
const resolved = markdownAlt.startsWith("http") ? markdownAlt : new URL(markdownAlt, finalUrl).href;
|
|
1930
|
+
const altResult = await loadPage(resolved, { timeout });
|
|
1931
|
+
if (altResult.ok && altResult.content.trim().length > 100 && !looksLikeHtml(altResult.content)) {
|
|
1932
|
+
notes.push(`Used markdown alternate: ${resolved}`);
|
|
1933
|
+
const output = finalizeOutput(altResult.content);
|
|
1934
|
+
return {
|
|
1935
|
+
url,
|
|
1936
|
+
finalUrl,
|
|
1937
|
+
contentType: "text/markdown",
|
|
1938
|
+
method: "alternate-markdown",
|
|
1939
|
+
content: output.content,
|
|
1940
|
+
fetchedAt,
|
|
1941
|
+
truncated: output.truncated,
|
|
1942
|
+
notes,
|
|
1943
|
+
};
|
|
1944
|
+
}
|
|
1945
|
+
}
|
|
1946
|
+
|
|
1947
|
+
// 5B: Try URL.md suffix (llms.txt convention)
|
|
1948
|
+
const mdSuffix = await tryMdSuffix(finalUrl, timeout);
|
|
1949
|
+
if (mdSuffix) {
|
|
1950
|
+
notes.push("Found .md suffix version");
|
|
1951
|
+
const output = finalizeOutput(mdSuffix);
|
|
1952
|
+
return {
|
|
1953
|
+
url,
|
|
1954
|
+
finalUrl,
|
|
1955
|
+
contentType: "text/markdown",
|
|
1956
|
+
method: "md-suffix",
|
|
1957
|
+
content: output.content,
|
|
1958
|
+
fetchedAt,
|
|
1959
|
+
truncated: output.truncated,
|
|
1960
|
+
notes,
|
|
1961
|
+
};
|
|
1962
|
+
}
|
|
1963
|
+
|
|
1964
|
+
// 5C: LLM-friendly endpoints
|
|
1965
|
+
const llmContent = await tryLlmEndpoints(origin, timeout);
|
|
1966
|
+
if (llmContent) {
|
|
1967
|
+
notes.push("Found llms.txt");
|
|
1968
|
+
const output = finalizeOutput(llmContent);
|
|
1969
|
+
return {
|
|
1970
|
+
url,
|
|
1971
|
+
finalUrl,
|
|
1972
|
+
contentType: "text/plain",
|
|
1973
|
+
method: "llms.txt",
|
|
1974
|
+
content: output.content,
|
|
1975
|
+
fetchedAt,
|
|
1976
|
+
truncated: output.truncated,
|
|
1977
|
+
notes,
|
|
1978
|
+
};
|
|
1979
|
+
}
|
|
1980
|
+
|
|
1981
|
+
// 5D: Content negotiation
|
|
1982
|
+
const negotiated = await tryContentNegotiation(url, timeout);
|
|
1983
|
+
if (negotiated) {
|
|
1984
|
+
notes.push(`Content negotiation returned ${negotiated.type}`);
|
|
1985
|
+
const output = finalizeOutput(negotiated.content);
|
|
1986
|
+
return {
|
|
1987
|
+
url,
|
|
1988
|
+
finalUrl,
|
|
1989
|
+
contentType: normalizeMime(negotiated.type),
|
|
1990
|
+
method: "content-negotiation",
|
|
1991
|
+
content: output.content,
|
|
1992
|
+
fetchedAt,
|
|
1993
|
+
truncated: output.truncated,
|
|
1994
|
+
notes,
|
|
1995
|
+
};
|
|
1996
|
+
}
|
|
1997
|
+
|
|
1998
|
+
// 5E: Check for feed alternates
|
|
1999
|
+
const feedAlternates = alternates.filter((alt) => !alt.endsWith(".md") && !alt.includes("markdown"));
|
|
2000
|
+
for (const altUrl of feedAlternates.slice(0, 2)) {
|
|
2001
|
+
const resolved = altUrl.startsWith("http") ? altUrl : new URL(altUrl, finalUrl).href;
|
|
2002
|
+
const altResult = await loadPage(resolved, { timeout });
|
|
2003
|
+
if (altResult.ok && altResult.content.trim().length > 200) {
|
|
2004
|
+
notes.push(`Used feed alternate: ${resolved}`);
|
|
2005
|
+
const parsed = parseFeedToMarkdown(altResult.content);
|
|
2006
|
+
const output = finalizeOutput(parsed);
|
|
2007
|
+
return {
|
|
2008
|
+
url,
|
|
2009
|
+
finalUrl,
|
|
2010
|
+
contentType: "application/feed",
|
|
2011
|
+
method: "alternate-feed",
|
|
2012
|
+
content: output.content,
|
|
2013
|
+
fetchedAt,
|
|
2014
|
+
truncated: output.truncated,
|
|
2015
|
+
notes,
|
|
2016
|
+
};
|
|
2017
|
+
}
|
|
2018
|
+
}
|
|
2019
|
+
|
|
2020
|
+
// Step 6: Render HTML with lynx
|
|
2021
|
+
if (!hasCommand("lynx")) {
|
|
2022
|
+
notes.push("lynx not installed");
|
|
2023
|
+
const output = finalizeOutput(rawContent);
|
|
2024
|
+
return {
|
|
2025
|
+
url,
|
|
2026
|
+
finalUrl,
|
|
2027
|
+
contentType: mime,
|
|
2028
|
+
method: "raw-html",
|
|
2029
|
+
content: output.content,
|
|
2030
|
+
fetchedAt,
|
|
2031
|
+
truncated: output.truncated,
|
|
2032
|
+
notes,
|
|
2033
|
+
};
|
|
2034
|
+
}
|
|
2035
|
+
|
|
2036
|
+
const lynxResult = renderWithLynx(rawContent, timeout);
|
|
2037
|
+
if (!lynxResult.ok) {
|
|
2038
|
+
notes.push("lynx failed");
|
|
2039
|
+
const output = finalizeOutput(rawContent);
|
|
2040
|
+
return {
|
|
2041
|
+
url,
|
|
2042
|
+
finalUrl,
|
|
2043
|
+
contentType: mime,
|
|
2044
|
+
method: "raw-html",
|
|
2045
|
+
content: output.content,
|
|
2046
|
+
fetchedAt,
|
|
2047
|
+
truncated: output.truncated,
|
|
2048
|
+
notes,
|
|
2049
|
+
};
|
|
2050
|
+
}
|
|
2051
|
+
|
|
2052
|
+
// Step 7: If lynx output is low quality, try extracting document links
|
|
2053
|
+
if (isLowQualityOutput(lynxResult.content)) {
|
|
2054
|
+
const docLinks = extractDocumentLinks(rawContent, finalUrl);
|
|
2055
|
+
if (docLinks.length > 0) {
|
|
2056
|
+
const docUrl = docLinks[0];
|
|
2057
|
+
const binary = await fetchBinary(docUrl, timeout);
|
|
2058
|
+
if (binary.ok) {
|
|
2059
|
+
const ext = getExtensionHint(docUrl, binary.contentDisposition);
|
|
2060
|
+
const converted = convertWithMarkitdown(binary.buffer, ext, timeout);
|
|
2061
|
+
if (converted.ok && converted.content.trim().length > lynxResult.content.length) {
|
|
2062
|
+
notes.push(`Extracted and converted document: ${docUrl}`);
|
|
2063
|
+
const output = finalizeOutput(converted.content);
|
|
2064
|
+
return {
|
|
2065
|
+
url,
|
|
2066
|
+
finalUrl,
|
|
2067
|
+
contentType: "application/document",
|
|
2068
|
+
method: "extracted-document",
|
|
2069
|
+
content: output.content,
|
|
2070
|
+
fetchedAt,
|
|
2071
|
+
truncated: output.truncated,
|
|
2072
|
+
notes,
|
|
2073
|
+
};
|
|
2074
|
+
}
|
|
2075
|
+
}
|
|
2076
|
+
}
|
|
2077
|
+
notes.push("Page appears to require JavaScript or is mostly navigation");
|
|
2078
|
+
}
|
|
2079
|
+
|
|
2080
|
+
const output = finalizeOutput(lynxResult.content);
|
|
2081
|
+
return {
|
|
2082
|
+
url,
|
|
2083
|
+
finalUrl,
|
|
2084
|
+
contentType: mime,
|
|
2085
|
+
method: "lynx",
|
|
2086
|
+
content: output.content,
|
|
2087
|
+
fetchedAt,
|
|
2088
|
+
truncated: output.truncated,
|
|
2089
|
+
notes,
|
|
2090
|
+
};
|
|
2091
|
+
}
|
|
2092
|
+
|
|
2093
|
+
// Fallback: return raw content
|
|
2094
|
+
const output = finalizeOutput(rawContent);
|
|
2095
|
+
return {
|
|
2096
|
+
url,
|
|
2097
|
+
finalUrl,
|
|
2098
|
+
contentType: mime,
|
|
2099
|
+
method: "raw",
|
|
2100
|
+
content: output.content,
|
|
2101
|
+
fetchedAt,
|
|
2102
|
+
truncated: output.truncated,
|
|
2103
|
+
notes,
|
|
2104
|
+
};
|
|
2105
|
+
}
|
|
2106
|
+
|
|
2107
|
+
// =============================================================================
|
|
2108
|
+
// Tool Definition
|
|
2109
|
+
// =============================================================================
|
|
2110
|
+
|
|
2111
|
+
// TypeBox schema for the web_fetch tool's input parameters.
const webFetchSchema = Type.Object({
  url: Type.String({ description: "The URL to fetch and render" }),
  // NOTE(review): the default/max in the description are presumably enforced
  // by the executor, not by this schema — confirm against the handler.
  timeout: Type.Optional(Type.Number({ description: "Timeout in seconds (default: 20, max: 120)" })),
  raw: Type.Optional(
    Type.Boolean({ description: "Return raw content without site-specific rendering or LLM-friendly transforms" }),
  ),
});
|
|
2118
|
+
|
|
2119
|
+
// Structured details a web_fetch invocation reports alongside its content.
export interface WebFetchToolDetails {
  // The URL as originally requested.
  url: string;
  // The URL after redirects/normalization.
  finalUrl: string;
  // Normalized MIME type of the rendered content.
  contentType: string;
  // Which pipeline path produced the content (e.g. "lynx", "reddit", "npm").
  method: string;
  // True if the output was cut to fit the size limit.
  truncated: boolean;
  // Human-readable notes about how the content was obtained.
  notes: string[];
}
|
|
2127
|
+
|
|
2128
|
+
export function createWebFetchTool(_cwd: string): AgentTool<typeof webFetchSchema> {
|
|
2129
|
+
return {
|
|
2130
|
+
name: "web_fetch",
|
|
2131
|
+
label: "web_fetch",
|
|
2132
|
+
description: `Fetches content from a specified URL and processes it using an AI model
|
|
2133
|
+
- Takes a URL and a prompt as input
|
|
2134
|
+
- Fetches the URL content, converts HTML to markdown
|
|
2135
|
+
- Processes the content with the prompt using a small, fast model
|
|
2136
|
+
- Returns the model's response about the content
|
|
2137
|
+
- Use this tool when you need to retrieve and analyze web content
|
|
2138
|
+
|
|
2139
|
+
Features:
|
|
2140
|
+
- Site-specific handlers for GitHub (issues, PRs, repos, gists), Stack Overflow, Wikipedia, Reddit, NPM, arXiv, IACR, and Twitter/X
|
|
2141
|
+
- Automatic detection and use of LLM-friendly endpoints (llms.txt, .md suffixes)
|
|
2142
|
+
- Binary file conversion (PDF, DOCX, etc.) via markitdown if available
|
|
2143
|
+
- HTML to text rendering via lynx if available
|
|
2144
|
+
- RSS/Atom feed parsing
|
|
2145
|
+
- JSON pretty-printing
|
|
2146
|
+
|
|
2147
|
+
Usage notes:
|
|
2148
|
+
- IMPORTANT: If an MCP-provided web fetch tool is available, prefer using that tool instead of this one, as it may have fewer restrictions.
|
|
2149
|
+
- The URL must be a fully-formed valid URL
|
|
2150
|
+
- HTTP URLs will be automatically upgraded to HTTPS
|
|
2151
|
+
- The prompt should describe what information you want to extract from the page
|
|
2152
|
+
- This tool is read-only and does not modify any files
|
|
2153
|
+
- Results may be summarized if the content is very large
|
|
2154
|
+
- Includes a self-cleaning 15-minute cache for faster responses when repeatedly accessing the same URL
|
|
2155
|
+
- When a URL redirects to a different host, the tool will inform you and provide the redirect URL in a special format. You should then make a new WebFetch request with the redirect URL to fetch the content.`,
|
|
2156
|
+
parameters: webFetchSchema,
|
|
2157
|
+
execute: async (
|
|
2158
|
+
_toolCallId: string,
|
|
2159
|
+
{ url, timeout = DEFAULT_TIMEOUT, raw = false }: { url: string; timeout?: number; raw?: boolean },
|
|
2160
|
+
) => {
|
|
2161
|
+
// Clamp timeout
|
|
2162
|
+
const effectiveTimeout = Math.min(Math.max(timeout, 1), 120);
|
|
2163
|
+
|
|
2164
|
+
const result = await renderUrl(url, effectiveTimeout, raw);
|
|
2165
|
+
|
|
2166
|
+
// Format output
|
|
2167
|
+
let output = "";
|
|
2168
|
+
output += `URL: ${result.finalUrl}\n`;
|
|
2169
|
+
output += `Content-Type: ${result.contentType}\n`;
|
|
2170
|
+
output += `Method: ${result.method}\n`;
|
|
2171
|
+
if (result.truncated) {
|
|
2172
|
+
output += `Warning: Output was truncated\n`;
|
|
2173
|
+
}
|
|
2174
|
+
if (result.notes.length > 0) {
|
|
2175
|
+
output += `Notes: ${result.notes.join("; ")}\n`;
|
|
2176
|
+
}
|
|
2177
|
+
output += `\n---\n\n`;
|
|
2178
|
+
output += result.content;
|
|
2179
|
+
|
|
2180
|
+
const details: WebFetchToolDetails = {
|
|
2181
|
+
url: result.url,
|
|
2182
|
+
finalUrl: result.finalUrl,
|
|
2183
|
+
contentType: result.contentType,
|
|
2184
|
+
method: result.method,
|
|
2185
|
+
truncated: result.truncated,
|
|
2186
|
+
notes: result.notes,
|
|
2187
|
+
};
|
|
2188
|
+
|
|
2189
|
+
return {
|
|
2190
|
+
content: [{ type: "text", text: output }],
|
|
2191
|
+
details,
|
|
2192
|
+
};
|
|
2193
|
+
},
|
|
2194
|
+
};
|
|
2195
|
+
}
|
|
2196
|
+
|
|
2197
|
+
/**
 * Default web fetch tool using process.cwd() - for backwards compatibility.
 * New callers should prefer createWebFetchTool() with an explicit cwd.
 */
export const webFetchTool = createWebFetchTool(process.cwd());
|
|
2199
|
+
|
|
2200
|
+
// =============================================================================
|
|
2201
|
+
// TUI Rendering
|
|
2202
|
+
// =============================================================================
|
|
2203
|
+
|
|
2204
|
+
import type { Component } from "@oh-my-pi/pi-tui";
|
|
2205
|
+
import { Text } from "@oh-my-pi/pi-tui";
|
|
2206
|
+
import type { Theme } from "../../modes/interactive/theme/theme.js";
|
|
2207
|
+
import type { CustomTool, CustomToolContext, RenderResultOptions } from "../custom-tools/types.js";
|
|
2208
|
+
|
|
2209
|
+
// Tree formatting constants (box-drawing glyphs for the result layout)
const TREE_MID = "├─"; // branch with more siblings below
const TREE_END = "└─"; // last branch in a section
const TREE_PIPE = "│"; // vertical continuation line
const TREE_HOOK = "⎿"; // hook glyph; used for the redirect-target line
|
|
2214
|
+
|
|
2215
|
+
/** Truncate text to max length with ellipsis */
|
|
2216
|
+
function truncate(text: string, maxLen: number): string {
|
|
2217
|
+
if (text.length <= maxLen) return text;
|
|
2218
|
+
return `${text.slice(0, maxLen - 1)}…`;
|
|
2219
|
+
}
|
|
2220
|
+
|
|
2221
|
+
/** Extract domain from URL */
|
|
2222
|
+
function getDomain(url: string): string {
|
|
2223
|
+
try {
|
|
2224
|
+
const u = new URL(url);
|
|
2225
|
+
return u.hostname.replace(/^www\./, "");
|
|
2226
|
+
} catch {
|
|
2227
|
+
return url;
|
|
2228
|
+
}
|
|
2229
|
+
}
|
|
2230
|
+
|
|
2231
|
+
/** Get first N lines of text as preview */
|
|
2232
|
+
function getPreviewLines(text: string, maxLines: number, maxLineLen: number): string[] {
|
|
2233
|
+
const lines = text.split("\n").filter((l) => l.trim());
|
|
2234
|
+
return lines.slice(0, maxLines).map((l) => truncate(l.trim(), maxLineLen));
|
|
2235
|
+
}
|
|
2236
|
+
|
|
2237
|
+
/** Render web fetch call (URL preview) */
|
|
2238
|
+
export function renderWebFetchCall(args: { url: string; timeout?: number; raw?: boolean }, theme: Theme): Component {
|
|
2239
|
+
const domain = getDomain(args.url);
|
|
2240
|
+
const path = truncate(args.url.replace(/^https?:\/\/[^/]+/, ""), 50);
|
|
2241
|
+
const text = `${theme.fg("toolTitle", "Web Fetch")} ${theme.fg("accent", domain)}${theme.fg("dim", path)}`;
|
|
2242
|
+
return new Text(text, 0, 0);
|
|
2243
|
+
}
|
|
2244
|
+
|
|
2245
|
+
/**
 * Render a web fetch result with a tree-based layout.
 *
 * Collapsed view: header line, optional redirect/truncation markers, and up to
 * three preview lines. Expanded view: a metadata subtree (content type,
 * redirect chain, truncation warning, notes) followed by the full content.
 */
export function renderWebFetchResult(
	result: { content: Array<{ type: string; text?: string }>; details?: WebFetchToolDetails },
	options: RenderResultOptions,
	theme: Theme,
): Component {
	const { expanded } = options;
	const details = result.details;

	// Without details we cannot build the tree; show a plain error line.
	if (!details) {
		return new Text(theme.fg("error", "No response data"), 0, 0);
	}

	const domain = getDomain(details.finalUrl);
	const hasRedirect = details.url !== details.finalUrl;
	const hasNotes = details.notes.length > 0;

	// Build header: ● Web Fetch (domain) · method
	// The dot is warning-colored when output was truncated, success otherwise.
	const icon = details.truncated ? theme.fg("warning", "●") : theme.fg("success", "●");
	const expandHint = expanded ? "" : theme.fg("dim", " (Ctrl+O to expand)");
	let text = `${icon} ${theme.fg("toolTitle", "Web Fetch")} ${theme.fg("accent", `(${domain})`)} · ${theme.fg(
		"dim",
		details.method,
	)}${expandHint}`;

	// Get content text
	const contentText = result.content[0]?.text ?? "";
	// Extract just the content part (after the --- separator); the part before
	// it is the metadata header the tool's execute() prepends.
	const contentBody = contentText.includes("---\n\n")
		? contentText.split("---\n\n").slice(1).join("---\n\n")
		: contentText;

	if (!expanded) {
		// Collapsed view: show metadata + 3 preview lines
		if (hasRedirect) {
			text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("muted", "→")} ${theme.fg("mdLinkUrl", details.finalUrl)}`;
		}
		if (details.truncated) {
			text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("warning", "⚠ truncated")}`;
		}

		// Up to 3 non-empty lines, each capped at 100 chars.
		const previewLines = getPreviewLines(contentBody, 3, 100);
		for (const line of previewLines) {
			text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", line)}`;
		}
		// Closing branch shows either a "more lines" count or the content type.
		const totalLines = contentBody.split("\n").filter((l) => l.trim()).length;
		if (totalLines > 3) {
			text += `\n ${theme.fg("dim", TREE_END)} ${theme.fg("muted", `… ${totalLines - 3} more lines`)}`;
		} else {
			text += `\n ${theme.fg("dim", TREE_END)} ${theme.fg("muted", details.contentType)}`;
		}
	} else {
		// Expanded view: full metadata tree + content
		const hasMeta = hasRedirect || hasNotes || details.truncated;

		if (hasMeta) {
			// Metadata section
			text += `\n ${theme.fg("dim", TREE_MID)} ${theme.fg("accent", "Metadata")}`;
			text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", TREE_MID)} ${theme.fg("muted", "Content-Type:")} ${
				details.contentType
			}`;
			if (hasRedirect) {
				// Two lines: the originally requested URL, then a hooked line
				// with the final (post-redirect) URL.
				text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", TREE_MID)} ${theme.fg(
					"muted",
					"Redirected from:",
				)} ${theme.fg("mdLinkUrl", details.url)}`;
				text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", `${TREE_PIPE} ${TREE_HOOK} `)}${theme.fg(
					"mdLinkUrl",
					details.finalUrl,
				)}`;
			}
			if (details.truncated) {
				text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", TREE_MID)} ${theme.fg(
					"warning",
					"⚠ Output was truncated",
				)}`;
			}
			if (hasNotes) {
				// Notes are the last metadata entry, so use the end branch.
				const notesBranch = TREE_END;
				text += `\n ${theme.fg("dim", TREE_PIPE)} ${theme.fg("dim", notesBranch)} ${theme.fg(
					"muted",
					"Notes:",
				)} ${details.notes.join("; ")}`;
			}
		}

		// Content section
		text += `\n ${theme.fg("dim", TREE_END)} ${theme.fg("accent", "Content")}`;
		const contentLines = contentBody.split("\n");
		for (let i = 0; i < contentLines.length; i++) {
			const line = contentLines[i];
			const isLast = i === contentLines.length - 1;
			text += `\n ${isLast ? " " : theme.fg("dim", " ")} ${line}`;
		}
	}

	return new Text(text, 0, 0);
}
|
|
2343
|
+
|
|
2344
|
+
// Parameters accepted by the web_fetch tool; mirrors webFetchSchema.
type WebFetchParams = { url: string; timeout?: number; raw?: boolean };
|
|
2345
|
+
|
|
2346
|
+
/** Web fetch tool as CustomTool (for TUI rendering support) */
|
|
2347
|
+
export const webFetchCustomTool: CustomTool<typeof webFetchSchema, WebFetchToolDetails> = {
|
|
2348
|
+
name: "web_fetch",
|
|
2349
|
+
label: "Web Fetch",
|
|
2350
|
+
description: webFetchTool.description,
|
|
2351
|
+
parameters: webFetchSchema,
|
|
2352
|
+
|
|
2353
|
+
async execute(
|
|
2354
|
+
toolCallId: string,
|
|
2355
|
+
params: WebFetchParams,
|
|
2356
|
+
_onUpdate,
|
|
2357
|
+
_ctx: CustomToolContext,
|
|
2358
|
+
_signal?: AbortSignal,
|
|
2359
|
+
) {
|
|
2360
|
+
return webFetchTool.execute(toolCallId, params);
|
|
2361
|
+
},
|
|
2362
|
+
|
|
2363
|
+
renderCall(args: WebFetchParams, theme: Theme) {
|
|
2364
|
+
return renderWebFetchCall(args, theme);
|
|
2365
|
+
},
|
|
2366
|
+
|
|
2367
|
+
renderResult(result, options: RenderResultOptions, theme: Theme) {
|
|
2368
|
+
return renderWebFetchResult(result, options, theme);
|
|
2369
|
+
},
|
|
2370
|
+
};
|