@kassol/mcp-searxng 1.0.3-custom.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/proxy.js ADDED
@@ -0,0 +1,215 @@
1
+ import { Agent, ProxyAgent } from "undici";
2
+ import { getConnectOptions } from "./tls-config.js";
3
/**
 * Checks if a target URL should bypass the proxy based on the NO_PROXY
 * (or no_proxy) environment variable.
 *
 * Supported patterns (comma-separated, case-insensitive):
 * - "*"            : bypass the proxy for every host
 * - "example.com"  : matches example.com and any subdomain of it
 * - ".example.com" : matches any subdomain of example.com
 *
 * NOTE(review): host:port entries and CIDR ranges, which some tools accept
 * in NO_PROXY, are not supported here — confirm whether callers need them.
 *
 * @param targetUrl - The URL to check against NO_PROXY rules
 * @returns true if the URL should bypass the proxy, false otherwise
 */
function shouldBypassProxy(targetUrl) {
    const noProxy = process.env.NO_PROXY || process.env.no_proxy;
    if (!noProxy) {
        return false;
    }
    // Wildcard bypass
    if (noProxy.trim() === '*') {
        return true;
    }
    let hostname;
    try {
        hostname = new URL(targetUrl).hostname.toLowerCase();
    }
    catch {
        // Invalid URL, don't bypass
        return false;
    }
    // Parse comma-separated list of bypass patterns
    for (const rawPattern of noProxy.split(',')) {
        const pattern = rawPattern.trim().toLowerCase();
        if (!pattern) {
            continue;
        }
        // Exact hostname match (covers both ".foo" and "foo" pattern forms,
        // so no second exact-match check is needed below)
        if (hostname === pattern) {
            return true;
        }
        // Domain suffix match with leading dot (e.g., .example.com matches sub.example.com)
        if (pattern.startsWith('.') && hostname.endsWith(pattern)) {
            return true;
        }
        // Domain suffix match without leading dot (e.g., example.com matches sub.example.com)
        if (!pattern.startsWith('.') && hostname.endsWith(`.${pattern}`)) {
            return true;
        }
    }
    return false;
}
54
/**
 * Discriminator values for which outbound request family a proxy applies to.
 * Used to select between SEARCH_* and URL_READER_* proxy env variables.
 */
export const ProxyType = {
    // Proxy used for SearXNG search requests
    SEARCH: 'search',
    // Proxy used for web_url_read requests
    URL_READER: 'url_reader',
};
61
/**
 * Gets the proxy URL for the specified proxy type.
 *
 * Resolution order (unchanged from the previous hand-unrolled chains):
 * type-specific variables first — upper-case pair, then lower-case pair —
 * followed by the global HTTP(S)_PROXY variables in the same pattern.
 * Within each pair, the variable matching the target URL's protocol is
 * preferred (HTTPS_* first for https targets, HTTP_* first otherwise).
 *
 * @param type - The type of proxy to get ('search' or 'url_reader'), or undefined for global-only lookup
 * @param targetUrl - Optional target URL whose protocol is used to select between HTTP and HTTPS proxies
 * @returns The first configured (non-empty) proxy URL, or undefined if none is set
 */
function getProxyUrl(type, targetUrl) {
    let isHttps = false;
    if (targetUrl) {
        try {
            isHttps = new URL(targetUrl).protocol === 'https:';
        }
        catch {
            // Unparseable target URL: fall back to the HTTP-first ordering
            isHttps = false;
        }
    }
    // Within each pair, prefer the variable matching the target protocol.
    const pair = isHttps ? ['HTTPS_PROXY', 'HTTP_PROXY'] : ['HTTP_PROXY', 'HTTPS_PROXY'];
    // Literal keys mirror the ProxyType values ('search' / 'url_reader').
    const prefixes = { search: 'SEARCH_', url_reader: 'URL_READER_' };
    const prefix = type ? prefixes[type] : undefined;
    const candidates = [];
    if (prefix) {
        // Type-specific variables: upper-case pair, then lower-case pair
        for (const name of pair) {
            candidates.push(prefix + name);
        }
        for (const name of pair) {
            candidates.push((prefix + name).toLowerCase());
        }
    }
    // Global fallbacks: upper-case pair, then lower-case pair
    for (const name of pair) {
        candidates.push(name);
    }
    for (const name of pair) {
        candidates.push(name.toLowerCase());
    }
    // First non-empty value wins
    for (const name of candidates) {
        const value = process.env[name];
        if (value) {
            return value;
        }
    }
    return undefined;
}
133
/**
 * Creates a proxy agent dispatcher for Node.js fetch API.
 *
 * Node.js fetch is backed by Undici, which takes a 'dispatcher' option
 * rather than 'agent'; the returned ProxyAgent plugs into that option.
 *
 * Proxy selection is delegated to getProxyUrl (type-specific variables
 * first, then the global HTTP(S)_PROXY family, ordered by the target URL's
 * protocol); NO_PROXY / no_proxy bypass rules are honoured via
 * shouldBypassProxy.
 *
 * @param targetUrl - Optional target URL, used for protocol selection and NO_PROXY checks
 * @param type - Optional proxy type ('search' or 'url_reader') for separate proxy configs
 * @returns ProxyAgent dispatcher for fetch, or undefined if no proxy configured or bypassed
 * @throws Error when the configured proxy URL is malformed or uses an unsupported scheme
 */
export function createProxyAgent(targetUrl, type) {
    const proxyUrl = getProxyUrl(type, targetUrl);
    // No proxy configured, or the target host is exempted by NO_PROXY
    if (!proxyUrl || (targetUrl && shouldBypassProxy(targetUrl))) {
        return undefined;
    }
    // Validate and normalize proxy URL
    let parsed;
    try {
        parsed = new URL(proxyUrl);
    }
    catch {
        throw new Error(`Invalid proxy URL: ${proxyUrl}. ` +
            "Please provide a valid URL (e.g., http://proxy:8080 or http://user:pass@proxy:8080)");
    }
    // Only plain HTTP(S) proxies are supported
    const supportedProtocols = ['http:', 'https:'];
    if (!supportedProtocols.includes(parsed.protocol)) {
        throw new Error(`Unsupported proxy protocol: ${parsed.protocol}. ` +
            "Only HTTP and HTTPS proxies are supported.");
    }
    // Rebuild the base proxy URL, keeping credentials but dropping any path
    let auth = '';
    if (parsed.username) {
        auth = parsed.password
            ? `${parsed.username}:${parsed.password}@`
            : `${parsed.username}@`;
    }
    const uri = `${parsed.protocol}//${auth}${parsed.host}`;
    // Undici ProxyAgent is compatible with fetch's dispatcher option;
    // connect options carry the system CA bundle for TLS.
    return new ProxyAgent({ uri, connect: getConnectOptions() });
}
193
/**
 * Returns a singleton undici Agent with system CA certificates in the connect
 * options. Used as a dispatcher when no proxy is configured, so undici's
 * fetch trusts system CAs rather than only Node's compiled-in bundle.
 *
 * The agent (and the CA bundle disk read) is created at most once and reused
 * across requests, avoiding repeated synchronous I/O and extra connection
 * pools.
 *
 * Returns undefined if no system CA bundle is found — callers should treat
 * undefined as "use Node's default behavior".
 */
let _defaultAgentInitialized = false;
let _defaultAgent;
export function createDefaultAgent() {
    // Fast path: initialization already attempted (successfully or not)
    if (_defaultAgentInitialized) {
        return _defaultAgent;
    }
    _defaultAgentInitialized = true;
    const connectOpts = getConnectOptions();
    // Only build an Agent when there is actually a CA bundle to apply
    if (Object.keys(connectOpts).length > 0) {
        _defaultAgent = new Agent({ connect: connectOpts });
    }
    return _defaultAgent;
}
@@ -0,0 +1,2 @@
1
/** Builds the JSON string for the "current configuration" MCP resource. */
export declare function createConfigResource(): string;
/** Builds the markdown help text exposed as the server's help MCP resource. */
export declare function createHelpResource(): string;
@@ -0,0 +1,114 @@
1
+ import { getCurrentLogLevel } from "./logging.js";
2
+ import { packageVersion } from "./index.js";
3
+ import { getHttpSecurityConfig } from "./http-security.js";
4
/**
 * Builds the JSON "current configuration" resource.
 *
 * When hardened HTTP mode is enabled without exposeFullConfig, the SearXNG
 * URL itself is withheld and only a configured/not-configured boolean is
 * reported. All other flags are booleans so no secrets leak into the output.
 */
export function createConfigResource() {
    const security = getHttpSecurityConfig();
    const showFullConfig = !security.harden || security.exposeFullConfig;
    const env = process.env;
    const environment = {
        ...(showFullConfig
            ? { searxngUrl: env.SEARXNG_URL || "(not configured)" }
            : { searxngUrlConfigured: !!env.SEARXNG_URL }),
        hasAuth: !!(env.AUTH_USERNAME && env.AUTH_PASSWORD),
        hasSearxngHeaders: !!env.SEARXNG_HEADERS,
        hasUrlReaderHeaders: !!env.URL_READER_HEADERS,
        hasProxy: !!(env.HTTP_PROXY || env.HTTPS_PROXY || env.http_proxy || env.https_proxy),
        hasNoProxy: !!(env.NO_PROXY || env.no_proxy),
        nodeVersion: process.version,
        currentLogLevel: getCurrentLogLevel()
    };
    const config = {
        serverInfo: {
            name: "kassol/mcp-searxng",
            version: packageVersion,
            description: "MCP server for SearXNG integration"
        },
        environment,
        capabilities: {
            tools: ["searxng_web_search", "web_url_read"],
            logging: true,
            resources: true,
            transports: env.MCP_HTTP_PORT ? ["stdio", "http"] : ["stdio"]
        }
    };
    return JSON.stringify(config, null, 2);
}
34
/**
 * Builds the static markdown help resource describing the server's tools,
 * configuration variables, transport modes, usage examples, and
 * troubleshooting steps. The text is a fixed template with no interpolation.
 */
export function createHelpResource() {
    return `# SearXNG MCP Server Help

## Overview
This is a Model Context Protocol (MCP) server that provides web search capabilities through SearXNG and URL content reading functionality.

## Available Tools

### 1. searxng_web_search
Performs web searches using the configured SearXNG instance.

**Parameters:**
- \`query\` (required): The search query string
- \`pageno\` (optional): Page number (default: 1)
- \`time_range\` (optional): Filter by time - "day", "month", or "year"
- \`language\` (optional): Language code like "en", "fr", "de" (default: "all")
- \`safesearch\` (optional): Safe search level - 0 (none), 1 (moderate), 2 (strict)

### 2. web_url_read
Reads and converts web page content to Markdown format.

**Parameters:**
- \`url\` (required): The URL to fetch and convert

## Configuration

### Required Environment Variables
- \`SEARXNG_URL\`: URL of your SearXNG instance (e.g., http://localhost:8080)

### Optional Environment Variables
- \`AUTH_USERNAME\` & \`AUTH_PASSWORD\`: Basic authentication for SearXNG
- \`USER_AGENT\`: Global User-Agent header for outgoing requests
- \`URL_READER_USER_AGENT\`: User-Agent for \`web_url_read\`, overrides \`USER_AGENT\`
- \`SEARXNG_HEADERS\`: Extra JSON headers for SearXNG search requests
- \`URL_READER_HEADERS\`: Extra JSON headers for URL read requests
- \`HTTP_PROXY\` / \`HTTPS_PROXY\`: Proxy server configuration
- \`NO_PROXY\` / \`no_proxy\`: Comma-separated list of hosts to bypass proxy
- \`MCP_HTTP_PORT\`: Enable HTTP transport on specified port

## Transport Modes

### STDIO (Default)
Standard input/output transport for desktop clients like Claude Desktop.

### HTTP (Optional)
RESTful HTTP transport for web applications. Set \`MCP_HTTP_PORT\` to enable.

### Hardened HTTP Mode (Optional)
Default behavior remains compatible for existing deployments.
For network-exposed HTTP transport, enable:
- \`MCP_HTTP_HARDEN\`
- \`MCP_HTTP_AUTH_TOKEN\`
- \`MCP_HTTP_ALLOWED_ORIGINS\`

## Usage Examples

### Search for recent news
\`\`\`
Tool: searxng_web_search
Args: {"query": "latest AI developments", "time_range": "day"}
\`\`\`

### Read a specific article
\`\`\`
Tool: web_url_read
Args: {"url": "https://example.com/article"}
\`\`\`

## Troubleshooting

1. **"SEARXNG_URL not set"**: Configure the SEARXNG_URL environment variable
2. **Network errors**: Check if SearXNG is running and accessible
3. **Empty results**: Try different search terms or check SearXNG instance
4. **Timeout errors**: The server has a 10-second timeout for URL fetching

Use logging level "debug" for detailed request information.

## Current Configuration
See the "Current Configuration" resource for live settings.
`;
}
@@ -0,0 +1,2 @@
1
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
/**
 * Performs a web search against the configured SearXNG instance and resolves
 * with the results formatted as text (or a no-results message).
 * Throws on configuration, network, server, JSON-parse, or data errors.
 */
export declare function performWebSearch(mcpServer: McpServer, query: string, pageno?: number, time_range?: string, language?: string, safesearch?: number): Promise<string>;
package/dist/search.js ADDED
@@ -0,0 +1,133 @@
1
+ import { createProxyAgent, createDefaultAgent, ProxyType } from "./proxy.js";
2
+ import { logMessage } from "./logging.js";
3
+ import { mergeHeaders, parseHeadersFromEnv } from "./headers.js";
4
+ import { MCPSearXNGError, validateEnvironment, createNetworkError, createServerError, createJSONError, createDataError, createNoResultsMessage } from "./error-handler.js";
5
/**
 * Performs a web search against the configured SearXNG instance.
 *
 * @param mcpServer - MCP server instance used for log notifications
 * @param query - Search query string
 * @param pageno - Result page number, starting at 1
 * @param time_range - Optional time filter: "day", "month" or "year" (others ignored)
 * @param language - Language code (e.g. "en"); "all" sends no language param
 * @param safesearch - Safe-search level 0 | 1 | 2 (others ignored)
 * @returns Formatted result list, or a no-results message
 * @throws MCPSearXNGError on invalid configuration; errors from the
 *         error-handler helpers on network, HTTP, JSON, and data failures
 */
export async function performWebSearch(mcpServer, query, pageno = 1, time_range, language = "all", safesearch) {
    const startTime = Date.now();
    // Build detailed log message with all parameters.
    // FIX: use an explicit undefined check so safesearch level 0 (a valid,
    // documented value that IS sent to the server below) appears in the log.
    const searchParams = [
        `page ${pageno}`,
        `lang: ${language}`,
        time_range ? `time: ${time_range}` : null,
        safesearch !== undefined ? `safesearch: ${safesearch}` : null
    ].filter(Boolean).join(", ");
    logMessage(mcpServer, "info", `Starting web search: "${query}" (${searchParams})`);
    const validationError = validateEnvironment();
    if (validationError) {
        logMessage(mcpServer, "error", "Configuration invalid");
        throw new MCPSearXNGError(validationError);
    }
    // Normalize the base URL with a trailing slash so 'search' resolves
    // under it rather than replacing the last path segment.
    const searxngUrl = process.env.SEARXNG_URL;
    const parsedUrl = new URL(searxngUrl.endsWith('/') ? searxngUrl : searxngUrl + '/');
    const url = new URL('search', parsedUrl);
    url.searchParams.set("q", query);
    url.searchParams.set("format", "json");
    url.searchParams.set("pageno", pageno.toString());
    // Invalid time_range / safesearch values are silently dropped
    if (time_range !== undefined &&
        ["day", "month", "year"].includes(time_range)) {
        url.searchParams.set("time_range", time_range);
    }
    if (language && language !== "all") {
        url.searchParams.set("language", language);
    }
    if (safesearch !== undefined && [0, 1, 2].includes(safesearch)) {
        url.searchParams.set("safesearch", safesearch.toString());
    }
    // Prepare request options with headers
    const requestOptions = {
        method: "GET"
    };
    // Add proxy or default dispatcher (includes system CA certs for TLS)
    const proxyAgent = createProxyAgent(url.toString(), ProxyType.SEARCH);
    const dispatcher = proxyAgent ?? createDefaultAgent();
    if (dispatcher) {
        requestOptions.dispatcher = dispatcher;
    }
    // Add basic authentication if credentials are provided
    const username = process.env.AUTH_USERNAME;
    const password = process.env.AUTH_PASSWORD;
    if (username && password) {
        const base64Auth = Buffer.from(`${username}:${password}`).toString('base64');
        requestOptions.headers = {
            ...requestOptions.headers,
            'Authorization': `Basic ${base64Auth}`
        };
    }
    // Add User-Agent header if configured
    const userAgent = process.env.USER_AGENT;
    if (userAgent) {
        requestOptions.headers = {
            ...requestOptions.headers,
            'User-Agent': userAgent
        };
    }
    // Extra headers from SEARXNG_HEADERS override/extend the defaults
    const additionalHeaders = parseHeadersFromEnv("SEARXNG_HEADERS");
    if (Object.keys(additionalHeaders).length > 0) {
        requestOptions.headers = mergeHeaders(requestOptions.headers, additionalHeaders);
    }
    // Fetch with enhanced error handling
    let response;
    try {
        logMessage(mcpServer, "info", `Making request to: ${url.toString()}`);
        response = await fetch(url.toString(), requestOptions);
    }
    catch (error) {
        logMessage(mcpServer, "error", `Network error during search request: ${error.message}`, { query, url: url.toString() });
        const context = {
            url: url.toString(),
            searxngUrl,
            proxyAgent: !!dispatcher,
            username
        };
        throw createNetworkError(error, context);
    }
    if (!response.ok) {
        let responseBody;
        try {
            responseBody = await response.text();
        }
        catch {
            responseBody = '[Could not read response body]';
        }
        const context = {
            url: url.toString(),
            searxngUrl
        };
        throw createServerError(response.status, response.statusText, responseBody, context);
    }
    // FIX: read the body once as text, then parse. The previous code called
    // response.json() and, on failure, response.text() — but the json() call
    // had already consumed the body stream, so the raw text was never
    // available for the error report.
    let rawBody;
    try {
        rawBody = await response.text();
    }
    catch {
        rawBody = '[Could not read response text]';
    }
    let data;
    try {
        data = JSON.parse(rawBody);
    }
    catch {
        const context = { url: url.toString() };
        throw createJSONError(rawBody, context);
    }
    if (!data.results) {
        const context = { url: url.toString(), query };
        throw createDataError(data, context);
    }
    // Normalize results, defaulting missing fields
    const results = data.results.map((result) => ({
        title: result.title || "",
        content: result.content || "",
        url: result.url || "",
        score: result.score || 0,
    }));
    if (results.length === 0) {
        logMessage(mcpServer, "info", `No results found for query: "${query}"`);
        return createNoResultsMessage(query);
    }
    const duration = Date.now() - startTime;
    logMessage(mcpServer, "info", `Search completed: "${query}" (${searchParams}) - ${results.length} results in ${duration}ms`);
    return results
        .map((r) => `Title: ${r.title}\nDescription: ${r.content}\nURL: ${r.url}\nRelevance Score: ${r.score.toFixed(3)}`)
        .join("\n\n");
}
@@ -0,0 +1,19 @@
1
/**
 * Reads system CA certificates from well-known bundle paths.
 * Returns null on Windows (no universal file path) or if no bundle is found.
 *
 * On Windows, users should set NODE_EXTRA_CA_CERTS pointing to a PEM file.
 *
 * @returns The PEM bundle contents as a string, or null when unavailable.
 */
export declare function getSystemCACerts(): string | null;
/**
 * Returns undici `connect` options with system CA certs, or an empty object
 * if no system CA bundle is found (undici uses Node's compiled-in Mozilla
 * bundle in that case).
 *
 * Usage:
 *   new Agent({ connect: getConnectOptions() })
 *   new ProxyAgent({ uri: proxyUrl, connect: getConnectOptions() })
 *
 * @returns `{ ca }` when a bundle was read, otherwise `{}`.
 */
export declare function getConnectOptions(): {
    ca: string;
} | Record<string, never>;
@@ -0,0 +1,49 @@
1
+ import { existsSync, readFileSync } from "node:fs";
2
+ import { platform } from "node:process";
3
+ /**
4
+ * Ordered list of well-known system CA bundle paths.
5
+ * Checked in order; first path that exists and is readable wins.
6
+ */
7
+ const CA_BUNDLE_PATHS = [
8
+ "/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/WSL2
9
+ "/etc/pki/tls/certs/ca-bundle.crt", // RHEL/CentOS/Fedora
10
+ "/etc/ssl/ca-bundle.pem", // OpenSUSE
11
+ "/etc/ssl/cert.pem", // Alpine, macOS
12
+ ];
13
+ /**
14
+ * Reads system CA certificates from well-known bundle paths.
15
+ * Returns null on Windows (no universal file path) or if no bundle is found.
16
+ *
17
+ * On Windows, users should set NODE_EXTRA_CA_CERTS pointing to a PEM file.
18
+ */
19
+ export function getSystemCACerts() {
20
+ // Windows has no universal CA bundle path; skip auto-detection
21
+ if (platform === "win32") {
22
+ return null;
23
+ }
24
+ for (const caPath of CA_BUNDLE_PATHS) {
25
+ if (existsSync(caPath)) {
26
+ try {
27
+ return readFileSync(caPath, "utf8");
28
+ }
29
+ catch {
30
+ // File exists but is unreadable (permissions); try next
31
+ continue;
32
+ }
33
+ }
34
+ }
35
+ return null;
36
+ }
37
+ /**
38
+ * Returns undici `connect` options with system CA certs, or an empty object
39
+ * if no system CA bundle is found (undici uses Node's compiled-in Mozilla
40
+ * bundle in that case).
41
+ *
42
+ * Usage:
43
+ * new Agent({ connect: getConnectOptions() })
44
+ * new ProxyAgent({ uri: proxyUrl, connect: getConnectOptions() })
45
+ */
46
+ export function getConnectOptions() {
47
+ const ca = getSystemCACerts();
48
+ return ca !== null ? { ca } : {};
49
+ }
@@ -0,0 +1,18 @@
1
import { Tool } from "@modelcontextprotocol/sdk/types.js";
/**
 * Subset of the SearXNG JSON search response consumed by this server.
 */
export interface SearXNGWeb {
    results: Array<{
        title: string;
        content: string;
        url: string;
        score: number;
    }>;
}
/**
 * Type guard validating the arguments passed to the searxng_web_search tool.
 * Only `query` is checked at runtime; the optional fields are trusted.
 */
export declare function isSearXNGWebSearchArgs(args: unknown): args is {
    query: string;
    pageno?: number;
    time_range?: string;
    language?: string;
    safesearch?: number;
};
/** MCP tool definition for searxng_web_search. */
export declare const WEB_SEARCH_TOOL: Tool;
/** MCP tool definition for web_url_read. */
export declare const READ_URL_TOOL: Tool;
package/dist/types.js ADDED
@@ -0,0 +1,87 @@
1
/**
 * Runtime type guard for searxng_web_search tool arguments: accepts any
 * non-null object carrying a string "query" property. Optional fields
 * (pageno, time_range, language, safesearch) are not validated here.
 */
export function isSearXNGWebSearchArgs(args) {
    if (typeof args !== "object" || args === null) {
        return false;
    }
    return "query" in args && typeof args.query === "string";
}
7
// MCP tool definition for the SearXNG web search tool. The inputSchema
// mirrors the parameters accepted by performWebSearch in search.js.
export const WEB_SEARCH_TOOL = {
    name: "searxng_web_search",
    description: "Performs a web search using the SearXNG API, ideal for general queries, news, articles, and online content. " +
        "Use this for broad information gathering, recent events, or when you need diverse web sources.",
    annotations: {
        readOnlyHint: true, // tool does not modify any state
        openWorldHint: true, // tool reaches out to the open web
    },
    inputSchema: {
        type: "object",
        properties: {
            query: {
                type: "string",
                description: "The search query. This is the main input for the web search",
            },
            pageno: {
                type: "number",
                description: "Search page number (starts at 1)",
                default: 1,
            },
            time_range: {
                type: "string",
                description: "Time range of search (day, month, year)",
                enum: ["day", "month", "year"],
            },
            language: {
                type: "string",
                description: "Language code for search results (e.g., 'en', 'fr', 'de'). Default is instance-dependent.",
                default: "all",
            },
            safesearch: {
                type: "number",
                description: "Safe search filter level (0: None, 1: Moderate, 2: Strict)",
                enum: [0, 1, 2],
                default: 0,
            },
        },
        required: ["query"], // query is the only mandatory argument
    },
};
47
// MCP tool definition for reading a URL and converting it to markdown.
// The pagination-related properties mirror the PaginationOptions interface
// declared for url-reader.
export const READ_URL_TOOL = {
    name: "web_url_read",
    description: "Read the content from an URL. " +
        "Use this for further information retrieving to understand the content of each URL.",
    annotations: {
        readOnlyHint: true, // tool does not modify any state
        openWorldHint: true, // tool reaches out to the open web
    },
    inputSchema: {
        type: "object",
        properties: {
            url: {
                type: "string",
                description: "URL",
            },
            startChar: {
                type: "number",
                description: "Starting character position for content extraction (default: 0)",
                minimum: 0,
            },
            maxLength: {
                type: "number",
                description: "Maximum number of characters to return",
                minimum: 1,
            },
            section: {
                type: "string",
                description: "Extract content under a specific heading (searches for heading text)",
            },
            paragraphRange: {
                type: "string",
                description: "Return specific paragraph ranges (e.g., '1-5', '3', '10-')",
            },
            readHeadings: {
                type: "boolean",
                description: "Return only a list of headings instead of full content",
            },
        },
        required: ["url"], // url is the only mandatory argument
    },
};
@@ -0,0 +1,10 @@
1
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
/**
 * Options controlling which slice of the converted markdown is returned.
 */
interface PaginationOptions {
    startChar?: number; // starting character position for extraction (default: 0)
    maxLength?: number; // maximum number of characters to return
    section?: string; // extract content under a specific heading
    paragraphRange?: string; // paragraph range spec, e.g. '1-5', '3', '10-'
    readHeadings?: boolean; // return only the list of headings
}
/**
 * Fetches a URL, converts its content to markdown, and applies the requested
 * pagination options.
 * NOTE(review): the default for timeoutMs presumably matches the 10-second
 * limit mentioned in the help resource — confirm in the implementation.
 */
export declare function fetchAndConvertToMarkdown(mcpServer: McpServer, url: string, timeoutMs?: number, paginationOptions?: PaginationOptions): Promise<string>;
export {};
+ export {};