@arabold/docs-mcp-server 1.10.0 → 1.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +85 -245
- package/dist/{chunk-VTO2ED43.js → chunk-VF2RUEVV.js} +37 -41
- package/dist/chunk-VF2RUEVV.js.map +1 -0
- package/dist/cli.js +4 -4
- package/dist/cli.js.map +1 -1
- package/dist/server.js +567 -362
- package/dist/server.js.map +1 -1
- package/package.json +2 -2
- package/dist/chunk-VTO2ED43.js.map +0 -1
package/dist/server.js
CHANGED
@@ -1,14 +1,17 @@
 #!/usr/bin/env node
 import {
   CancelJobTool,
+  DEFAULT_HTTP_PORT,
   DEFAULT_MAX_DEPTH,
   DEFAULT_MAX_PAGES,
+  DEFAULT_PROTOCOL,
   DocumentManagementService,
   FetchUrlTool,
   FileFetcher,
   FindVersionTool,
   GetJobInfoTool,
   HttpFetcher,
+  LibraryNotFoundError,
   ListJobsTool,
   ListLibrariesTool,
   PipelineJobStatus,
@@ -19,13 +22,68 @@ import {
   VersionNotFoundError,
   logger,
   setLogLevel
-} from "./chunk-VTO2ED43.js";
+} from "./chunk-VF2RUEVV.js";
 import "./chunk-YCXNASA6.js";

+// src/server.ts
+import { program } from "commander";
+
 // src/mcp/index.ts
 import "dotenv/config";
+
+// src/mcp/services.ts
+var docService;
+var pipelineManager;
+async function initializeServices() {
+  if (docService || pipelineManager) {
+    logger.warn("Services already initialized.");
+    return;
+  }
+  docService = new DocumentManagementService();
+  try {
+    await docService.initialize();
+    logger.debug("DocumentManagementService initialized.");
+    pipelineManager = new PipelineManager(docService);
+    await pipelineManager.start();
+    logger.debug("PipelineManager initialized and started.");
+  } catch (error) {
+    logger.error(`Failed to initialize services: ${error}`);
+    await shutdownServices();
+    throw error;
+  }
+}
+async function shutdownServices() {
+  if (pipelineManager) {
+    await pipelineManager.stop();
+    logger.info("PipelineManager stopped.");
+    pipelineManager = void 0;
+  }
+  if (docService) {
+    await docService.shutdown();
+    logger.info("DocumentManagementService shutdown.");
+    docService = void 0;
+  }
+}
+function getDocService() {
+  if (!docService) {
+    throw new Error("DocumentManagementService has not been initialized.");
+  }
+  return docService;
+}
+function getPipelineManager() {
+  if (!pipelineManager) {
+    throw new Error("PipelineManager has not been initialized.");
+  }
+  return pipelineManager;
+}
+
+// src/mcp/startHttpServer.ts
+import * as http from "node:http";
+import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
+import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
+
+// src/mcp/mcpServer.ts
 import { McpServer, ResourceTemplate } from "@modelcontextprotocol/sdk/server/mcp.js";
-import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 import { z } from "zod";

 // src/mcp/utils.ts
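The new src/mcp/services.ts section above moves service construction out of the server bootstrap and into module-level singletons with explicit lifecycle functions. A minimal sketch of how the rest of this bundle is expected to use them (it mirrors the initializeTools/startServer code further down in this diff; the wrapping function here is illustrative only):

// Sketch (JavaScript): consuming the service singletons introduced above.
// initializeServices/getDocService/getPipelineManager/shutdownServices are the
// functions added in this hunk; exampleBootstrap is a hypothetical wrapper.
async function exampleBootstrap() {
  await initializeServices();                   // warns and returns early if called twice
  const docService = getDocService();           // throws if initialization never ran
  const pipelineManager = getPipelineManager();
  // ... build tools from docService and pipelineManager ...
  await shutdownServices();                     // stops the pipeline, then shuts down the doc service
}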
@@ -52,176 +110,176 @@ function createError(text) {
   };
 }

-// src/mcp/
- …
-    search: new SearchTool(docService),
-    listJobs: new ListJobsTool(pipelineManager),
-    getJobInfo: new GetJobInfoTool(pipelineManager),
-    cancelJob: new CancelJobTool(pipelineManager),
-    remove: new RemoveTool(docService),
-    // FetchUrlTool now uses middleware pipeline internally
-    fetchUrl: new FetchUrlTool(new HttpFetcher(), new FileFetcher())
-  };
-  const server = new McpServer(
-    {
-      name: "docs-mcp-server",
-      version: "0.1.0"
-    },
-    {
-      capabilities: {
-        tools: {},
-        prompts: {},
-        resources: {}
-      }
+// src/mcp/mcpServer.ts
+function createMcpServerInstance(tools) {
+  const server = new McpServer(
+    {
+      name: "docs-mcp-server",
+      version: "0.1.0"
+    },
+    {
+      capabilities: {
+        tools: {},
+        prompts: {},
+        resources: {}
       }
- …
-        });
-        if ("jobId" in result) {
-          return createResponse(`\u{1F680} Scraping job started with ID: ${result.jobId}.`);
+    }
+  );
+  server.tool(
+    "scrape_docs",
+    "Scrape and index documentation from a URL",
+    {
+      url: z.string().url().describe("URL of the documentation to scrape"),
+      library: z.string().describe("Name of the library"),
+      version: z.string().optional().describe("Version of the library"),
+      maxPages: z.number().optional().default(DEFAULT_MAX_PAGES).describe(`Maximum number of pages to scrape (default: ${DEFAULT_MAX_PAGES})`),
+      maxDepth: z.number().optional().default(DEFAULT_MAX_DEPTH).describe(`Maximum navigation depth (default: ${DEFAULT_MAX_DEPTH})`),
+      scope: z.enum(["subpages", "hostname", "domain"]).optional().default("subpages").describe("Defines the crawling boundary: 'subpages', 'hostname', or 'domain'"),
+      followRedirects: z.boolean().optional().default(true).describe("Whether to follow HTTP redirects (3xx responses)")
+    },
+    async ({ url, library, version, maxPages, maxDepth, scope, followRedirects }) => {
+      try {
+        const result = await tools.scrape.execute({
+          url,
+          library,
+          version,
+          waitForCompletion: false,
+          // Don't wait for completion
+          // onProgress: undefined, // Explicitly undefined or omitted
+          options: {
+            maxPages,
+            maxDepth,
+            scope,
+            followRedirects
           }
- …
-      );
-    } catch (error) {
-      return createError(
-        `Failed to scrape documentation: ${error instanceof Error ? error.message : String(error)}`
-      );
+        });
+        if ("jobId" in result) {
+          return createResponse(`\u{1F680} Scraping job started with ID: ${result.jobId}.`);
         }
+        return createResponse(
+          `Scraping finished immediately (unexpectedly) with ${result.pagesScraped} pages.`
+        );
+      } catch (error) {
+        return createError(
+          `Failed to scrape documentation: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
-    )
- …
+    }
+  );
+  server.tool(
+    "search_docs",
+    'Searches up-to-date documentation for a library. Examples:\n\n- {library: "react", query: "hooks lifecycle"} -> matches latest version of React\n- {library: "react", version: "18.0.0", query: "hooks lifecycle"} -> matches React 18.0.0 or earlier\n- {library: "typescript", version: "5.x", query: "ReturnType example"} -> any TypeScript 5.x.x version\n- {library: "typescript", version: "5.2.x", query: "ReturnType example"} -> any TypeScript 5.2.x version',
+    {
+      library: z.string().describe("Name of the library"),
+      version: z.string().optional().describe(
+        "Version of the library (supports exact versions like '18.0.0' or X-Range patterns like '5.x', '5.2.x')"
+      ),
+      query: z.string().describe("Search query"),
+      limit: z.number().optional().default(5).describe("Maximum number of results")
+    },
+    async ({ library, version, query, limit }) => {
+      try {
+        const result = await tools.search.execute({
+          library,
+          version,
+          query,
+          limit,
+          exactMatch: false
+          // Always false for MCP interface
+        });
+        const formattedResults = result.results.map(
+          (r, i) => `
 ------------------------------------------------------------
 Result ${i + 1}: ${r.url}

 ${r.content}
 `
- …
+        );
+        if (formattedResults.length === 0) {
+          return createResponse(`No results found for '${query}' in ${library}.`);
+        }
+        return createResponse(
+          `Search results for '${query}' in ${library}:
 ${formattedResults.join("")}`
+        );
+      } catch (error) {
+        if (error instanceof LibraryNotFoundError) {
+          return createResponse(
+            [
+              `Library "${library}" not found.`,
+              error.suggestions?.length ? `Did you mean: ${error.suggestions?.join(", ")}?` : void 0
+            ].join(" ")
           );
-        }
- …
-          `Failed to search documentation: ${error instanceof Error ? error.message : String(error)}`
+        }
+        if (error instanceof VersionNotFoundError) {
+          const indexedVersions = error.availableVersions.filter((v) => v.indexed).map((v) => v.version);
+          return createResponse(
+            [
+              `Version "${version}" not found.`,
+              indexedVersions.length > 0 ? `Available indexed versions for ${library}: ${indexedVersions.join(", ")}` : void 0
+            ].join(" ")
           );
         }
+        return createError(
+          `Failed to search documentation: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
+    }
+  );
+  server.tool("list_libraries", "List all indexed libraries", {}, async () => {
+    try {
+      const result = await tools.listLibraries.execute();
+      if (result.libraries.length === 0) {
+        return createResponse("No libraries indexed yet.");
+      }
+      return createResponse(
+        `Indexed libraries:
+
 ${result.libraries.map((lib) => `- ${lib.name}`).join("\n")}`
- …
+      );
+    } catch (error) {
+      return createError(
+        `Failed to list libraries: ${error instanceof Error ? error.message : String(error)}`
+      );
+    }
+  });
+  server.tool(
+    "find_version",
+    "Find best matching version for a library",
+    {
+      library: z.string().describe("Name of the library"),
+      targetVersion: z.string().optional().describe(
+        "Pattern to match (supports exact versions like '18.0.0' or X-Range patterns like '5.x', '5.2.x')"
+      )
+    },
+    async ({ library, targetVersion }) => {
+      try {
+        const message = await tools.findVersion.execute({
+          library,
+          targetVersion
+        });
+        if (!message) {
+          return createError("No matching version found");
+        }
+        return createResponse(message);
       } catch (error) {
         return createError(
-          `Failed to
+          `Failed to find version: ${error instanceof Error ? error.message : String(error)}`
         );
       }
-    }
- …
-          library,
-          targetVersion
-        });
-        if (!version) {
-          return createError("No matching version found");
-        }
-        return createResponse(`Found matching version: ${version}`);
-      } catch (error) {
-        return createError(
-          `Failed to find version: ${error instanceof Error ? error.message : String(error)}`
-        );
-      }
-    }
-  );
-  server.tool(
-    "list_jobs",
-    "List pipeline jobs, optionally filtering by status.",
-    {
-      status: z.nativeEnum(PipelineJobStatus).optional().describe("Optional status to filter jobs by.")
-    },
-    async ({ status }) => {
-      try {
-        const result = await tools.listJobs.execute({ status });
-        const formattedJobs = result.jobs.map(
-          (job) => `- ID: ${job.id}
+    }
+  );
+  server.tool(
+    "list_jobs",
+    "List pipeline jobs, optionally filtering by status.",
+    {
+      status: z.nativeEnum(PipelineJobStatus).optional().describe("Optional status to filter jobs by.")
+    },
+    async ({ status }) => {
+      try {
+        const result = await tools.listJobs.execute({ status });
+        const formattedJobs = result.jobs.map(
+          (job) => `- ID: ${job.id}
 Status: ${job.status}
 Library: ${job.library}
 Version: ${job.version}
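Every tool handler registered in the hunk above returns through the createResponse/createError helpers from the unchanged src/mcp/utils.ts block (only their closing lines appear as context here). Their bodies are not part of this diff, so the following is an assumption about their shape based on the standard MCP tool-result format, not the published implementation:

// Sketch only: presumed shape of the helpers referenced throughout the hunk above.
function createResponse(text) {
  return { content: [{ type: "text", text }] };
}
function createError(text) {
  return { content: [{ type: "text", text }], isError: true };
}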
@@ -229,244 +287,391 @@ ${result.libraries.map((lib) => `- ${lib.name}`).join("\n")}`
 Started: ${job.startedAt}` : ""}${job.finishedAt ? `
 Finished: ${job.finishedAt}` : ""}${job.error ? `
 Error: ${job.error}` : ""}`
- …
+        ).join("\n\n");
+        return createResponse(
+          result.jobs.length > 0 ? `Current Jobs:

-${formattedJobs}` : "No jobs found
- …
-      }
+${formattedJobs}` : "No jobs found."
+        );
+      } catch (error) {
+        return createError(
+          `Failed to list jobs: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
-    }
- …
+    }
+  );
+  server.tool(
+    "get_job_info",
+    "Get the simplified info for a specific pipeline job.",
+    {
+      jobId: z.string().uuid().describe("The ID of the job to query.")
+    },
+    async ({ jobId }) => {
+      try {
+        const result = await tools.getJobInfo.execute({ jobId });
+        if (!result.job) {
+          return createError(`Job with ID ${jobId} not found.`);
+        }
+        const job = result.job;
+        const formattedJob = `- ID: ${job.id}
 Status: ${job.status}
 Library: ${job.library}@${job.version}
 Created: ${job.createdAt}${job.startedAt ? `
 Started: ${job.startedAt}` : ""}${job.finishedAt ? `
 Finished: ${job.finishedAt}` : ""}${job.error ? `
 Error: ${job.error}` : ""}`;
- …
+        return createResponse(`Job Info:

 ${formattedJob}`);
- …
-      }
-    }
-  );
-  server.tool(
-    "fetch_url",
-    "Fetch a single URL and convert its content to Markdown",
-    {
-      url: z.string().url().describe("The URL to fetch and convert to markdown"),
-      followRedirects: z.boolean().optional().default(true).describe("Whether to follow HTTP redirects (3xx responses)")
-    },
-    async ({ url, followRedirects }) => {
-      try {
-        const result = await tools.fetchUrl.execute({ url, followRedirects });
-        return createResponse(result);
-      } catch (error) {
-        return createError(
-          `Failed to fetch URL: ${error instanceof Error ? error.message : String(error)}`
-        );
-      }
+      } catch (error) {
+        return createError(
+          `Failed to get job info for ${jobId}: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
-  );
-  }
+    }
+  );
+  server.tool(
+    "fetch_url",
+    "Fetch a single URL and convert its content to Markdown",
+    {
+      url: z.string().url().describe("The URL to fetch and convert to markdown"),
+      followRedirects: z.boolean().optional().default(true).describe("Whether to follow HTTP redirects (3xx responses)")
+    },
+    async ({ url, followRedirects }) => {
+      try {
+        const result = await tools.fetchUrl.execute({ url, followRedirects });
+        return createResponse(result);
+      } catch (error) {
+        return createError(
+          `Failed to fetch URL: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
+    }
+  );
+  server.tool(
+    "cancel_job",
+    "Attempt to cancel a queued or running pipeline job.",
+    {
+      jobId: z.string().uuid().describe("The ID of the job to cancel.")
+    },
+    async ({ jobId }) => {
+      try {
+        const result = await tools.cancelJob.execute({ jobId });
+        if (result.success) {
           return createResponse(result.message);
-      } catch (error) {
-        return createError(
-          `Failed to remove documents: ${error instanceof Error ? error.message : String(error)}`
-        );
         }
+        return createError(result.message);
+      } catch (error) {
+        return createError(
+          `Failed to cancel job ${jobId}: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
-      }
-    }
-    ]
-  };
+    }
+  );
+  server.tool(
+    "remove_docs",
+    "Remove indexed documentation for a library version.",
+    {
+      library: z.string().describe("Name of the library"),
+      version: z.string().optional().describe("Version of the library (optional, removes unversioned if omitted)")
+    },
+    async ({ library, version }) => {
+      try {
+        const result = await tools.remove.execute({ library, version });
+        return createResponse(result.message);
+      } catch (error) {
+        return createError(
+          `Failed to remove documents: ${error instanceof Error ? error.message : String(error)}`
+        );
       }
- …
+    }
+  );
+  server.prompt(
+    "docs",
+    "Search indexed documentation",
+    {
+      library: z.string().describe("Name of the library"),
+      version: z.string().optional().describe("Version of the library"),
+      query: z.string().describe("Search query")
+    },
+    async ({ library, version, query }) => {
+      return {
+        messages: [
+          {
+            role: "user",
+            content: {
+              type: "text",
+              text: `Please search ${library} ${version || ""} documentation for this query: ${query}`
+            }
+          }
+        ]
+      };
+    }
+  );
+  server.resource(
+    "libraries",
+    "docs://libraries",
+    {
+      description: "List all indexed libraries"
+    },
+    async (uri) => {
+      const result = await tools.listLibraries.execute();
+      return {
+        contents: result.libraries.map((lib) => ({
+          uri: new URL(lib.name, uri).href,
+          text: lib.name
+        }))
+      };
+    }
+  );
+  server.resource(
+    "versions",
+    new ResourceTemplate("docs://libraries/{library}/versions", {
+      list: void 0
+    }),
+    {
+      description: "List all indexed versions for a library"
+    },
+    async (uri, { library }) => {
+      const result = await tools.listLibraries.execute();
+      const lib = result.libraries.find((l) => l.name === library);
+      if (!lib) {
+        return { contents: [] };
       }
- …
-      }
- …
+      return {
+        contents: lib.versions.map((v) => ({
+          uri: new URL(v.version, uri).href,
+          text: v.version
+        }))
+      };
+    }
+  );
+  server.resource(
+    "jobs",
+    "docs://jobs",
+    {
+      description: "List pipeline jobs, optionally filtering by status.",
+      mimeType: "application/json"
+    },
+    async (uri) => {
+      const statusParam = uri.searchParams.get("status");
+      let statusFilter;
+      if (statusParam) {
+        const validation = z.nativeEnum(PipelineJobStatus).safeParse(statusParam);
+        if (validation.success) {
+          statusFilter = validation.data;
+        } else {
+          logger.warn(`Invalid status parameter received: ${statusParam}`);
         }
-      return {
-        contents: lib.versions.map((v) => ({
-          uri: new URL(v.version, uri).href,
-          text: v.version
-        }))
-      };
       }
- …
-    async (uri) => {
-      const statusParam = uri.searchParams.get("status");
-      let statusFilter;
-      if (statusParam) {
-        const validation = z.nativeEnum(PipelineJobStatus).safeParse(statusParam);
-        if (validation.success) {
-          statusFilter = validation.data;
-        } else {
-          logger.warn(`Invalid status parameter received: ${statusParam}`);
+      const result = await tools.listJobs.execute({ status: statusFilter });
+      return {
+        contents: [
+          {
+            uri: uri.href,
+            mimeType: "application/json",
+            text: JSON.stringify(result.jobs, null, 2)
+            // Stringify the simplified jobs array
           }
- …
+        ]
+      };
+    }
+  );
+  server.resource(
+    "job",
+    // A distinct name for this specific resource type
+    new ResourceTemplate("docs://jobs/{jobId}", { list: void 0 }),
+    {
+      description: "Get details for a specific pipeline job by ID.",
+      mimeType: "application/json"
+    },
+    async (uri, { jobId }) => {
+      if (typeof jobId !== "string" || jobId.length === 0) {
+        logger.warn(`Invalid jobId received in URI: ${jobId}`);
+        return { contents: [] };
       }
- …
+      const result = await tools.getJobInfo.execute({ jobId });
+      if (!result.job) {
+        return { contents: [] };
+      }
+      return {
+        contents: [
+          {
+            uri: uri.href,
+            mimeType: "application/json",
+            text: JSON.stringify(result.job, null, 2)
+            // Stringify the simplified job object
+          }
+        ]
+      };
+    }
+  );
+  return server;
+}
+
+// src/mcp/startHttpServer.ts
+async function startHttpServer(tools, port) {
+  setLogLevel(2 /* INFO */);
+  const server = createMcpServerInstance(tools);
+  const sseTransports = {};
+  const httpServer = http.createServer(async (req, res) => {
+    try {
+      const url = new URL(req.url || "/", `http://${req.headers.host}`);
+      if (req.method === "GET" && url.pathname === "/sse") {
+        const transport = new SSEServerTransport("/messages", res);
+        sseTransports[transport.sessionId] = transport;
+        res.on("close", () => {
+          delete sseTransports[transport.sessionId];
+          transport.close();
+        });
+        await server.connect(transport);
+      } else if (req.method === "POST" && url.pathname === "/messages") {
+        const sessionId = url.searchParams.get("sessionId");
+        const transport = sessionId ? sseTransports[sessionId] : void 0;
+        if (transport) {
+          let body = "";
+          for await (const chunk of req) {
+            body += chunk;
+          }
+          const parsedBody = JSON.parse(body);
+          await transport.handlePostMessage(req, res, parsedBody);
+        } else {
+          res.writeHead(400, { "Content-Type": "application/json" });
+          res.end(JSON.stringify({ error: "No transport found for sessionId" }));
         }
- …
+      } else if (url.pathname === "/mcp") {
+        let body = "";
+        for await (const chunk of req) {
+          body += chunk;
        }
- …
-  };
+        const parsedBody = JSON.parse(body);
+        const requestServer = createMcpServerInstance(tools);
+        const requestTransport = new StreamableHTTPServerTransport({
+          sessionIdGenerator: void 0
+        });
+        res.on("close", () => {
+          logger.info("Streamable HTTP request closed");
+          requestTransport.close();
+          requestServer.close();
+        });
+        await requestServer.connect(requestTransport);
+        await requestTransport.handleRequest(req, res, parsedBody);
+      } else {
+        res.writeHead(404, { "Content-Type": "application/json" });
+        res.end(
+          JSON.stringify({
+            error: `Endpoint ${url.pathname} not found.`
+          })
+        );
+      }
+    } catch (error) {
+      logger.error(`Error handling HTTP request: ${error}`);
+      res.writeHead(500, { "Content-Type": "application/json" });
+      res.end(
+        JSON.stringify({
+          error: error instanceof Error ? error.message : String(error)
+        })
+      );
+    }
+  });
+  httpServer.listen(port, () => {
+    logger.info(`Documentation MCP server running on http://0.0.0.0:${port}`);
+  });
+  process.removeAllListeners("SIGINT");
+  process.on("SIGINT", async () => {
+    logger.info("Shutting down HTTP server...");
+    await shutdownServices();
+    await server.close();
+    httpServer.close();
+    logger.info("HTTP server closed.");
+    process.exit(0);
+  });
+}
+
+// src/mcp/startStdioServer.ts
+import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
+async function startStdioServer(tools) {
+  setLogLevel(0 /* ERROR */);
+  const server = createMcpServerInstance(tools);
+  const transport = new StdioServerTransport();
+  await server.connect(transport);
+  logger.info("Documentation MCP server running on stdio");
+  process.removeAllListeners("SIGINT");
+  process.on("SIGINT", async () => {
+    logger.info("Shutting down Stdio server...");
+    await shutdownServices();
+    await server.close();
+    logger.info("Stdio server closed.");
+    process.exit(0);
+  });
+}
+
+// src/mcp/tools.ts
+async function initializeTools() {
+  const docService2 = getDocService();
+  const pipelineManager2 = getPipelineManager();
+  const tools = {
+    listLibraries: new ListLibrariesTool(docService2),
+    findVersion: new FindVersionTool(docService2),
+    scrape: new ScrapeTool(docService2, pipelineManager2),
+    search: new SearchTool(docService2),
+    listJobs: new ListJobsTool(pipelineManager2),
+    getJobInfo: new GetJobInfoTool(pipelineManager2),
+    cancelJob: new CancelJobTool(pipelineManager2),
+    remove: new RemoveTool(docService2),
+    // FetchUrlTool now uses middleware pipeline internally
+    fetchUrl: new FetchUrlTool(new HttpFetcher(), new FileFetcher())
+  };
+  return tools;
+}
+
+// src/mcp/index.ts
+async function startServer(protocol, port) {
+  try {
+    await initializeServices();
+    const tools = await initializeTools();
+    if (protocol === "stdio") {
+      await startStdioServer(tools);
+    } else if (protocol === "http") {
+      if (port === void 0) {
+        logger.error("HTTP protocol requires a port.");
+        process.exit(1);
       }
- …
-    await pipelineManager.stop();
-    await docService.shutdown();
-    await server.close();
-    process.exit(0);
-  });
+      await startHttpServer(tools, port);
+    } else {
+      logger.error(`Unknown protocol: ${protocol}`);
+      process.exit(1);
+    }
   } catch (error) {
-    await docService.shutdown();
     logger.error(`\u274C Fatal Error: ${error}`);
     process.exit(1);
   }
 }

 // src/server.ts
- …
-}
+program.option("--protocol <type>", "Protocol to use (stdio or http)", DEFAULT_PROTOCOL).option(
+  "--port <number>",
+  "Port to listen on for http protocol",
+  `${DEFAULT_HTTP_PORT}`
+).parse(process.argv);
+var options = program.opts();
+async function main() {
+  const protocol = options.protocol;
+  const port = Number.parseInt(options.port, 10);
+  if (protocol !== "stdio" && protocol !== "http") {
+    console.error('Invalid protocol specified. Use "stdio" or "http".');
+    process.exit(1);
+  }
+  if (protocol === "http" && Number.isNaN(port)) {
+    console.error("Port must be a number when using http protocol.");
+    process.exit(1);
+  }
+  try {
+    await startServer(protocol, protocol === "http" ? port : void 0);
+  } catch (error) {
+    console.error(`Server failed to start: ${error}`);
+    process.exit(1);
+  }
+}
+main();
 //# sourceMappingURL=server.js.map
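Net effect of the server.js changes: the stdio transport from 1.10.0 is preserved (now selected with --protocol stdio), and an HTTP listener is added that serves GET /sse plus POST /messages for the SSE transport and /mcp for a per-request streamable HTTP transport. A rough client-side sketch, assuming the client classes shipped in @modelcontextprotocol/sdk and a server started with --protocol http; the port and tool arguments below are illustrative placeholders, not values defined by this package:

// Sketch (JavaScript): connecting to the new streamable HTTP endpoint.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";

const client = new Client({ name: "example-client", version: "0.0.1" });
const transport = new StreamableHTTPClientTransport(new URL("http://localhost:6280/mcp"));
await client.connect(transport);

// Call one of the tools registered by createMcpServerInstance, e.g. search_docs.
const result = await client.callTool({
  name: "search_docs",
  arguments: { library: "react", query: "hooks lifecycle" }
});
console.log(result.content);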