mcp-docs-service 0.2.12 → 0.2.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/cursor-wrapper.js +84 -0
- package/dist/index.js +558 -494
- package/package.json +5 -3
package/CHANGELOG.md
CHANGED
```diff
@@ -2,6 +2,20 @@
 
 All notable changes to the MCP Docs Service will be documented in this file.
 
+## [0.2.13] - 2024-05-16
+
+### Added
+
+- Added `mcp-docs-cursor` entry point specifically for Cursor integration
+- Improved argument handling for Cursor integration
+- Enhanced debugging capabilities with detailed logging
+
+### Fixed
+
+- Fixed issue with npx execution in Cursor's MCP integration
+- Improved robustness of docs directory resolution
+- Enhanced error handling for Cursor integration
+
 ## [0.2.12] - 2024-05-15
 
 ### Fixed
```
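The `mcp-docs-cursor` entry point added in 0.2.13 is meant to be wired into Cursor's MCP configuration. A minimal sketch of such an entry, assuming Cursor's usual `mcpServers` config shape (the server name `docs` and the `./docs` path are illustrative, not taken from this package):

```json
{
  "mcpServers": {
    "docs": {
      "command": "npx",
      "args": ["-y", "mcp-docs-service", "--docs-dir", "./docs"]
    }
  }
}
```

The same config can instead point at `mcp-docs-cursor`, which adds the debug logging and directory handling shown in the wrapper below.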
package/cursor-wrapper.js
ADDED
```diff
@@ -0,0 +1,84 @@
+#!/usr/bin/env node
+
+/**
+ * Cursor Wrapper for MCP Docs Service
+ *
+ * This script is designed to be used as the entry point for Cursor's MCP integration.
+ * It handles the arguments properly and forwards them to the actual MCP Docs Service.
+ */
+
+import fs from "fs";
+import path from "path";
+import { spawn } from "child_process";
+import { fileURLToPath } from "url";
+
+// Get the current directory
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+// Create a log file for debugging
+const logFile = path.join(__dirname, "cursor-debug.log");
+fs.writeFileSync(
+  logFile,
+  `Cursor wrapper called at ${new Date().toISOString()}\n`
+);
+fs.appendFileSync(logFile, `Arguments: ${JSON.stringify(process.argv)}\n`);
+fs.appendFileSync(logFile, `Working directory: ${process.cwd()}\n`);
+
+// Extract the docs directory from the arguments
+// The docs directory is expected to be the last argument
+let docsDir = "./docs";
+const args = process.argv.slice(2);
+
+if (args.length > 0) {
+  // Check if --docs-dir flag is used
+  const docsDirIndex = args.indexOf("--docs-dir");
+  if (docsDirIndex !== -1 && docsDirIndex + 1 < args.length) {
+    docsDir = args[docsDirIndex + 1];
+  } else {
+    // Otherwise, use the last argument if it looks like a path
+    const lastArg = args[args.length - 1];
+    if (!lastArg.startsWith("-")) {
+      docsDir = lastArg;
+    }
+  }
+}
+
+// Resolve the docs directory to an absolute path
+docsDir = path.resolve(docsDir);
+fs.appendFileSync(logFile, `Using docs directory: ${docsDir}\n`);
+
+// Ensure the docs directory exists
+if (!fs.existsSync(docsDir)) {
+  fs.appendFileSync(logFile, `Creating docs directory: ${docsDir}\n`);
+  try {
+    fs.mkdirSync(docsDir, { recursive: true });
+  } catch (error) {
+    fs.appendFileSync(logFile, `Error creating docs directory: ${error}\n`);
+    process.exit(1);
+  }
+}
+
+// Set up the arguments for the actual service
+const serviceArgs = ["--docs-dir", docsDir];
+fs.appendFileSync(
+  logFile,
+  `Service arguments: ${JSON.stringify(serviceArgs)}\n`
+);
+
+// Run the actual service
+const binPath = path.join(__dirname, "dist", "cli", "bin.js");
+fs.appendFileSync(
+  logFile,
+  `Running service: ${binPath} ${serviceArgs.join(" ")}\n`
+);
+
+const child = spawn("node", [binPath, ...serviceArgs], {
+  stdio: "inherit",
+  env: { ...process.env, MCP_CURSOR_INTEGRATION: "true" },
+});
+
+child.on("exit", (code) => {
+  fs.appendFileSync(logFile, `Child process exited with code ${code}\n`);
+  process.exit(code);
+});
```
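The wrapper's argument handling gives an explicit `--docs-dir` flag precedence over a trailing positional path, with `./docs` as the fallback. Restated as a standalone function for illustration (a sketch; `resolveDocsDir` is not a function exported by the package):

```js
// Hypothetical restatement of the wrapper's docs-directory resolution.
function resolveDocsDir(args) {
  let docsDir = "./docs"; // default when no usable argument is given
  const flagIndex = args.indexOf("--docs-dir");
  if (flagIndex !== -1 && flagIndex + 1 < args.length) {
    docsDir = args[flagIndex + 1]; // explicit flag wins
  } else if (args.length > 0 && !args[args.length - 1].startsWith("-")) {
    docsDir = args[args.length - 1]; // else a trailing path-like argument
  }
  return docsDir;
}

console.log(resolveDocsDir(["--docs-dir", "./my-docs"])); // "./my-docs"
console.log(resolveDocsDir(["./docs-site"])); // "./docs-site"
console.log(resolveDocsDir(["--verbose"])); // "./docs" (fallback)
```

Note that the child process is spawned with `MCP_CURSOR_INTEGRATION=true` in its environment, so the service can detect that it was launched through this wrapper.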
package/dist/index.js
CHANGED
```diff
@@ -1,7 +1,10 @@
 #!/usr/bin/env node
 import { Server } from "@modelcontextprotocol/sdk/server/index.js";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
-import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
+import {
+  CallToolRequestSchema,
+  ListToolsRequestSchema,
+} from "@modelcontextprotocol/sdk/types.js";
 import fs from "fs/promises";
 import path from "path";
 import { zodToJsonSchema } from "zod-to-json-schema";
```
```diff
@@ -12,79 +15,98 @@ import { normalizePath, expandHome, validatePath } from "./utils/path.js";
 import * as DocsHandlers from "./handlers/docs.js";
 // Import schemas
 import * as ToolSchemas from "./schemas/tools.js";
+
 // Check if we're running under MCP Inspector
-const isMCPInspector =
-
+const isMCPInspector =
+  process.env.MCP_INSPECTOR === "true" ||
+  process.argv.some((arg) => arg.includes("modelcontextprotocol/inspector"));
 // Create a logging function that respects MCP Inspector mode
 const log = (...args) => {
-
-
-
+  if (!isMCPInspector) {
+    console.log(...args);
+  }
 };
+
 const errorLog = (...args) => {
-
+  a;
+  console.error(...args);
 };
+
 // Command line argument parsing
 const args = process.argv.slice(2);
 let allowedDirectories = [];
 let runHealthCheck = false;
+
 // Check for health check flag
 if (args.includes("--health-check")) {
-
-
-
-
+  runHealthCheck = true;
+  // Remove the health check flag from args
+  const healthCheckIndex = args.indexOf("--health-check");
+  args.splice(healthCheckIndex, 1);
 }
+
 // Filter out any other flags (starting with --)
 const directoryArgs = args.filter((arg) => !arg.startsWith("--"));
 if (directoryArgs.length === 0) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  errorLog(`Error: Default docs directory ${defaultDocsDir} does not exist or is not accessible`);
-  errorLog("Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]");
-  process.exit(1);
+  // Use default docs directory if none is provided
+  const defaultDocsDir = path.join(process.cwd(), "docs");
+  try {
+    const stats = await fs.stat(defaultDocsDir);
+    if (stats.isDirectory()) {
+      log(`Using default docs directory: ${defaultDocsDir}`);
+      allowedDirectories = [normalizePath(path.resolve(defaultDocsDir))];
+    } else {
+      errorLog(
+        `Error: Default docs directory ${defaultDocsDir} is not a directory`
+      );
+      errorLog(
+        "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
+      );
+      process.exit(1);
     }
-  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  } catch (error) {
+    errorLog(
+      `Error: Default docs directory ${defaultDocsDir} does not exist or is not accessible`
+    );
+    errorLog(
+      "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
+    );
+    process.exit(1);
+  }
+} else {
+  // Store allowed directories in normalized form
+  allowedDirectories = directoryArgs.map((dir) =>
+    normalizePath(path.resolve(expandHome(dir)))
+  );
+  // Validate that all directories exist and are accessible
+  await Promise.all(
+    directoryArgs.map(async (dir) => {
+      try {
+        const stats = await fs.stat(dir);
+        if (!stats.isDirectory()) {
+          errorLog(`Error: ${dir} is not a directory`);
+          process.exit(1);
         }
-
+      } catch (error) {
+        errorLog(`Error accessing directory ${dir}:`, error);
+        process.exit(1);
+      }
+    })
+  );
 }
+
 // Create server
-const server = new Server({
+const server = new Server(
+  {
     name: "secure-filesystem-server",
-  version: "0.
-  },
+    version: "1.0.0",
+  },
+  {
     capabilities: {
-
+      tools: {},
     },
-
+  }
+);
 // Define tools
 // ===================================================================
 // DOCUMENTATION TOOLS
```
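The net effect of the argument handling above: `--health-check` is spliced out and remembered, every other `--` flag is dropped, and whatever remains becomes the list of allowed directories, with `<cwd>/docs` as the fallback when the list is empty. A small standalone illustration of the filtering (hypothetical argv values):

```js
// Hypothetical argv, as it would appear in process.argv.slice(2).
const argv = ["--health-check", "/srv/team-docs"];

const runHealthCheck = argv.includes("--health-check");
if (runHealthCheck) argv.splice(argv.indexOf("--health-check"), 1);

// Flags are dropped; the remaining entries become allowed directories.
const directoryArgs = argv.filter((arg) => !arg.startsWith("--"));
console.log(runHealthCheck, directoryArgs); // true [ '/srv/team-docs' ]
// With no surviving entries, the service falls back to <cwd>/docs instead.
```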
```diff
@@ -92,478 +114,520 @@ const server = new Server({
 // These tools are specifically designed for working with documentation
 // files (markdown with frontmatter)
 const documentationTools = [
-
-
-
-
-
-
-
-
+  // Read document - reads a markdown document and extracts its content and metadata
+  {
+    name: "read_document",
+    description:
+      "Read a markdown document and extract its content and metadata",
+    schema: ToolSchemas.ReadDocumentSchema,
+    handler: async (args) => {
+      return await DocsHandlers.readDocument(args.path, allowedDirectories);
     },
-
-
-
-
-
-
-
-
+  },
+  // List documents - lists all markdown documents in a directory
+  {
+    name: "list_documents",
+    description: "List all markdown documents in a directory",
+    schema: ToolSchemas.ListDocumentsSchema,
+    handler: async (args) => {
+      return await DocsHandlers.listDocuments(
+        args.basePath || "",
+        allowedDirectories
+      );
     },
-
-
-
-
-
-
-
-
+  },
+  // Get structure - gets the structure of the documentation directory
+  {
+    name: "get_structure",
+    description: "Get the structure of the documentation directory",
+    schema: ToolSchemas.GetStructureSchema,
+    handler: async (args) => {
+      return await DocsHandlers.getStructure(
+        args.basePath || "",
+        allowedDirectories
+      );
     },
-
-
-
-
-
-
-
-
+  },
+  // Get navigation - gets the navigation structure for the documentation
+  {
+    name: "get_navigation",
+    description: "Get the navigation structure for the documentation",
+    schema: ToolSchemas.GetNavigationSchema,
+    handler: async (args) => {
+      return await DocsHandlers.getNavigation(
+        args.basePath || "",
+        allowedDirectories
+      );
     },
-
-
-
-
-
-
+  },
+  // Get docs knowledge base - creates a comprehensive knowledge base of documentation
+  {
+    name: "get_docs_knowledge_base",
+    description:
+      "Create a comprehensive knowledge base of documentation for LLM context",
+    schema: ToolSchemas.GetDocsKnowledgeBaseSchema,
+    handler: async (args) => {
+      try {
+        // First get the navigation structure
+        const navResult = await DocsHandlers.getNavigation(
+          args.basePath || "",
+          allowedDirectories
+        );
+        if (navResult.isError) {
+          return navResult;
+        }
+        // Get all documents
+        const docsResult = await DocsHandlers.listDocuments(
+          args.basePath || "",
+          allowedDirectories
+        );
+        if (docsResult.isError) {
+          return docsResult;
+        }
+        const documents = docsResult.metadata?.documents || [];
+        const navigation = navResult.metadata?.navigation || [];
+        // Create a map of path to document for quick lookup
+        const documentMap = new Map();
+        documents.forEach((doc) => {
+          documentMap.set(doc.path, doc);
+        });
+        // Create knowledge base structure
+        const knowledgeBase = {
+          navigation,
+          documents: [],
+          categories: {},
+          tags: {},
+        };
+        // Process documents to extract summaries if requested
+        const includeSummaries = args.includeSummaries !== false; // Default to true
+        const maxSummaryLength = args.maxSummaryLength || 500;
+        // Process all documents
+        for (const doc of documents) {
+          // Create document entry with metadata
+          const docEntry = {
+            path: doc.path,
+            name: doc.name,
+            metadata: doc.metadata || {},
+          };
+          // Add summary if requested
+          if (includeSummaries) {
             try {
-
-
-
-
-
-        //
-        const
-
-
-
-        const
-
-
-
-
-
-
-
-        const knowledgeBase = {
-          navigation,
-          documents: [],
-          categories: {},
-          tags: {},
-        };
-        // Process documents to extract summaries if requested
-        const includeSummaries = args.includeSummaries !== false; // Default to true
-        const maxSummaryLength = args.maxSummaryLength || 500;
-        // Process all documents
-        for (const doc of documents) {
-          // Create document entry with metadata
-          const docEntry = {
-            path: doc.path,
-            name: doc.name,
-            metadata: doc.metadata || {},
-          };
-          // Add summary if requested
-          if (includeSummaries) {
-            try {
-              const docContent = await DocsHandlers.readDocument(doc.path, allowedDirectories);
-              if (!docContent.isError && docContent.metadata?.content) {
-                // Extract a summary (first few paragraphs)
-                const content = docContent.metadata.content;
-                const paragraphs = content.split("\n\n");
-                let summary = "";
-                // Get first few paragraphs up to maxSummaryLength
-                for (const para of paragraphs) {
-                  if (summary.length + para.length <= maxSummaryLength) {
-                    summary += para + "\n\n";
-                  }
-                  else {
-                    // Add partial paragraph to reach maxSummaryLength
-                    const remainingLength = maxSummaryLength - summary.length;
-                    if (remainingLength > 0) {
-                      summary += para.substring(0, remainingLength) + "...";
-                    }
-                    break;
-                  }
-                }
-                docEntry.summary = summary.trim();
-              }
-            }
-            catch (error) {
-              // Skip summary if there's an error
-              docEntry.summary = "Error generating summary";
-            }
-          }
-          // Add to knowledge base
-          knowledgeBase.documents.push(docEntry);
-          // Organize by categories (based on directory structure)
-          const dirPath = path.dirname(doc.path);
-          if (!knowledgeBase.categories[dirPath]) {
-            knowledgeBase.categories[dirPath] = [];
-          }
-          knowledgeBase.categories[dirPath].push(docEntry);
-          // Organize by tags
-          if (doc.metadata?.tags) {
-            for (const tag of doc.metadata.tags) {
-              if (!knowledgeBase.tags[tag]) {
-                knowledgeBase.tags[tag] = [];
-              }
-              knowledgeBase.tags[tag].push(docEntry);
-            }
+              const docContent = await DocsHandlers.readDocument(
+                doc.path,
+                allowedDirectories
+              );
+              if (!docContent.isError && docContent.metadata?.content) {
+                // Extract a summary (first few paragraphs)
+                const content = docContent.metadata.content;
+                const paragraphs = content.split("\n\n");
+                let summary = "";
+                // Get first few paragraphs up to maxSummaryLength
+                for (const para of paragraphs) {
+                  if (summary.length + para.length <= maxSummaryLength) {
+                    summary += para + "\n\n";
+                  } else {
+                    // Add partial paragraph to reach maxSummaryLength
+                    const remainingLength = maxSummaryLength - summary.length;
+                    if (remainingLength > 0) {
+                      summary += para.substring(0, remainingLength) + "...";
                     }
+                    break;
+                  }
                 }
-
-
-
-
-
-          },
-        ],
-        metadata: {
-          knowledgeBase,
-        },
-      };
+                docEntry.summary = summary.trim();
+              }
+            } catch (error) {
+              // Skip summary if there's an error
+              docEntry.summary = "Error generating summary";
             }
-
-
-
-
-
-
-
-
-
-
+          }
+          // Add to knowledge base
+          knowledgeBase.documents.push(docEntry);
+          // Organize by categories (based on directory structure)
+          const dirPath = path.dirname(doc.path);
+          if (!knowledgeBase.categories[dirPath]) {
+            knowledgeBase.categories[dirPath] = [];
+          }
+          knowledgeBase.categories[dirPath].push(docEntry);
+          // Organize by tags
+          if (doc.metadata?.tags) {
+            for (const tag of doc.metadata.tags) {
+              if (!knowledgeBase.tags[tag]) {
+                knowledgeBase.tags[tag] = [];
+              }
+              knowledgeBase.tags[tag].push(docEntry);
             }
-
+          }
+        }
+        return {
+          content: [
+            {
+              type: "text",
+              text: `Generated knowledge base with ${knowledgeBase.documents.length} documents`,
+            },
+          ],
+          metadata: {
+            knowledgeBase,
+          },
+        };
+      } catch (error) {
+        return {
+          content: [
+            {
+              type: "text",
+              text: `Error generating knowledge base: ${error.message}`,
+            },
+          ],
+          isError: true,
+        };
+      }
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  },
+  // Write document - writes content to a markdown document with frontmatter
+  {
+    name: "write_document",
+    description: "Write content to a markdown document with frontmatter",
+    schema: ToolSchemas.WriteDocumentSchema,
+    handler: async (args) => {
+      try {
+        const normalizedPath = await validatePath(
+          args.path,
+          allowedDirectories
+        );
+        // Convert metadata to frontmatter and combine with content
+        const frontmatter = args.metadata
+          ? matter.stringify(args.content, args.metadata)
+          : args.content;
+        // Ensure the directory exists
+        const dirPath = path.dirname(normalizedPath);
+        await fs.mkdir(dirPath, { recursive: true });
+        // Write the file
+        await fs.writeFile(normalizedPath, frontmatter);
+        return {
+          content: [{ type: "text", text: "Document written successfully" }],
+          metadata: {
+            path: args.path,
+          },
+        };
+      } catch (error) {
+        return {
+          content: [
+            { type: "text", text: `Error writing document: ${error.message}` },
+          ],
+          isError: true,
+        };
+      }
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  },
+  // Edit document - applies edits to a markdown document while preserving frontmatter
+  {
+    name: "edit_document",
+    description:
+      "Apply edits to a markdown document while preserving frontmatter",
+    schema: ToolSchemas.EditDocumentSchema,
+    handler: async (args) => {
+      try {
+        // First read the document to get its current content and metadata
+        const docResult = await DocsHandlers.readDocument(
+          args.path,
+          allowedDirectories
+        );
+        if (docResult.isError) {
+          return docResult;
+        }
+        const normalizedPath = await validatePath(
+          args.path,
+          allowedDirectories
+        );
+        // Read the current content
+        const content = await fs.readFile(normalizedPath, "utf-8");
+        // Apply edits
+        let newContent = content;
+        let appliedEdits = 0;
+        for (const edit of args.edits) {
+          if (newContent.includes(edit.oldText)) {
+            newContent = newContent.replace(edit.oldText, edit.newText);
+            appliedEdits++;
+          }
+        }
+        // Write the updated content
+        await fs.writeFile(normalizedPath, newContent);
+        return {
+          content: [{ type: "text", text: "Document edited successfully" }],
+          metadata: {
+            path: args.path,
+            appliedEdits,
+          },
+        };
+      } catch (error) {
+        return {
+          content: [
+            { type: "text", text: `Error editing document: ${error.message}` },
+          ],
+          isError: true,
+        };
+      }
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  },
+  // Delete document - deletes a markdown document
+  {
+    name: "delete_document",
+    description: "Delete a markdown document",
+    schema: ToolSchemas.DeleteDocumentSchema,
+    handler: async (args) => {
+      try {
+        const normalizedPath = await validatePath(
+          args.path,
+          allowedDirectories
+        );
+        // Check if the file exists and is a markdown file
+        const stats = await fs.stat(normalizedPath);
+        if (!stats.isFile() || !normalizedPath.endsWith(".md")) {
+          return {
+            content: [
+              {
+                type: "text",
+                text: `Error: ${args.path} is not a markdown document`,
+              },
+            ],
+            isError: true,
+          };
+        }
+        // Delete the file
+        await fs.unlink(normalizedPath);
+        return {
+          content: [{ type: "text", text: "Document deleted successfully" }],
+          metadata: {
+            path: args.path,
+          },
+        };
+      } catch (error) {
+        return {
+          content: [
+            { type: "text", text: `Error deleting document: ${error.message}` },
+          ],
+          isError: true,
+        };
+      }
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      }
-
-
-
-
-
-
-
-
-
-
-
-
+  },
+  // Search documents - searches for markdown documents matching criteria
+  {
+    name: "search_documents",
+    description: "Search for markdown documents matching criteria",
+    schema: ToolSchemas.SearchDocumentsSchema,
+    handler: async (args) => {
+      try {
+        // Get the list of documents first
+        const listResult = await DocsHandlers.listDocuments(
+          args.basePath || "",
+          allowedDirectories
+        );
+        if (listResult.isError) {
+          return listResult;
+        }
+        let documents = listResult.metadata?.documents || [];
+        // Filter by query if provided
+        if (args.query) {
+          documents = documents.filter((doc) => {
+            // Check if query matches document path, name, or metadata
+            const docString = JSON.stringify(doc).toLowerCase();
+            return docString.includes(args.query.toLowerCase());
+          });
+        }
+        // Filter by tags if provided
+        if (args.tags && args.tags.length > 0) {
+          documents = documents.filter((doc) => {
+            const docTags = doc.metadata?.tags || [];
+            return args.tags.some((tag) => docTags.includes(tag));
+          });
+        }
+        // Filter by status if provided
+        if (args.status) {
+          documents = documents.filter(
+            (doc) => doc.metadata?.status === args.status
+          );
+        }
+        return {
+          content: [
+            {
+              type: "text",
+              text: `Found ${documents.length} matching documents`,
+            },
+          ],
+          metadata: {
+            documents,
+          },
+        };
+      } catch (error) {
+        return {
+          content: [
+            {
+              type: "text",
+              text: `Error searching documents: ${error.message}`,
+            },
+          ],
+          isError: true,
+        };
+      }
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  },
+  // Check documentation health - checks the health of documentation
+  {
+    name: "check_documentation_health",
+    description: "Check the health of documentation and identify issues",
+    schema: ToolSchemas.CheckDocumentationHealthSchema,
+    handler: async (args) => {
+      try {
+        // If basePath is provided, validate it
+        let validatedBasePath = "";
+        if (args.basePath) {
+          try {
+            validatedBasePath = await validatePath(
+              args.basePath,
+              allowedDirectories
+            );
+          } catch (error) {
+            // If validation fails, use the first allowed directory
+            console.warn(
+              `Warning: Invalid basePath "${args.basePath}". Using default directory instead.`
+            );
+            validatedBasePath = allowedDirectories[0];
+          }
+        }
+        return await DocsHandlers.checkDocumentationHealth(
+          validatedBasePath,
+          {
+            checkLinks: args.checkLinks,
+            checkMetadata: args.checkMetadata,
+            checkOrphans: args.checkOrphans,
+            requiredMetadataFields: args.requiredMetadataFields,
+          },
+          allowedDirectories
+        );
+      } catch (error) {
+        return {
+          content: [
+            {
+              type: "text",
+              text: `Error checking documentation health: ${error.message}`,
+            },
+          ],
+          isError: true,
+        };
+      }
     },
+  },
 ];
 // Combine all tools
 const tools = [...documentationTools];
 // Register tool handlers
 server.setRequestHandler(ListToolsRequestSchema, async () => {
-
-
-
-
-
-
-
+  return {
+    tools: tools.map((tool) => ({
+      name: tool.name,
+      description: tool.description,
+      inputSchema: zodToJsonSchema(tool.schema),
+    })),
+  };
 });
 server.setRequestHandler(CallToolRequestSchema, async (request) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    return result;
-  }
-  catch (error) {
-    console.error(`Error executing tool ${name}:`, error);
-    return {
-      content: [
-        {
-          type: "text",
-          text: `Error calling tool ${name}: ${error.message}`,
-        },
-      ],
-      isError: true,
-    };
+  const { name, arguments: args } = request.params;
+  // Find the requested tool
+  const tool = tools.find((t) => t.name === name);
+  if (!tool) {
+    return {
+      content: [
+        {
+          type: "text",
+          text: `Tool not found: ${name}`,
+        },
+      ],
+      isError: true,
+    };
+  }
+  try {
+    // Parse and validate arguments
+    const parsedArgs = tool.schema.parse(args);
+    // Call the tool handler with the appropriate type
+    const result = await tool.handler(parsedArgs);
+    // Ensure the content field is always an array
+    if (!result.content || !Array.isArray(result.content)) {
+      result.content = [
+        { type: "text", text: "Operation completed successfully" },
+      ];
     }
+    return result;
+  } catch (error) {
+    console.error(`Error executing tool ${name}:`, error);
+    return {
+      content: [
+        {
+          type: "text",
+          text: `Error calling tool ${name}: ${error.message}`,
+        },
+      ],
+      isError: true,
+    };
+  }
 });
 // If health check flag is set, run the health check and exit
 if (runHealthCheck) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  console.log(`Overall Health Score: ${metadata.score || 0}%`);
-  console.log(`Total Documents: ${metadata.totalDocuments || 0}`);
-  console.log(`Metadata Completeness: ${metadata.metadataCompleteness || 0}%`);
-  console.log(`Broken Links: ${metadata.brokenLinks || 0}`);
-  if (metadata.issues && metadata.issues.length > 0) {
-    console.log("\nIssues Found:");
-    // Group issues by type
-    const issuesByType = {};
-    metadata.issues.forEach((issue) => {
-      if (!issuesByType[issue.type]) {
-        issuesByType[issue.type] = [];
-      }
-      issuesByType[issue.type].push(issue);
-    });
-    // Display issues by type
-    for (const [type, issues] of Object.entries(issuesByType)) {
-      console.log(`\n${type.replace("_", " ").toUpperCase()} (${issues.length}):`);
-      issues.forEach((issue) => {
-        console.log(`- ${issue.path}: ${issue.message}`);
-      });
-    }
-  }
-  else {
-    console.log("\nNo issues found. Documentation is in good health!");
-  }
-  console.log("\n=== END OF HEALTH CHECK ===\n");
-  // Exit with success
-  process.exit(0);
+  console.log("Running documentation health check...");
+  try {
+    const healthCheckResult = await DocsHandlers.checkDocumentationHealth(
+      "", // Use the first allowed directory
+      {
+        checkLinks: true,
+        checkMetadata: true,
+        checkOrphans: true,
+        requiredMetadataFields: ["title", "description", "status"],
+      },
+      allowedDirectories
+    );
+    if (healthCheckResult.isError) {
+      console.error("Health check failed:", healthCheckResult.content[0].text);
+      process.exit(1);
     }
-
-
-
+    const metadata = healthCheckResult.metadata || {};
+    console.log("\n=== DOCUMENTATION HEALTH CHECK RESULTS ===\n");
+    console.log(`Overall Health Score: ${metadata.score || 0}%`);
+    console.log(`Total Documents: ${metadata.totalDocuments || 0}`);
+    console.log(
+      `Metadata Completeness: ${metadata.metadataCompleteness || 0}%`
+    );
+    console.log(`Broken Links: ${metadata.brokenLinks || 0}`);
+    if (metadata.issues && metadata.issues.length > 0) {
+      console.log("\nIssues Found:");
+      // Group issues by type
+      const issuesByType = {};
+      metadata.issues.forEach((issue) => {
+        if (!issuesByType[issue.type]) {
+          issuesByType[issue.type] = [];
+        }
+        issuesByType[issue.type].push(issue);
+      });
+      // Display issues by type
+      for (const [type, issues] of Object.entries(issuesByType)) {
+        console.log(
+          `\n${type.replace("_", " ").toUpperCase()} (${issues.length}):`
+        );
+        issues.forEach((issue) => {
+          console.log(`- ${issue.path}: ${issue.message}`);
+        });
+      }
+    } else {
+      console.log("\nNo issues found. Documentation is in good health!");
     }
+    console.log("\n=== END OF HEALTH CHECK ===\n");
+    // Exit with success
+    process.exit(0);
+  } catch (error) {
+    console.error("Error running health check:", error);
+    process.exit(1);
+  }
 }
 // Connect to transport and start the server
 const transport = new StdioServerTransport();
 await server.connect(transport);
-//# sourceMappingURL=index.js.map
+//# sourceMappingURL=index.js.map
```
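For a sense of what the rewritten dispatch path handles, here is roughly what one `tools/call` round trip looks like over the stdio transport. The shapes follow the handlers above; the document path and metadata fields are illustrative:

```js
// Illustrative request, as delivered to the CallToolRequestSchema handler.
const request = {
  params: {
    name: "read_document",
    arguments: { path: "docs/getting-started.md" },
  },
};

// The handler finds the tool by name, validates `arguments` with
// tool.schema.parse(...), and awaits tool.handler(parsedArgs), which
// resolves to a result of this general shape:
const result = {
  content: [{ type: "text", text: "..." }],
  metadata: {
    /* tool-specific fields, e.g. the document's content and frontmatter */
  },
};
```

If a handler returns no `content` array at all, the dispatcher now substitutes a generic "Operation completed successfully" message, and any thrown error is converted into an `isError: true` result rather than crashing the server.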
package/package.json
CHANGED
```diff
@@ -1,13 +1,14 @@
 {
   "name": "mcp-docs-service",
-  "version": "0.2.12",
+  "version": "0.2.14",
   "description": "MCP Documentation Management Service - A Model Context Protocol implementation for documentation management",
   "type": "module",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "bin": {
     "mcp-docs-service": "dist/cli/bin.js",
-    "mcp-docs-inspector": "mcp-inspector-wrapper.js"
+    "mcp-docs-inspector": "mcp-inspector-wrapper.js",
+    "mcp-docs-cursor": "cursor-wrapper.js"
   },
   "scripts": {
     "build": "tsc",
@@ -50,7 +51,8 @@
     "README.md",
     "CHANGELOG.md",
     "LICENSE",
-    "mcp-inspector-wrapper.js"
+    "mcp-inspector-wrapper.js",
+    "cursor-wrapper.js"
   ],
   "engines": {
     "node": ">=18"
```