mcp-docs-service 0.3.10 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/bin.d.ts +8 -0
- package/dist/cli/bin.js +133 -0
- package/dist/cli/bin.js.map +1 -0
- package/dist/handlers/docs.d.ts +26 -0
- package/dist/handlers/docs.js +513 -0
- package/dist/handlers/docs.js.map +1 -0
- package/dist/handlers/documents.js +282 -0
- package/dist/handlers/file.d.ts +32 -0
- package/dist/handlers/file.js +222 -0
- package/dist/handlers/file.js.map +1 -0
- package/dist/handlers/health.js +196 -0
- package/dist/handlers/index.d.ts +1 -0
- package/dist/handlers/index.js +8 -0
- package/dist/handlers/index.js.map +1 -0
- package/dist/handlers/navigation.js +128 -0
- package/dist/index.js +107 -549
- package/dist/schemas/index.d.ts +1 -0
- package/dist/schemas/index.js +1 -0
- package/dist/schemas/index.js.map +1 -0
- package/dist/schemas/tools.d.ts +164 -0
- package/dist/schemas/tools.js +47 -0
- package/dist/schemas/tools.js.map +1 -0
- package/dist/types/docs.d.ts +74 -0
- package/dist/types/docs.js +1 -0
- package/dist/types/docs.js.map +1 -0
- package/dist/types/file.d.ts +21 -0
- package/dist/types/file.js +2 -0
- package/dist/types/file.js.map +1 -0
- package/dist/types/index.d.ts +44 -0
- package/dist/types/index.js +2 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/tools.d.ts +11 -0
- package/dist/types/tools.js +1 -0
- package/dist/types/tools.js.map +1 -0
- package/dist/utils/file.d.ts +24 -0
- package/dist/utils/file.js +94 -0
- package/dist/utils/file.js.map +1 -0
- package/dist/utils/index.d.ts +1 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/logging.js +27 -0
- package/dist/utils/path.d.ts +16 -0
- package/dist/utils/path.js +69 -0
- package/dist/utils/path.js.map +1 -0
- package/package.json +4 -8
- package/cursor-wrapper.cjs +0 -111
- package/npx-wrapper.cjs +0 -160
package/dist/index.js
CHANGED
@@ -7,29 +7,16 @@
  */
 import { Server } from "@modelcontextprotocol/sdk/server/index.js";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
-import { CallToolRequestSchema, ListToolsRequestSchema,
+import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
 import fs from "fs/promises";
 import path from "path";
-import os from "os";
-import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
-
-import {
-//
-
-
-
-const isInspector = process.env.MCP_INSPECTOR === "true";
-// Create a safe logging function that won't interfere with MCP protocol
-const safeLog = (...args) => {
-// When running under Cursor or NPX, redirect all logs to stderr
-if (isCursorWrapper || isNpxWrapper) {
-console.error(...args);
-}
-else {
-console.log(...args);
-}
-};
+// Import our utilities
+import { safeLog, normalizePath } from "./utils/index.js";
+// Import schemas
+import { ReadDocumentSchema, WriteDocumentSchema, EditDocumentSchema, ListDocumentsSchema, SearchDocumentsSchema, CheckDocumentationHealthSchema, } from "./schemas/index.js";
+// Import handlers
+import { DocumentHandler, NavigationHandler, HealthCheckHandler, } from "./handlers/index.js";
 // Command line argument parsing
 const args = process.argv.slice(2);
 let docsDir = path.join(process.cwd(), "docs");
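
The refactored entry point above now imports `safeLog` and `normalizePath` from `./utils/index.js` instead of defining them inline. Below is a minimal sketch of those utilities, consolidated into one module for illustration; the stderr-only logger is an assumption (the 0.3.x version only redirected to stderr under the now-removed wrapper scripts), and in the package the code presumably lives in `dist/utils/logging.js` and `dist/utils/path.js` behind the `dist/utils/index.js` barrel.

```js
// Sketch of the utilities that dist/index.js now imports from "./utils/index.js".
// Consolidated into one module here for illustration only.
import path from "path";

// Assumption: logging goes to stderr so it cannot corrupt the JSON-RPC messages
// that the MCP server writes to stdout.
export const safeLog = (...args) => {
  console.error(...args);
};

// Same behavior as the 0.3.x normalizePath removed later in this diff.
export function normalizePath(p) {
  return path.normalize(p);
}
```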
@@ -52,28 +39,29 @@ for (let i = 0; i < args.length; i++) {
 }
 }
 // Normalize path
-docsDir =
+docsDir = normalizePath(docsDir);
 // Ensure docs directory exists
-
-
-
-
-
+async function ensureDocsDirectory() {
+try {
+const stats = await fs.stat(docsDir);
+if (!stats.isDirectory()) {
+safeLog(`Error: ${docsDir} is not a directory`);
+process.exit(1);
+}
 }
-
-
-
-if (createDir) {
-try {
-await fs.mkdir(docsDir, { recursive: true });
-safeLog(`Created docs directory: ${docsDir}`);
-// Create a sample README.md
-const readmePath = path.join(docsDir, "README.md");
+catch (error) {
+// Create directory if it doesn't exist and --create-dir is specified
+if (createDir) {
 try {
-await fs.
-
-
-const
+await fs.mkdir(docsDir, { recursive: true });
+safeLog(`Created docs directory: ${docsDir}`);
+// Create a sample README.md
+const readmePath = path.join(docsDir, "README.md");
+try {
+await fs.access(readmePath);
+}
+catch {
+const content = `---
 title: Documentation
 description: Project documentation
 ---
@@ -82,374 +70,26 @@ description: Project documentation
 
 This is the documentation directory for your project.
 `;
-
-
-}
-}
-catch (error) {
-safeLog(`Error creating docs directory: ${error}`);
-process.exit(1);
-}
-}
-else {
-safeLog(`Error: Docs directory does not exist: ${docsDir}`);
-safeLog(`Use --create-dir to create it automatically`);
-process.exit(1);
-}
-}
-safeLog("MCP Documentation Service initialized with docs directory:", docsDir);
-safeLog("Directory will be created if it doesn't exist");
-// Schema definitions
-const ReadDocumentArgsSchema = z.object({
-path: z
-.string()
-.describe("Path to the markdown document, relative to docs directory"),
-});
-const WriteDocumentArgsSchema = z.object({
-path: z
-.string()
-.describe("Path to the markdown document, relative to docs directory"),
-content: z
-.string()
-.describe("Content of the document, including frontmatter"),
-createDirectories: z
-.boolean()
-.default(true)
-.describe("Create parent directories if they don't exist"),
-});
-const EditDocumentArgsSchema = z.object({
-path: z
-.string()
-.describe("Path to the markdown document, relative to docs directory"),
-edits: z.array(z.object({
-oldText: z.string().describe("Text to search for - must match exactly"),
-newText: z.string().describe("Text to replace with"),
-})),
-dryRun: z
-.boolean()
-.default(false)
-.describe("Preview changes using git-style diff format"),
-});
-const ListDocumentsArgsSchema = z.object({
-basePath: z
-.string()
-.optional()
-.default("")
-.describe("Base path within docs directory to list documents from"),
-recursive: z.boolean().default(false).describe("List documents recursively"),
-});
-const SearchDocumentsArgsSchema = z.object({
-query: z
-.string()
-.describe("Search query to find in document content or metadata"),
-basePath: z
-.string()
-.optional()
-.default("")
-.describe("Base path within docs directory to search documents from"),
-});
-const GenerateNavigationArgsSchema = z.object({
-basePath: z
-.string()
-.optional()
-.default("")
-.describe("Base path within docs directory to generate navigation from"),
-outputPath: z
-.string()
-.optional()
-.default("navigation.json")
-.describe("Path to output navigation file"),
-});
-const CheckDocumentationHealthArgsSchema = z.object({
-basePath: z
-.string()
-.optional()
-.default("")
-.describe("Base path within docs directory to check health of"),
-});
-const ToolInputSchema = ToolSchema.shape.inputSchema;
-// Utility functions
-function normalizePath(p) {
-return path.normalize(p);
-}
-function expandHome(filepath) {
-if (filepath.startsWith("~/") || filepath === "~") {
-return path.join(os.homedir(), filepath.slice(1));
-}
-return filepath;
-}
-async function validatePath(requestedPath) {
-// Resolve path relative to docs directory
-const resolvedPath = path.isAbsolute(requestedPath)
-? requestedPath
-: path.join(docsDir, requestedPath);
-const normalizedPath = normalizePath(resolvedPath);
-// Check if path is within docs directory
-if (!normalizedPath.startsWith(docsDir)) {
-throw new Error(`Access denied - path outside docs directory: ${normalizedPath}`);
-}
-return normalizedPath;
-}
-// File editing and diffing utilities
-function normalizeLineEndings(text) {
-return text.replace(/\r\n/g, "\n");
-}
-function createUnifiedDiff(originalContent, newContent, filepath = "file") {
-// Ensure consistent line endings for diff
-const normalizedOriginal = normalizeLineEndings(originalContent);
-const normalizedNew = normalizeLineEndings(newContent);
-return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, "original", "modified");
-}
-async function applyDocumentEdits(filePath, edits, dryRun = false) {
-// Read file content and normalize line endings
-const content = normalizeLineEndings(await fs.readFile(filePath, "utf-8"));
-// Apply edits sequentially
-let modifiedContent = content;
-for (const edit of edits) {
-const normalizedOld = normalizeLineEndings(edit.oldText);
-const normalizedNew = normalizeLineEndings(edit.newText);
-// If exact match exists, use it
-if (modifiedContent.includes(normalizedOld)) {
-modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
-continue;
-}
-// Otherwise, try line-by-line matching with flexibility for whitespace
-const oldLines = normalizedOld.split("\n");
-const contentLines = modifiedContent.split("\n");
-let matchFound = false;
-for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
-const potentialMatch = contentLines.slice(i, i + oldLines.length);
-// Compare lines with normalized whitespace
-const isMatch = oldLines.every((oldLine, j) => {
-const contentLine = potentialMatch[j];
-return oldLine.trim() === contentLine.trim();
-});
-if (isMatch) {
-// Preserve original indentation of first line
-const originalIndent = contentLines[i].match(/^\s*/)?.[0] || "";
-const newLines = normalizedNew.split("\n").map((line, j) => {
-if (j === 0)
-return originalIndent + line.trimStart();
-// For subsequent lines, try to preserve relative indentation
-const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || "";
-const newIndent = line.match(/^\s*/)?.[0] || "";
-if (oldIndent && newIndent) {
-const relativeIndent = newIndent.length - oldIndent.length;
-return (originalIndent +
-" ".repeat(Math.max(0, relativeIndent)) +
-line.trimStart());
-}
-return line;
-});
-contentLines.splice(i, oldLines.length, ...newLines);
-modifiedContent = contentLines.join("\n");
-matchFound = true;
-break;
-}
-}
-if (!matchFound) {
-throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
-}
-}
-// Create unified diff
-const diff = createUnifiedDiff(content, modifiedContent, filePath);
-// Format diff with appropriate number of backticks
-let numBackticks = 3;
-while (diff.includes("`".repeat(numBackticks))) {
-numBackticks++;
-}
-const formattedDiff = `${"`".repeat(numBackticks)}diff\n${diff}${"`".repeat(numBackticks)}\n\n`;
-if (!dryRun) {
-await fs.writeFile(filePath, modifiedContent, "utf-8");
-}
-return formattedDiff;
-}
-// Parse frontmatter from markdown content
-function parseFrontmatter(content) {
-const frontmatterRegex = /^---\s*\n([\s\S]*?)\n---\s*\n/;
-const match = content.match(frontmatterRegex);
-if (!match) {
-return { frontmatter: {}, content };
-}
-const frontmatterStr = match[1];
-const contentWithoutFrontmatter = content.slice(match[0].length);
-// Parse frontmatter as key-value pairs
-const frontmatter = {};
-const lines = frontmatterStr.split("\n");
-for (const line of lines) {
-const colonIndex = line.indexOf(":");
-if (colonIndex !== -1) {
-const key = line.slice(0, colonIndex).trim();
-let value = line.slice(colonIndex + 1).trim();
-// Handle quoted values
-if (value.startsWith('"') && value.endsWith('"')) {
-value = value.slice(1, -1);
-}
-// Handle arrays
-if (value.startsWith("[") && value.endsWith("]")) {
-try {
-value = JSON.parse(value);
-}
-catch {
-// Keep as string if parsing fails
-}
-}
-frontmatter[key] = value;
-}
-}
-return { frontmatter, content: contentWithoutFrontmatter };
-}
-// Generate navigation structure from documents
-async function generateNavigation(basePath) {
-const baseDir = path.join(docsDir, basePath);
-const pattern = path.join(baseDir, "**/*.md");
-const files = await glob(pattern);
-// Sort files to ensure consistent order and process index.md files first
-files.sort((a, b) => {
-const aIsIndex = path.basename(a) === "index.md";
-const bIsIndex = path.basename(b) === "index.md";
-if (aIsIndex && !bIsIndex)
-return -1;
-if (!aIsIndex && bIsIndex)
-return 1;
-return a.localeCompare(b);
-});
-const navigation = [];
-const directoryMap = {};
-for (const file of files) {
-const relativePath = path.relative(docsDir, file);
-const content = await fs.readFile(file, "utf-8");
-const { frontmatter } = parseFrontmatter(content);
-const title = frontmatter.title || path.basename(file, ".md");
-const order = frontmatter.order !== undefined ? Number(frontmatter.order) : 999;
-const item = {
-title,
-path: relativePath,
-order,
-children: [],
-};
-const dirPath = path.dirname(relativePath);
-if (dirPath === "." || dirPath === basePath) {
-navigation.push(item);
-}
-else {
-// Create parent directories if they don't exist in the navigation
-const pathParts = dirPath.split(path.sep);
-let currentPath = "";
-let currentNavigation = navigation;
-for (const part of pathParts) {
-currentPath = currentPath ? path.join(currentPath, part) : part;
-if (!directoryMap[currentPath]) {
-const dirItem = {
-title: part,
-path: currentPath,
-order: 0,
-children: [],
-};
-directoryMap[currentPath] = dirItem;
-currentNavigation.push(dirItem);
+await fs.writeFile(readmePath, content);
+safeLog(`Created sample README.md in ${docsDir}`);
 }
-currentNavigation = directoryMap[currentPath].children;
 }
-
-
-
-// Sort navigation items by order
-function sortNavigation(items) {
-items.sort((a, b) => a.order - b.order);
-for (const item of items) {
-if (item.children && item.children.length > 0) {
-sortNavigation(item.children);
+catch (error) {
+safeLog(`Error creating docs directory: ${error}`);
+process.exit(1);
 }
 }
-
-
-
-
-// Check documentation health
-async function checkDocumentationHealth(basePath) {
-const baseDir = path.join(docsDir, basePath);
-const pattern = path.join(baseDir, "**/*.md");
-const files = await glob(pattern);
-const results = {
-totalDocuments: files.length,
-documentsWithMissingFrontmatter: 0,
-documentsWithMissingTitle: 0,
-documentsWithMissingDescription: 0,
-brokenLinks: 0,
-orphanedDocuments: 0,
-issues: [],
-};
-// Check frontmatter and content
-for (const file of files) {
-const relativePath = path.relative(docsDir, file);
-const content = await fs.readFile(file, "utf-8");
-const { frontmatter } = parseFrontmatter(content);
-if (Object.keys(frontmatter).length === 0) {
-results.documentsWithMissingFrontmatter++;
-results.issues.push(`${relativePath}: Missing frontmatter`);
-}
-if (!frontmatter.title) {
-results.documentsWithMissingTitle++;
-results.issues.push(`${relativePath}: Missing title in frontmatter`);
-}
-if (!frontmatter.description) {
-results.documentsWithMissingDescription++;
-results.issues.push(`${relativePath}: Missing description in frontmatter`);
-}
-// Check for internal links
-const linkRegex = /\[.*?\]\((.*?)\)/g;
-let match;
-while ((match = linkRegex.exec(content)) !== null) {
-const link = match[1];
-// Only check relative links to markdown files
-if (!link.startsWith("http") &&
-!link.startsWith("#") &&
-link.endsWith(".md")) {
-const linkPath = path.join(path.dirname(file), link);
-try {
-await fs.access(linkPath);
-}
-catch {
-results.brokenLinks++;
-results.issues.push(`${relativePath}: Broken link to ${link}`);
-}
-}
-}
-}
-// Generate navigation to check for orphaned documents
-const navigation = await generateNavigation(basePath);
-function collectPaths(items) {
-let paths = [];
-for (const item of items) {
-paths.push(item.path);
-if (item.children && item.children.length > 0) {
-paths = paths.concat(collectPaths(item.children));
-}
-}
-return paths;
-}
-const navigationPaths = collectPaths(navigation);
-for (const file of files) {
-const relativePath = path.relative(docsDir, file);
-if (!navigationPaths.includes(relativePath)) {
-results.orphanedDocuments++;
-results.issues.push(`${relativePath}: Orphaned document (not in navigation)`);
+else {
+safeLog(`Error: Docs directory does not exist: ${docsDir}`);
+safeLog(`Use --create-dir to create it automatically`);
+process.exit(1);
 }
 }
-// Calculate health score (0-100)
-const totalIssues = results.documentsWithMissingFrontmatter +
-results.documentsWithMissingTitle +
-results.documentsWithMissingDescription +
-results.brokenLinks +
-results.orphanedDocuments;
-const maxIssues = results.totalDocuments * 5; // 5 possible issues per document
-const healthScore = Math.max(0, 100 - Math.round((totalIssues / maxIssues) * 100));
-return {
-...results,
-healthScore,
-};
 }
+// Initialize handlers
+const documentHandler = new DocumentHandler(docsDir);
+const navigationHandler = new NavigationHandler(docsDir);
+const healthCheckHandler = new HealthCheckHandler(docsDir);
 // Server setup
 const server = new Server({
 name: "mcp-docs-service",
@@ -464,48 +104,48 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
 return {
 tools: [
 {
-name: "
+name: "mcp_docs_manager_read_document",
 description: "Read a markdown document from the docs directory. Returns the document content " +
 "including frontmatter. Use this tool when you need to examine the contents of a " +
 "single document.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(ReadDocumentSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_write_document",
 description: "Create a new markdown document or completely overwrite an existing document with new content. " +
 "Use with caution as it will overwrite existing documents without warning. " +
 "Can create parent directories if they don't exist.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(WriteDocumentSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_edit_document",
 description: "Make line-based edits to a markdown document. Each edit replaces exact line sequences " +
 "with new content. Returns a git-style diff showing the changes made.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(EditDocumentSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_list_documents",
 description: "List all markdown documents in the docs directory or a subdirectory. " +
 "Returns the relative paths to all documents.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(ListDocumentsSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_search_documents",
 description: "Search for markdown documents containing specific text in their content or frontmatter. " +
 "Returns the relative paths to matching documents.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(SearchDocumentsSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_generate_navigation",
 description: "Generate a navigation structure from the markdown documents in the docs directory. " +
 "Returns a JSON structure that can be used for navigation menus.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(ListDocumentsSchema),
 },
 {
-name: "
+name: "mcp_docs_manager_check_documentation_health",
 description: "Check the health of the documentation by analyzing frontmatter, links, and navigation. " +
 "Returns a report with issues and a health score.",
-inputSchema: zodToJsonSchema(
+inputSchema: zodToJsonSchema(CheckDocumentationHealthSchema),
 },
 ],
 };
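
Each tool now advertises its input schema by running a shared zod definition through `zodToJsonSchema` rather than inlining the schema next to the tool entry. Below is a rough, self-contained sketch of that pattern; the field shape mirrors the 0.3.x `ReadDocumentArgsSchema` removed earlier in this diff, since the new `ReadDocumentSchema` in `dist/schemas/tools.js` is not shown here.

```js
// Sketch: declare a tool's arguments once with zod, then convert that definition
// into the JSON Schema object the tools/list response expects as inputSchema.
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";

const ReadDocumentSchema = z.object({
  path: z
    .string()
    .describe("Path to the markdown document, relative to docs directory"),
});

const readDocumentTool = {
  name: "mcp_docs_manager_read_document",
  description: "Read a markdown document from the docs directory.",
  inputSchema: zodToJsonSchema(ReadDocumentSchema),
};

// Prints a standard JSON Schema with a "path" string property.
console.log(JSON.stringify(readDocumentTool.inputSchema, null, 2));
```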
@@ -514,138 +154,54 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
 try {
 const { name, arguments: args } = request.params;
 switch (name) {
-case "
-const parsed =
+case "mcp_docs_manager_read_document": {
+const parsed = ReadDocumentSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for read_document: ${parsed.error}`);
 }
-
-const content = await fs.readFile(validPath, "utf-8");
-return {
-content: [{ type: "text", text: content }],
-metadata: {
-path: parsed.data.path,
-...parseFrontmatter(content).frontmatter,
-},
-};
+return await documentHandler.readDocument(parsed.data.path);
 }
-case "
-const parsed =
+case "mcp_docs_manager_write_document": {
+const parsed = WriteDocumentSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for write_document: ${parsed.error}`);
 }
-
-// Create parent directories if needed
-if (parsed.data.createDirectories) {
-const dirPath = path.dirname(validPath);
-await fs.mkdir(dirPath, { recursive: true });
-}
-await fs.writeFile(validPath, parsed.data.content, "utf-8");
-return {
-content: [
-{ type: "text", text: `Successfully wrote to ${parsed.data.path}` },
-],
-};
+return await documentHandler.writeDocument(parsed.data.path, parsed.data.content, parsed.data.createDirectories);
 }
-case "
-const parsed =
+case "mcp_docs_manager_edit_document": {
+const parsed = EditDocumentSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for edit_document: ${parsed.error}`);
 }
-
-const result = await applyDocumentEdits(validPath, parsed.data.edits, parsed.data.dryRun);
-return {
-content: [{ type: "text", text: result }],
-};
+return await documentHandler.editDocument(parsed.data.path, parsed.data.edits, parsed.data.dryRun);
 }
-case "
-const parsed =
+case "mcp_docs_manager_list_documents": {
+const parsed = ListDocumentsSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for list_documents: ${parsed.error}`);
 }
-
-const pattern = parsed.data.recursive
-? path.join(baseDir, "**/*.md")
-: path.join(baseDir, "*.md");
-const files = await glob(pattern);
-const relativePaths = files.map((file) => path.relative(docsDir, file));
-return {
-content: [{ type: "text", text: relativePaths.join("\n") }],
-};
+return await documentHandler.listDocuments(parsed.data.basePath, parsed.data.recursive);
 }
-case "
-const parsed =
+case "mcp_docs_manager_search_documents": {
+const parsed = SearchDocumentsSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for search_documents: ${parsed.error}`);
 }
-
-const pattern = path.join(baseDir, "**/*.md");
-const files = await glob(pattern);
-const results = [];
-for (const file of files) {
-const content = await fs.readFile(file, "utf-8");
-if (content.toLowerCase().includes(parsed.data.query.toLowerCase())) {
-results.push(path.relative(docsDir, file));
-}
-}
-return {
-content: [
-{
-type: "text",
-text: results.length > 0
-? `Found ${results.length} matching documents:\n${results.join("\n")}`
-: "No matching documents found",
-},
-],
-};
+return await documentHandler.searchDocuments(parsed.data.query, parsed.data.basePath);
 }
-case "
-const parsed =
+case "mcp_docs_manager_generate_navigation": {
+const parsed = ListDocumentsSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for generate_navigation: ${parsed.error}`);
 }
-
-// Write navigation to file if outputPath is provided
-if (parsed.data.outputPath) {
-const outputPath = await validatePath(parsed.data.outputPath);
-await fs.writeFile(outputPath, JSON.stringify(navigation, null, 2), "utf-8");
-}
-return {
-content: [
-{
-type: "text",
-text: `Navigation structure:\n${JSON.stringify(navigation, null, 2)}`,
-},
-],
-};
+return await navigationHandler.generateNavigation(parsed.data.basePath);
 }
-case "
-const parsed =
+case "mcp_docs_manager_check_documentation_health": {
+const parsed = CheckDocumentationHealthSchema.safeParse(args);
 if (!parsed.success) {
 throw new Error(`Invalid arguments for check_documentation_health: ${parsed.error}`);
 }
-
-return {
-content: [
-{
-type: "text",
-text: `Documentation Health Report:
-Health Score: ${healthReport.healthScore}/100
-
-Summary:
-- Total Documents: ${healthReport.totalDocuments}
-- Documents with Missing Frontmatter: ${healthReport.documentsWithMissingFrontmatter}
-- Documents with Missing Title: ${healthReport.documentsWithMissingTitle}
-- Documents with Missing Description: ${healthReport.documentsWithMissingDescription}
-- Broken Links: ${healthReport.brokenLinks}
-- Orphaned Documents: ${healthReport.orphanedDocuments}
-
-Issues:
-${healthReport.issues.map((issue) => `- ${issue}`).join("\n")}
-`,
-},
-],
-};
+return await healthCheckHandler.checkDocumentationHealth(parsed.data.basePath);
 }
 default:
 throw new Error(`Unknown tool: ${name}`);
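
The dispatch above no longer builds response objects inline; each case validates the raw arguments with zod's `safeParse` and returns whatever the handler produces. Judging by how the health-check branch in the next hunk consumes the result (`isError`, `content[0].text`), the handlers return MCP-style tool results. A hedged sketch of that contract follows, using a stand-in class rather than the real `DocumentHandler` from `dist/handlers/docs.js`.

```js
// Sketch of the handler contract implied by this diff: validate with safeParse,
// delegate to a handler, and pass its MCP-style result straight back to the caller.
import { z } from "zod";

const ReadDocumentSchema = z.object({ path: z.string() });

// Stand-in for the real DocumentHandler in dist/handlers/docs.js.
class FakeDocumentHandler {
  async readDocument(relPath) {
    return {
      content: [{ type: "text", text: `(contents of ${relPath} would go here)` }],
    };
  }
}

async function handleCallTool(name, args, documentHandler) {
  switch (name) {
    case "mcp_docs_manager_read_document": {
      const parsed = ReadDocumentSchema.safeParse(args);
      if (!parsed.success) {
        throw new Error(`Invalid arguments for read_document: ${parsed.error}`);
      }
      return await documentHandler.readDocument(parsed.data.path);
    }
    default:
      throw new Error(`Unknown tool: ${name}`);
  }
}

// Example call:
handleCallTool(
  "mcp_docs_manager_read_document",
  { path: "README.md" },
  new FakeDocumentHandler()
).then((result) => console.log(result.content[0].text));
```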
@@ -661,35 +217,37 @@ ${healthReport.issues.map((issue) => `- ${issue}`).join("\n")}
 });
 // Run health check if requested
 if (runHealthCheck) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-console.error(`Error running health check: ${error}`);
-process.exit(1);
-}
+(async () => {
+try {
+await ensureDocsDirectory();
+const healthResponse = await healthCheckHandler.checkDocumentationHealth("");
+if (healthResponse.isError) {
+safeLog(`Error running health check: ${healthResponse.content[0].text}`);
+process.exit(1);
+}
+safeLog(healthResponse.content[0].text);
+process.exit(0);
+}
+catch (error) {
+safeLog(`Error running health check: ${error}`);
+process.exit(1);
+}
+})();
 }
-
-
-
-
-
-
-
+else {
+// Start server
+(async () => {
+try {
+await ensureDocsDirectory();
+const transport = new StdioServerTransport();
+await server.connect(transport);
+safeLog("MCP Documentation Management Service started.");
+safeLog("Using docs directory:", docsDir);
+safeLog("Reading from stdin, writing results to stdout...");
+}
+catch (error) {
+safeLog("Fatal error running server:", error);
+process.exit(1);
+}
+})();
 }
-runServer().catch((error) => {
-safeLog("Fatal error running server:", error);
-process.exit(1);
-});
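
For context, the refactored `dist/index.js` still starts a stdio server when the health-check branch is not taken, so a client can drive 0.4.0 the same way as before, now using the `mcp_docs_manager_*` tool names registered above. The sketch below assumes the client half of the MCP TypeScript SDK (`Client`, `StdioClientTransport`); the script path is illustrative, and the service falls back to `<cwd>/docs` when no directory argument is supplied.

```js
// Sketch: start the 0.4.0 service over stdio and call one of its tools.
// Assumes a docs/README.md exists under the current working directory.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

const transport = new StdioClientTransport({
  command: "node",
  args: ["node_modules/mcp-docs-service/dist/index.js"], // illustrative path
});
const client = new Client({ name: "docs-client", version: "0.0.1" }, { capabilities: {} });

await client.connect(transport);
const { tools } = await client.listTools();
console.log(tools.map((t) => t.name)); // mcp_docs_manager_read_document, ...

const result = await client.callTool({
  name: "mcp_docs_manager_read_document",
  arguments: { path: "README.md" },
});
console.log(result.content[0].text);
await client.close();
```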