mcp-docs-service 0.2.16 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. package/README.md +33 -199
  2. package/dist/index.d.ts +6 -0
  3. package/dist/index.js +640 -591
  4. package/dist/index.js.map +1 -1
  5. package/package.json +32 -44
  6. package/CHANGELOG.md +0 -174
  7. package/cursor-wrapper.js +0 -199
  8. package/dist/cli/bin.d.ts +0 -8
  9. package/dist/cli/bin.js +0 -133
  10. package/dist/cli/bin.js.map +0 -1
  11. package/dist/handlers/docs.d.ts +0 -26
  12. package/dist/handlers/docs.js +0 -513
  13. package/dist/handlers/docs.js.map +0 -1
  14. package/dist/handlers/file.d.ts +0 -32
  15. package/dist/handlers/file.js +0 -222
  16. package/dist/handlers/file.js.map +0 -1
  17. package/dist/handlers/index.d.ts +0 -1
  18. package/dist/handlers/index.js +0 -3
  19. package/dist/handlers/index.js.map +0 -1
  20. package/dist/schemas/index.d.ts +0 -1
  21. package/dist/schemas/index.js +0 -3
  22. package/dist/schemas/index.js.map +0 -1
  23. package/dist/schemas/tools.d.ts +0 -164
  24. package/dist/schemas/tools.js +0 -53
  25. package/dist/schemas/tools.js.map +0 -1
  26. package/dist/types/docs.d.ts +0 -74
  27. package/dist/types/docs.js +0 -2
  28. package/dist/types/docs.js.map +0 -1
  29. package/dist/types/file.d.ts +0 -21
  30. package/dist/types/file.js +0 -2
  31. package/dist/types/file.js.map +0 -1
  32. package/dist/types/index.d.ts +0 -44
  33. package/dist/types/index.js +0 -4
  34. package/dist/types/index.js.map +0 -1
  35. package/dist/types/tools.d.ts +0 -11
  36. package/dist/types/tools.js +0 -2
  37. package/dist/types/tools.js.map +0 -1
  38. package/dist/utils/file.d.ts +0 -24
  39. package/dist/utils/file.js +0 -94
  40. package/dist/utils/file.js.map +0 -1
  41. package/dist/utils/index.d.ts +0 -1
  42. package/dist/utils/index.js +0 -3
  43. package/dist/utils/index.js.map +0 -1
  44. package/dist/utils/path.d.ts +0 -16
  45. package/dist/utils/path.js +0 -70
  46. package/dist/utils/path.js.map +0 -1
  47. package/mcp-inspector-wrapper.js +0 -208
  48. package/npx-wrapper.js +0 -119
package/dist/index.js CHANGED
@@ -1,632 +1,681 @@
1
1
  #!/usr/bin/env node
2
+ /**
3
+ * MCP Docs Service
4
+ *
5
+ * A Model Context Protocol implementation for documentation management.
6
+ * This service provides tools for reading, writing, and managing markdown documentation.
7
+ */
2
8
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
3
9
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
- import {
5
- CallToolRequestSchema,
6
- ListToolsRequestSchema,
7
- } from "@modelcontextprotocol/sdk/types.js";
10
+ import { CallToolRequestSchema, ListToolsRequestSchema, ToolSchema, } from "@modelcontextprotocol/sdk/types.js";
8
11
  import fs from "fs/promises";
9
12
  import path from "path";
13
+ import os from "os";
14
+ import { z } from "zod";
10
15
  import { zodToJsonSchema } from "zod-to-json-schema";
11
- import matter from "gray-matter";
12
- // Import utility functions
13
- import { normalizePath, expandHome, validatePath } from "./utils/path.js";
14
- // Import handlers
15
- import * as DocsHandlers from "./handlers/docs.js";
16
- // Import schemas
17
- import * as ToolSchemas from "./schemas/tools.js";
18
-
19
- // Check if we're running under MCP Inspector
20
- const isMCPInspector =
21
- process.env.MCP_INSPECTOR === "true" ||
22
- process.argv.some((arg) => arg.includes("modelcontextprotocol/inspector"));
23
- // Create a logging function that respects MCP Inspector mode
24
- const log = (...args) => {
25
- if (!isMCPInspector) {
26
- console.log(...args);
27
- }
28
- };
29
-
30
- const errorLog = (...args) => {
31
- console.error(...args);
32
- };
33
-
16
+ import { createTwoFilesPatch } from "diff";
17
+ import { glob } from "glob";
34
18
  // Command line argument parsing
35
19
  const args = process.argv.slice(2);
36
- let allowedDirectories = [];
20
+ let docsDir = path.join(process.cwd(), "docs");
21
+ let createDir = false;
37
22
  let runHealthCheck = false;
38
-
39
- // Check for health check flag
40
- if (args.includes("--health-check")) {
41
- runHealthCheck = true;
42
- // Remove the health check flag from args
43
- const healthCheckIndex = args.indexOf("--health-check");
44
- args.splice(healthCheckIndex, 1);
45
- }
46
-
47
- // Filter out any other flags (starting with --)
48
- const directoryArgs = args.filter((arg) => !arg.startsWith("--"));
49
- if (directoryArgs.length === 0) {
50
- // Use default docs directory if none is provided
51
- const defaultDocsDir = path.join(process.cwd(), "docs");
52
- try {
53
- const stats = await fs.stat(defaultDocsDir);
54
- if (stats.isDirectory()) {
55
- log(`Using default docs directory: ${defaultDocsDir}`);
56
- allowedDirectories = [normalizePath(path.resolve(defaultDocsDir))];
57
- } else {
58
- errorLog(
59
- `Error: Default docs directory ${defaultDocsDir} is not a directory`
60
- );
61
- errorLog(
62
- "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
63
- );
64
- process.exit(1);
23
+ // Parse arguments
24
+ for (let i = 0; i < args.length; i++) {
25
+ if (args[i] === "--docs-dir" && i + 1 < args.length) {
26
+ docsDir = path.resolve(args[i + 1]);
27
+ i++;
65
28
  }
66
- } catch (error) {
67
- errorLog(
68
- `Error: Default docs directory ${defaultDocsDir} does not exist or is not accessible`
69
- );
70
- errorLog(
71
- "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
72
- );
73
- process.exit(1);
74
- }
75
- } else {
76
- // Store allowed directories in normalized form
77
- allowedDirectories = directoryArgs.map((dir) =>
78
- normalizePath(path.resolve(expandHome(dir)))
79
- );
80
- // Validate that all directories exist and are accessible
81
- await Promise.all(
82
- directoryArgs.map(async (dir) => {
83
- try {
84
- const stats = await fs.stat(dir);
85
- if (!stats.isDirectory()) {
86
- errorLog(`Error: ${dir} is not a directory`);
87
- process.exit(1);
88
- }
89
- } catch (error) {
90
- errorLog(`Error accessing directory ${dir}:`, error);
29
+ else if (args[i] === "--create-dir") {
30
+ createDir = true;
31
+ }
32
+ else if (args[i] === "--health-check") {
33
+ runHealthCheck = true;
34
+ }
35
+ else if (!args[i].startsWith("--")) {
36
+ docsDir = path.resolve(args[i]);
37
+ }
38
+ }
39
+ // Normalize path
40
+ docsDir = path.normalize(docsDir);
41
+ // Ensure docs directory exists
42
+ try {
43
+ const stats = await fs.stat(docsDir);
44
+ if (!stats.isDirectory()) {
45
+ console.error(`Error: ${docsDir} is not a directory`);
91
46
  process.exit(1);
92
- }
93
- })
94
- );
47
+ }
95
48
  }
49
+ catch (error) {
50
+ // Create directory if it doesn't exist and --create-dir is specified
51
+ if (createDir) {
52
+ try {
53
+ await fs.mkdir(docsDir, { recursive: true });
54
+ console.log(`Created docs directory: ${docsDir}`);
55
+ // Create a sample README.md
56
+ const readmePath = path.join(docsDir, "README.md");
57
+ try {
58
+ await fs.access(readmePath);
59
+ }
60
+ catch {
61
+ const content = `---
62
+ title: Documentation
63
+ description: Project documentation
64
+ ---
96
65
 
97
- // Create server
98
- const server = new Server(
99
- {
100
- name: "secure-filesystem-server",
101
- version: "1.0.0",
102
- },
103
- {
104
- capabilities: {
105
- tools: {},
106
- },
107
- }
108
- );
109
- // Define tools
110
- // ===================================================================
111
- // DOCUMENTATION TOOLS
112
- // ===================================================================
113
- // These tools are specifically designed for working with documentation
114
- // files (markdown with frontmatter)
115
- const documentationTools = [
116
- // Read document - reads a markdown document and extracts its content and metadata
117
- {
118
- name: "read_document",
119
- description:
120
- "Read a markdown document and extract its content and metadata",
121
- schema: ToolSchemas.ReadDocumentSchema,
122
- handler: async (args) => {
123
- return await DocsHandlers.readDocument(args.path, allowedDirectories);
124
- },
125
- },
126
- // List documents - lists all markdown documents in a directory
127
- {
128
- name: "list_documents",
129
- description: "List all markdown documents in a directory",
130
- schema: ToolSchemas.ListDocumentsSchema,
131
- handler: async (args) => {
132
- return await DocsHandlers.listDocuments(
133
- args.basePath || "",
134
- allowedDirectories
135
- );
136
- },
137
- },
138
- // Get structure - gets the structure of the documentation directory
139
- {
140
- name: "get_structure",
141
- description: "Get the structure of the documentation directory",
142
- schema: ToolSchemas.GetStructureSchema,
143
- handler: async (args) => {
144
- return await DocsHandlers.getStructure(
145
- args.basePath || "",
146
- allowedDirectories
147
- );
148
- },
149
- },
150
- // Get navigation - gets the navigation structure for the documentation
151
- {
152
- name: "get_navigation",
153
- description: "Get the navigation structure for the documentation",
154
- schema: ToolSchemas.GetNavigationSchema,
155
- handler: async (args) => {
156
- return await DocsHandlers.getNavigation(
157
- args.basePath || "",
158
- allowedDirectories
159
- );
160
- },
161
- },
162
- // Get docs knowledge base - creates a comprehensive knowledge base of documentation
163
- {
164
- name: "get_docs_knowledge_base",
165
- description:
166
- "Create a comprehensive knowledge base of documentation for LLM context",
167
- schema: ToolSchemas.GetDocsKnowledgeBaseSchema,
168
- handler: async (args) => {
169
- try {
170
- // First get the navigation structure
171
- const navResult = await DocsHandlers.getNavigation(
172
- args.basePath || "",
173
- allowedDirectories
174
- );
175
- if (navResult.isError) {
176
- return navResult;
66
+ # Documentation
67
+
68
+ This is the documentation directory for your project.
69
+ `;
70
+ await fs.writeFile(readmePath, content);
71
+ console.log(`Created sample README.md in ${docsDir}`);
72
+ }
177
73
  }
178
- // Get all documents
179
- const docsResult = await DocsHandlers.listDocuments(
180
- args.basePath || "",
181
- allowedDirectories
182
- );
183
- if (docsResult.isError) {
184
- return docsResult;
74
+ catch (error) {
75
+ console.error(`Error creating docs directory: ${error}`);
76
+ process.exit(1);
185
77
  }
186
- const documents = docsResult.metadata?.documents || [];
187
- const navigation = navResult.metadata?.navigation || [];
188
- // Create a map of path to document for quick lookup
189
- const documentMap = new Map();
190
- documents.forEach((doc) => {
191
- documentMap.set(doc.path, doc);
192
- });
193
- // Create knowledge base structure
194
- const knowledgeBase = {
195
- navigation,
196
- documents: [],
197
- categories: {},
198
- tags: {},
199
- };
200
- // Process documents to extract summaries if requested
201
- const includeSummaries = args.includeSummaries !== false; // Default to true
202
- const maxSummaryLength = args.maxSummaryLength || 500;
203
- // Process all documents
204
- for (const doc of documents) {
205
- // Create document entry with metadata
206
- const docEntry = {
207
- path: doc.path,
208
- name: doc.name,
209
- metadata: doc.metadata || {},
210
- };
211
- // Add summary if requested
212
- if (includeSummaries) {
213
- try {
214
- const docContent = await DocsHandlers.readDocument(
215
- doc.path,
216
- allowedDirectories
217
- );
218
- if (!docContent.isError && docContent.metadata?.content) {
219
- // Extract a summary (first few paragraphs)
220
- const content = docContent.metadata.content;
221
- const paragraphs = content.split("\n\n");
222
- let summary = "";
223
- // Get first few paragraphs up to maxSummaryLength
224
- for (const para of paragraphs) {
225
- if (summary.length + para.length <= maxSummaryLength) {
226
- summary += para + "\n\n";
227
- } else {
228
- // Add partial paragraph to reach maxSummaryLength
229
- const remainingLength = maxSummaryLength - summary.length;
230
- if (remainingLength > 0) {
231
- summary += para.substring(0, remainingLength) + "...";
78
+ }
79
+ else {
80
+ console.error(`Error: Docs directory does not exist: ${docsDir}`);
81
+ console.error(`Use --create-dir to create it automatically`);
82
+ process.exit(1);
83
+ }
84
+ }
85
+ console.log("MCP Documentation Service initialized with docs directory:", docsDir);
86
+ console.log("Directory will be created if it doesn't exist");
87
+ // Schema definitions
88
+ const ReadDocumentArgsSchema = z.object({
89
+ path: z
90
+ .string()
91
+ .describe("Path to the markdown document, relative to docs directory"),
92
+ });
93
+ const WriteDocumentArgsSchema = z.object({
94
+ path: z
95
+ .string()
96
+ .describe("Path to the markdown document, relative to docs directory"),
97
+ content: z
98
+ .string()
99
+ .describe("Content of the document, including frontmatter"),
100
+ createDirectories: z
101
+ .boolean()
102
+ .default(true)
103
+ .describe("Create parent directories if they don't exist"),
104
+ });
105
+ const EditDocumentArgsSchema = z.object({
106
+ path: z
107
+ .string()
108
+ .describe("Path to the markdown document, relative to docs directory"),
109
+ edits: z.array(z.object({
110
+ oldText: z.string().describe("Text to search for - must match exactly"),
111
+ newText: z.string().describe("Text to replace with"),
112
+ })),
113
+ dryRun: z
114
+ .boolean()
115
+ .default(false)
116
+ .describe("Preview changes using git-style diff format"),
117
+ });
118
+ const ListDocumentsArgsSchema = z.object({
119
+ basePath: z
120
+ .string()
121
+ .optional()
122
+ .default("")
123
+ .describe("Base path within docs directory to list documents from"),
124
+ recursive: z.boolean().default(false).describe("List documents recursively"),
125
+ });
126
+ const SearchDocumentsArgsSchema = z.object({
127
+ query: z
128
+ .string()
129
+ .describe("Search query to find in document content or metadata"),
130
+ basePath: z
131
+ .string()
132
+ .optional()
133
+ .default("")
134
+ .describe("Base path within docs directory to search documents from"),
135
+ });
136
+ const GenerateNavigationArgsSchema = z.object({
137
+ basePath: z
138
+ .string()
139
+ .optional()
140
+ .default("")
141
+ .describe("Base path within docs directory to generate navigation from"),
142
+ outputPath: z
143
+ .string()
144
+ .optional()
145
+ .default("navigation.json")
146
+ .describe("Path to output navigation file"),
147
+ });
148
+ const CheckDocumentationHealthArgsSchema = z.object({
149
+ basePath: z
150
+ .string()
151
+ .optional()
152
+ .default("")
153
+ .describe("Base path within docs directory to check health of"),
154
+ });
155
+ const ToolInputSchema = ToolSchema.shape.inputSchema;
156
+ // Utility functions
157
+ function normalizePath(p) {
158
+ return path.normalize(p);
159
+ }
160
+ function expandHome(filepath) {
161
+ if (filepath.startsWith("~/") || filepath === "~") {
162
+ return path.join(os.homedir(), filepath.slice(1));
163
+ }
164
+ return filepath;
165
+ }
166
+ async function validatePath(requestedPath) {
167
+ // Resolve path relative to docs directory
168
+ const resolvedPath = path.isAbsolute(requestedPath)
169
+ ? requestedPath
170
+ : path.join(docsDir, requestedPath);
171
+ const normalizedPath = normalizePath(resolvedPath);
172
+ // Check if path is within docs directory
173
+ if (!normalizedPath.startsWith(docsDir)) {
174
+ throw new Error(`Access denied - path outside docs directory: ${normalizedPath}`);
175
+ }
176
+ return normalizedPath;
177
+ }
178
+ // File editing and diffing utilities
179
+ function normalizeLineEndings(text) {
180
+ return text.replace(/\r\n/g, "\n");
181
+ }
182
+ function createUnifiedDiff(originalContent, newContent, filepath = "file") {
183
+ // Ensure consistent line endings for diff
184
+ const normalizedOriginal = normalizeLineEndings(originalContent);
185
+ const normalizedNew = normalizeLineEndings(newContent);
186
+ return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, "original", "modified");
187
+ }
188
+ async function applyDocumentEdits(filePath, edits, dryRun = false) {
189
+ // Read file content and normalize line endings
190
+ const content = normalizeLineEndings(await fs.readFile(filePath, "utf-8"));
191
+ // Apply edits sequentially
192
+ let modifiedContent = content;
193
+ for (const edit of edits) {
194
+ const normalizedOld = normalizeLineEndings(edit.oldText);
195
+ const normalizedNew = normalizeLineEndings(edit.newText);
196
+ // If exact match exists, use it
197
+ if (modifiedContent.includes(normalizedOld)) {
198
+ modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
199
+ continue;
200
+ }
201
+ // Otherwise, try line-by-line matching with flexibility for whitespace
202
+ const oldLines = normalizedOld.split("\n");
203
+ const contentLines = modifiedContent.split("\n");
204
+ let matchFound = false;
205
+ for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
206
+ const potentialMatch = contentLines.slice(i, i + oldLines.length);
207
+ // Compare lines with normalized whitespace
208
+ const isMatch = oldLines.every((oldLine, j) => {
209
+ const contentLine = potentialMatch[j];
210
+ return oldLine.trim() === contentLine.trim();
211
+ });
212
+ if (isMatch) {
213
+ // Preserve original indentation of first line
214
+ const originalIndent = contentLines[i].match(/^\s*/)?.[0] || "";
215
+ const newLines = normalizedNew.split("\n").map((line, j) => {
216
+ if (j === 0)
217
+ return originalIndent + line.trimStart();
218
+ // For subsequent lines, try to preserve relative indentation
219
+ const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || "";
220
+ const newIndent = line.match(/^\s*/)?.[0] || "";
221
+ if (oldIndent && newIndent) {
222
+ const relativeIndent = newIndent.length - oldIndent.length;
223
+ return (originalIndent +
224
+ " ".repeat(Math.max(0, relativeIndent)) +
225
+ line.trimStart());
232
226
  }
233
- break;
234
- }
235
- }
236
- docEntry.summary = summary.trim();
237
- }
238
- } catch (error) {
239
- // Skip summary if there's an error
240
- docEntry.summary = "Error generating summary";
227
+ return line;
228
+ });
229
+ contentLines.splice(i, oldLines.length, ...newLines);
230
+ modifiedContent = contentLines.join("\n");
231
+ matchFound = true;
232
+ break;
233
+ }
234
+ }
235
+ if (!matchFound) {
236
+ throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
237
+ }
238
+ }
239
+ // Create unified diff
240
+ const diff = createUnifiedDiff(content, modifiedContent, filePath);
241
+ // Format diff with appropriate number of backticks
242
+ let numBackticks = 3;
243
+ while (diff.includes("`".repeat(numBackticks))) {
244
+ numBackticks++;
245
+ }
246
+ const formattedDiff = `${"`".repeat(numBackticks)}diff\n${diff}${"`".repeat(numBackticks)}\n\n`;
247
+ if (!dryRun) {
248
+ await fs.writeFile(filePath, modifiedContent, "utf-8");
249
+ }
250
+ return formattedDiff;
251
+ }
252
+ // Parse frontmatter from markdown content
253
+ function parseFrontmatter(content) {
254
+ const frontmatterRegex = /^---\s*\n([\s\S]*?)\n---\s*\n/;
255
+ const match = content.match(frontmatterRegex);
256
+ if (!match) {
257
+ return { frontmatter: {}, content };
258
+ }
259
+ const frontmatterStr = match[1];
260
+ const contentWithoutFrontmatter = content.slice(match[0].length);
261
+ // Parse frontmatter as key-value pairs
262
+ const frontmatter = {};
263
+ const lines = frontmatterStr.split("\n");
264
+ for (const line of lines) {
265
+ const colonIndex = line.indexOf(":");
266
+ if (colonIndex !== -1) {
267
+ const key = line.slice(0, colonIndex).trim();
268
+ let value = line.slice(colonIndex + 1).trim();
269
+ // Handle quoted values
270
+ if (value.startsWith('"') && value.endsWith('"')) {
271
+ value = value.slice(1, -1);
241
272
  }
242
- }
243
- // Add to knowledge base
244
- knowledgeBase.documents.push(docEntry);
245
- // Organize by categories (based on directory structure)
246
- const dirPath = path.dirname(doc.path);
247
- if (!knowledgeBase.categories[dirPath]) {
248
- knowledgeBase.categories[dirPath] = [];
249
- }
250
- knowledgeBase.categories[dirPath].push(docEntry);
251
- // Organize by tags
252
- if (doc.metadata?.tags) {
253
- for (const tag of doc.metadata.tags) {
254
- if (!knowledgeBase.tags[tag]) {
255
- knowledgeBase.tags[tag] = [];
256
- }
257
- knowledgeBase.tags[tag].push(docEntry);
273
+ // Handle arrays
274
+ if (value.startsWith("[") && value.endsWith("]")) {
275
+ try {
276
+ value = JSON.parse(value);
277
+ }
278
+ catch {
279
+ // Keep as string if parsing fails
280
+ }
258
281
  }
259
- }
282
+ frontmatter[key] = value;
260
283
  }
261
- return {
262
- content: [
263
- {
264
- type: "text",
265
- text: `Generated knowledge base with ${knowledgeBase.documents.length} documents`,
266
- },
267
- ],
268
- metadata: {
269
- knowledgeBase,
270
- },
271
- };
272
- } catch (error) {
273
- return {
274
- content: [
275
- {
276
- type: "text",
277
- text: `Error generating knowledge base: ${error.message}`,
278
- },
279
- ],
280
- isError: true,
281
- };
282
- }
283
- },
284
- },
285
- // Write document - writes content to a markdown document with frontmatter
286
- {
287
- name: "write_document",
288
- description: "Write content to a markdown document with frontmatter",
289
- schema: ToolSchemas.WriteDocumentSchema,
290
- handler: async (args) => {
291
- try {
292
- const normalizedPath = await validatePath(
293
- args.path,
294
- allowedDirectories
295
- );
296
- // Convert metadata to frontmatter and combine with content
297
- const frontmatter = args.metadata
298
- ? matter.stringify(args.content, args.metadata)
299
- : args.content;
300
- // Ensure the directory exists
301
- const dirPath = path.dirname(normalizedPath);
302
- await fs.mkdir(dirPath, { recursive: true });
303
- // Write the file
304
- await fs.writeFile(normalizedPath, frontmatter);
305
- return {
306
- content: [{ type: "text", text: "Document written successfully" }],
307
- metadata: {
308
- path: args.path,
309
- },
310
- };
311
- } catch (error) {
312
- return {
313
- content: [
314
- { type: "text", text: `Error writing document: ${error.message}` },
315
- ],
316
- isError: true,
284
+ }
285
+ return { frontmatter, content: contentWithoutFrontmatter };
286
+ }
287
+ // Generate navigation structure from documents
288
+ async function generateNavigation(basePath) {
289
+ const baseDir = path.join(docsDir, basePath);
290
+ const pattern = path.join(baseDir, "**/*.md");
291
+ const files = await glob(pattern);
292
+ // Sort files to ensure consistent order and process index.md files first
293
+ files.sort((a, b) => {
294
+ const aIsIndex = path.basename(a) === "index.md";
295
+ const bIsIndex = path.basename(b) === "index.md";
296
+ if (aIsIndex && !bIsIndex)
297
+ return -1;
298
+ if (!aIsIndex && bIsIndex)
299
+ return 1;
300
+ return a.localeCompare(b);
301
+ });
302
+ const navigation = [];
303
+ const directoryMap = {};
304
+ for (const file of files) {
305
+ const relativePath = path.relative(docsDir, file);
306
+ const content = await fs.readFile(file, "utf-8");
307
+ const { frontmatter } = parseFrontmatter(content);
308
+ const title = frontmatter.title || path.basename(file, ".md");
309
+ const order = frontmatter.order !== undefined ? Number(frontmatter.order) : 999;
310
+ const item = {
311
+ title,
312
+ path: relativePath,
313
+ order,
314
+ children: [],
317
315
  };
318
- }
319
- },
320
- },
321
- // Edit document - applies edits to a markdown document while preserving frontmatter
322
- {
323
- name: "edit_document",
324
- description:
325
- "Apply edits to a markdown document while preserving frontmatter",
326
- schema: ToolSchemas.EditDocumentSchema,
327
- handler: async (args) => {
328
- try {
329
- // First read the document to get its current content and metadata
330
- const docResult = await DocsHandlers.readDocument(
331
- args.path,
332
- allowedDirectories
333
- );
334
- if (docResult.isError) {
335
- return docResult;
316
+ const dirPath = path.dirname(relativePath);
317
+ if (dirPath === "." || dirPath === basePath) {
318
+ navigation.push(item);
336
319
  }
337
- const normalizedPath = await validatePath(
338
- args.path,
339
- allowedDirectories
340
- );
341
- // Read the current content
342
- const content = await fs.readFile(normalizedPath, "utf-8");
343
- // Apply edits
344
- let newContent = content;
345
- let appliedEdits = 0;
346
- for (const edit of args.edits) {
347
- if (newContent.includes(edit.oldText)) {
348
- newContent = newContent.replace(edit.oldText, edit.newText);
349
- appliedEdits++;
350
- }
320
+ else {
321
+ // Create parent directories if they don't exist in the navigation
322
+ const pathParts = dirPath.split(path.sep);
323
+ let currentPath = "";
324
+ let currentNavigation = navigation;
325
+ for (const part of pathParts) {
326
+ currentPath = currentPath ? path.join(currentPath, part) : part;
327
+ if (!directoryMap[currentPath]) {
328
+ const dirItem = {
329
+ title: part,
330
+ path: currentPath,
331
+ order: 0,
332
+ children: [],
333
+ };
334
+ directoryMap[currentPath] = dirItem;
335
+ currentNavigation.push(dirItem);
336
+ }
337
+ currentNavigation = directoryMap[currentPath].children;
338
+ }
339
+ currentNavigation.push(item);
351
340
  }
352
- // Write the updated content
353
- await fs.writeFile(normalizedPath, newContent);
354
- return {
355
- content: [{ type: "text", text: "Document edited successfully" }],
356
- metadata: {
357
- path: args.path,
358
- appliedEdits,
359
- },
360
- };
361
- } catch (error) {
362
- return {
363
- content: [
364
- { type: "text", text: `Error editing document: ${error.message}` },
365
- ],
366
- isError: true,
367
- };
368
- }
369
- },
370
- },
371
- // Delete document - deletes a markdown document
372
- {
373
- name: "delete_document",
374
- description: "Delete a markdown document",
375
- schema: ToolSchemas.DeleteDocumentSchema,
376
- handler: async (args) => {
377
- try {
378
- const normalizedPath = await validatePath(
379
- args.path,
380
- allowedDirectories
381
- );
382
- // Check if the file exists and is a markdown file
383
- const stats = await fs.stat(normalizedPath);
384
- if (!stats.isFile() || !normalizedPath.endsWith(".md")) {
385
- return {
386
- content: [
387
- {
388
- type: "text",
389
- text: `Error: ${args.path} is not a markdown document`,
390
- },
391
- ],
392
- isError: true,
393
- };
341
+ }
342
+ // Sort navigation items by order
343
+ function sortNavigation(items) {
344
+ items.sort((a, b) => a.order - b.order);
345
+ for (const item of items) {
346
+ if (item.children && item.children.length > 0) {
347
+ sortNavigation(item.children);
348
+ }
394
349
  }
395
- // Delete the file
396
- await fs.unlink(normalizedPath);
397
- return {
398
- content: [{ type: "text", text: "Document deleted successfully" }],
399
- metadata: {
400
- path: args.path,
401
- },
402
- };
403
- } catch (error) {
404
- return {
405
- content: [
406
- { type: "text", text: `Error deleting document: ${error.message}` },
407
- ],
408
- isError: true,
409
- };
410
- }
411
- },
412
- },
413
- // Search documents - searches for markdown documents matching criteria
414
- {
415
- name: "search_documents",
416
- description: "Search for markdown documents matching criteria",
417
- schema: ToolSchemas.SearchDocumentsSchema,
418
- handler: async (args) => {
419
- try {
420
- // Get the list of documents first
421
- const listResult = await DocsHandlers.listDocuments(
422
- args.basePath || "",
423
- allowedDirectories
424
- );
425
- if (listResult.isError) {
426
- return listResult;
350
+ }
351
+ sortNavigation(navigation);
352
+ return navigation;
353
+ }
354
+ // Check documentation health
355
+ async function checkDocumentationHealth(basePath) {
356
+ const baseDir = path.join(docsDir, basePath);
357
+ const pattern = path.join(baseDir, "**/*.md");
358
+ const files = await glob(pattern);
359
+ const results = {
360
+ totalDocuments: files.length,
361
+ documentsWithMissingFrontmatter: 0,
362
+ documentsWithMissingTitle: 0,
363
+ documentsWithMissingDescription: 0,
364
+ brokenLinks: 0,
365
+ orphanedDocuments: 0,
366
+ issues: [],
367
+ };
368
+ // Check frontmatter and content
369
+ for (const file of files) {
370
+ const relativePath = path.relative(docsDir, file);
371
+ const content = await fs.readFile(file, "utf-8");
372
+ const { frontmatter } = parseFrontmatter(content);
373
+ if (Object.keys(frontmatter).length === 0) {
374
+ results.documentsWithMissingFrontmatter++;
375
+ results.issues.push(`${relativePath}: Missing frontmatter`);
376
+ }
377
+ if (!frontmatter.title) {
378
+ results.documentsWithMissingTitle++;
379
+ results.issues.push(`${relativePath}: Missing title in frontmatter`);
380
+ }
381
+ if (!frontmatter.description) {
382
+ results.documentsWithMissingDescription++;
383
+ results.issues.push(`${relativePath}: Missing description in frontmatter`);
427
384
  }
428
- let documents = listResult.metadata?.documents || [];
429
- // Filter by query if provided
430
- if (args.query) {
431
- documents = documents.filter((doc) => {
432
- // Check if query matches document path, name, or metadata
433
- const docString = JSON.stringify(doc).toLowerCase();
434
- return docString.includes(args.query.toLowerCase());
435
- });
385
+ // Check for internal links
386
+ const linkRegex = /\[.*?\]\((.*?)\)/g;
387
+ let match;
388
+ while ((match = linkRegex.exec(content)) !== null) {
389
+ const link = match[1];
390
+ // Only check relative links to markdown files
391
+ if (!link.startsWith("http") &&
392
+ !link.startsWith("#") &&
393
+ link.endsWith(".md")) {
394
+ const linkPath = path.join(path.dirname(file), link);
395
+ try {
396
+ await fs.access(linkPath);
397
+ }
398
+ catch {
399
+ results.brokenLinks++;
400
+ results.issues.push(`${relativePath}: Broken link to ${link}`);
401
+ }
402
+ }
436
403
  }
437
- // Filter by tags if provided
438
- if (args.tags && args.tags.length > 0) {
439
- documents = documents.filter((doc) => {
440
- const docTags = doc.metadata?.tags || [];
441
- return args.tags.some((tag) => docTags.includes(tag));
442
- });
404
+ }
405
+ // Generate navigation to check for orphaned documents
406
+ const navigation = await generateNavigation(basePath);
407
+ function collectPaths(items) {
408
+ let paths = [];
409
+ for (const item of items) {
410
+ paths.push(item.path);
411
+ if (item.children && item.children.length > 0) {
412
+ paths = paths.concat(collectPaths(item.children));
413
+ }
443
414
  }
444
- // Filter by status if provided
445
- if (args.status) {
446
- documents = documents.filter(
447
- (doc) => doc.metadata?.status === args.status
448
- );
415
+ return paths;
416
+ }
417
+ const navigationPaths = collectPaths(navigation);
418
+ for (const file of files) {
419
+ const relativePath = path.relative(docsDir, file);
420
+ if (!navigationPaths.includes(relativePath)) {
421
+ results.orphanedDocuments++;
422
+ results.issues.push(`${relativePath}: Orphaned document (not in navigation)`);
449
423
  }
450
- return {
451
- content: [
424
+ }
425
+ // Calculate health score (0-100)
426
+ const totalIssues = results.documentsWithMissingFrontmatter +
427
+ results.documentsWithMissingTitle +
428
+ results.documentsWithMissingDescription +
429
+ results.brokenLinks +
430
+ results.orphanedDocuments;
431
+ const maxIssues = results.totalDocuments * 5; // 5 possible issues per document
432
+ const healthScore = Math.max(0, 100 - Math.round((totalIssues / maxIssues) * 100));
433
+ return {
434
+ ...results,
435
+ healthScore,
436
+ };
437
+ }
438
// Server setup
// Identity/version advertised to clients, and the capability set (tools only).
const SERVER_INFO = {
    name: "mcp-docs-service",
    version: "0.3.0",
};
const SERVER_OPTIONS = {
    capabilities: {
        tools: {},
    },
};
const server = new Server(SERVER_INFO, SERVER_OPTIONS);
447
// Tool handlers
// Static catalog of the exposed tools: [name, argsSchema, description].
// The ListTools handler below converts each Zod schema to JSON Schema on demand,
// so the advertised inputSchema always matches the schema used for validation.
const toolCatalog = [
    [
        "read_document",
        ReadDocumentArgsSchema,
        "Read a markdown document from the docs directory. Returns the document content " +
            "including frontmatter. Use this tool when you need to examine the contents of a " +
            "single document.",
    ],
    [
        "write_document",
        WriteDocumentArgsSchema,
        "Create a new markdown document or completely overwrite an existing document with new content. " +
            "Use with caution as it will overwrite existing documents without warning. " +
            "Can create parent directories if they don't exist.",
    ],
    [
        "edit_document",
        EditDocumentArgsSchema,
        "Make line-based edits to a markdown document. Each edit replaces exact line sequences " +
            "with new content. Returns a git-style diff showing the changes made.",
    ],
    [
        "list_documents",
        ListDocumentsArgsSchema,
        "List all markdown documents in the docs directory or a subdirectory. " +
            "Returns the relative paths to all documents.",
    ],
    [
        "search_documents",
        SearchDocumentsArgsSchema,
        "Search for markdown documents containing specific text in their content or frontmatter. " +
            "Returns the relative paths to matching documents.",
    ],
    [
        "generate_navigation",
        GenerateNavigationArgsSchema,
        "Generate a navigation structure from the markdown documents in the docs directory. " +
            "Returns a JSON structure that can be used for navigation menus.",
    ],
    [
        "check_documentation_health",
        CheckDocumentationHealthArgsSchema,
        "Check the health of the documentation by analyzing frontmatter, links, and navigation. " +
            "Returns a report with issues and a health score.",
    ],
];
// Advertise the tool catalog to clients.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools: toolCatalog.map(([name, schema, description]) => ({
        name,
        description,
        inputSchema: zodToJsonSchema(schema),
    })),
}));
533
498
// Dispatch handler for tool invocations. Each case validates the raw arguments
// against the tool's Zod schema, performs the file-system work, and returns MCP
// "content" items. Any thrown error is converted into an { isError: true } tool
// result by the catch at the bottom instead of crashing the server.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
    try {
        const { name, arguments: args } = request.params;
        switch (name) {
            case "read_document": {
                const parsed = ReadDocumentArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for read_document: ${parsed.error}`);
                }
                // validatePath resolves/guards the user-supplied path; its exact
                // semantics are defined elsewhere in this file.
                const validPath = await validatePath(parsed.data.path);
                const content = await fs.readFile(validPath, "utf-8");
                return {
                    content: [{ type: "text", text: content }],
                    metadata: {
                        path: parsed.data.path,
                        // Surface the document's frontmatter fields alongside its path.
                        ...parseFrontmatter(content).frontmatter,
                    },
                };
            }
            case "write_document": {
                const parsed = WriteDocumentArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for write_document: ${parsed.error}`);
                }
                const validPath = await validatePath(parsed.data.path);
                // Create parent directories if needed
                if (parsed.data.createDirectories) {
                    const dirPath = path.dirname(validPath);
                    await fs.mkdir(dirPath, { recursive: true });
                }
                // Overwrites any existing file without warning (documented tool contract).
                await fs.writeFile(validPath, parsed.data.content, "utf-8");
                return {
                    content: [
                        { type: "text", text: `Successfully wrote to ${parsed.data.path}` },
                    ],
                };
            }
            case "edit_document": {
                const parsed = EditDocumentArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for edit_document: ${parsed.error}`);
                }
                const validPath = await validatePath(parsed.data.path);
                // applyDocumentEdits returns a git-style diff string; with dryRun it
                // presumably previews without writing — defined elsewhere in this file.
                const result = await applyDocumentEdits(validPath, parsed.data.edits, parsed.data.dryRun);
                return {
                    content: [{ type: "text", text: result }],
                };
            }
            case "list_documents": {
                const parsed = ListDocumentsArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for list_documents: ${parsed.error}`);
                }
                // NOTE(review): basePath is joined without validatePath here, unlike the
                // read/write/edit tools — a "../" basePath could escape docsDir. Confirm
                // whether the schema constrains it; otherwise this should be validated.
                const baseDir = path.join(docsDir, parsed.data.basePath);
                const pattern = parsed.data.recursive
                    ? path.join(baseDir, "**/*.md")
                    : path.join(baseDir, "*.md");
                const files = await glob(pattern);
                const relativePaths = files.map((file) => path.relative(docsDir, file));
                return {
                    content: [{ type: "text", text: relativePaths.join("\n") }],
                };
            }
            case "search_documents": {
                const parsed = SearchDocumentsArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for search_documents: ${parsed.error}`);
                }
                // NOTE(review): same unvalidated basePath join as list_documents above.
                const baseDir = path.join(docsDir, parsed.data.basePath);
                const pattern = path.join(baseDir, "**/*.md");
                const files = await glob(pattern);
                const results = [];
                // Case-insensitive substring search over the raw file text; frontmatter
                // is matched too because it is part of the file contents.
                for (const file of files) {
                    const content = await fs.readFile(file, "utf-8");
                    if (content.toLowerCase().includes(parsed.data.query.toLowerCase())) {
                        results.push(path.relative(docsDir, file));
                    }
                }
                return {
                    content: [
                        {
                            type: "text",
                            text: results.length > 0
                                ? `Found ${results.length} matching documents:\n${results.join("\n")}`
                                : "No matching documents found",
                        },
                    ],
                };
            }
            case "generate_navigation": {
                const parsed = GenerateNavigationArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for generate_navigation: ${parsed.error}`);
                }
                const navigation = await generateNavigation(parsed.data.basePath);
                // Write navigation to file if outputPath is provided
                if (parsed.data.outputPath) {
                    const outputPath = await validatePath(parsed.data.outputPath);
                    await fs.writeFile(outputPath, JSON.stringify(navigation, null, 2), "utf-8");
                }
                return {
                    content: [
                        {
                            type: "text",
                            text: `Navigation structure:\n${JSON.stringify(navigation, null, 2)}`,
                        },
                    ],
                };
            }
            case "check_documentation_health": {
                const parsed = CheckDocumentationHealthArgsSchema.safeParse(args);
                if (!parsed.success) {
                    throw new Error(`Invalid arguments for check_documentation_health: ${parsed.error}`);
                }
                const healthReport = await checkDocumentationHealth(parsed.data.basePath);
                // The template below is left-aligned on purpose: its literal line
                // starts are part of the returned report text.
                return {
                    content: [
                        {
                            type: "text",
                            text: `Documentation Health Report:
Health Score: ${healthReport.healthScore}/100

Summary:
- Total Documents: ${healthReport.totalDocuments}
- Documents with Missing Frontmatter: ${healthReport.documentsWithMissingFrontmatter}
- Documents with Missing Title: ${healthReport.documentsWithMissingTitle}
- Documents with Missing Description: ${healthReport.documentsWithMissingDescription}
- Broken Links: ${healthReport.brokenLinks}
- Orphaned Documents: ${healthReport.orphanedDocuments}

Issues:
${healthReport.issues.map((issue) => `- ${issue}`).join("\n")}
`,
                        },
                    ],
                };
            }
            default:
                throw new Error(`Unknown tool: ${name}`);
        }
    }
    catch (error) {
        // Normalize non-Error throwables and report failures as tool errors
        // rather than letting them escape the request handler.
        const errorMessage = error instanceof Error ? error.message : String(error);
        return {
            content: [{ type: "text", text: `Error: ${errorMessage}` }],
            isError: true,
        };
    }
});
573
// Run health check if requested
if (runHealthCheck) {
    try {
        const healthReport = await checkDocumentationHealth("");
        // Assemble the report as one text block; joining with "\n" and a single
        // console.log prints byte-for-byte the same output as per-line logging.
        const reportLines = [
            `Documentation Health Report:`,
            `Health Score: ${healthReport.healthScore}/100`,
            `\nSummary:`,
            `- Total Documents: ${healthReport.totalDocuments}`,
            `- Documents with Missing Frontmatter: ${healthReport.documentsWithMissingFrontmatter}`,
            `- Documents with Missing Title: ${healthReport.documentsWithMissingTitle}`,
            `- Documents with Missing Description: ${healthReport.documentsWithMissingDescription}`,
            `- Broken Links: ${healthReport.brokenLinks}`,
            `- Orphaned Documents: ${healthReport.orphanedDocuments}`,
            `\nIssues:`,
            ...healthReport.issues.map((issue) => `- ${issue}`),
        ];
        console.log(reportLines.join("\n"));
        // Health-check mode is a one-shot CLI operation: exit instead of serving.
        process.exit(0);
    }
    catch (error) {
        console.error(`Error running health check: ${error}`);
        process.exit(1);
    }
}
629
// Start server
/**
 * Connect the MCP server to a stdio transport and begin serving requests.
 *
 * IMPORTANT: with StdioServerTransport, stdout carries the JSON-RPC protocol
 * stream. Startup diagnostics therefore must go to stderr (console.error);
 * writing them to stdout interleaves plain text with protocol frames and
 * corrupts the session for the connected client.
 */
async function runServer() {
    const transport = new StdioServerTransport();
    await server.connect(transport);
    // Bug fix: these were console.log calls, which write to stdout — the
    // protocol channel — and would break clients. Log to stderr instead.
    console.error("MCP Documentation Management Service started.");
    console.error("Using docs directory:", docsDir);
    console.error("Reading from stdin, writing results to stdout...");
}
runServer().catch((error) => {
    console.error("Fatal error running server:", error);
    process.exit(1);
});
681
+ //# sourceMappingURL=index.js.map