mcp-docs-service 0.2.11 → 0.2.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,7 +1,10 @@
1
1
  #!/usr/bin/env node
2
2
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
3
3
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
- import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
4
+ import {
5
+ CallToolRequestSchema,
6
+ ListToolsRequestSchema,
7
+ } from "@modelcontextprotocol/sdk/types.js";
5
8
  import fs from "fs/promises";
6
9
  import path from "path";
7
10
  import { zodToJsonSchema } from "zod-to-json-schema";
@@ -12,67 +15,98 @@ import { normalizePath, expandHome, validatePath } from "./utils/path.js";
12
15
  import * as DocsHandlers from "./handlers/docs.js";
13
16
  // Import schemas
14
17
  import * as ToolSchemas from "./schemas/tools.js";
18
+
19
+ // Check if we're running under MCP Inspector
20
+ const isMCPInspector =
21
+ process.env.MCP_INSPECTOR === "true" ||
22
+ process.argv.some((arg) => arg.includes("modelcontextprotocol/inspector"));
23
+ // Create a logging function that respects MCP Inspector mode
24
+ const log = (...args) => {
25
+ if (!isMCPInspector) {
26
+ console.log(...args);
27
+ }
28
+ };
29
+
30
+ const errorLog = (...args) => {
31
+ a;
32
+ console.error(...args);
33
+ };
34
+
15
35
  // Command line argument parsing
16
36
  const args = process.argv.slice(2);
17
37
  let allowedDirectories = [];
18
38
  let runHealthCheck = false;
39
+
19
40
  // Check for health check flag
20
41
  if (args.includes("--health-check")) {
21
- runHealthCheck = true;
22
- // Remove the health check flag from args
23
- const healthCheckIndex = args.indexOf("--health-check");
24
- args.splice(healthCheckIndex, 1);
42
+ runHealthCheck = true;
43
+ // Remove the health check flag from args
44
+ const healthCheckIndex = args.indexOf("--health-check");
45
+ args.splice(healthCheckIndex, 1);
25
46
  }
47
+
26
48
  // Filter out any other flags (starting with --)
27
49
  const directoryArgs = args.filter((arg) => !arg.startsWith("--"));
28
50
  if (directoryArgs.length === 0) {
29
- // Use default docs directory if none is provided
30
- const defaultDocsDir = path.join(process.cwd(), "docs");
31
- try {
32
- const stats = await fs.stat(defaultDocsDir);
33
- if (stats.isDirectory()) {
34
- console.log(`Using default docs directory: ${defaultDocsDir}`);
35
- allowedDirectories = [normalizePath(path.resolve(defaultDocsDir))];
36
- }
37
- else {
38
- console.error(`Error: Default docs directory ${defaultDocsDir} is not a directory`);
39
- console.error("Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]");
40
- process.exit(1);
41
- }
42
- }
43
- catch (error) {
44
- console.error(`Error: Default docs directory ${defaultDocsDir} does not exist or is not accessible`);
45
- console.error("Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]");
46
- process.exit(1);
51
+ // Use default docs directory if none is provided
52
+ const defaultDocsDir = path.join(process.cwd(), "docs");
53
+ try {
54
+ const stats = await fs.stat(defaultDocsDir);
55
+ if (stats.isDirectory()) {
56
+ log(`Using default docs directory: ${defaultDocsDir}`);
57
+ allowedDirectories = [normalizePath(path.resolve(defaultDocsDir))];
58
+ } else {
59
+ errorLog(
60
+ `Error: Default docs directory ${defaultDocsDir} is not a directory`
61
+ );
62
+ errorLog(
63
+ "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
64
+ );
65
+ process.exit(1);
47
66
  }
48
- }
49
- else {
50
- // Store allowed directories in normalized form
51
- allowedDirectories = directoryArgs.map((dir) => normalizePath(path.resolve(expandHome(dir))));
52
- // Validate that all directories exist and are accessible
53
- await Promise.all(directoryArgs.map(async (dir) => {
54
- try {
55
- const stats = await fs.stat(dir);
56
- if (!stats.isDirectory()) {
57
- console.error(`Error: ${dir} is not a directory`);
58
- process.exit(1);
59
- }
67
+ } catch (error) {
68
+ errorLog(
69
+ `Error: Default docs directory ${defaultDocsDir} does not exist or is not accessible`
70
+ );
71
+ errorLog(
72
+ "Usage: mcp-server-filesystem <allowed-directory> [additional-directories...]"
73
+ );
74
+ process.exit(1);
75
+ }
76
+ } else {
77
+ // Store allowed directories in normalized form
78
+ allowedDirectories = directoryArgs.map((dir) =>
79
+ normalizePath(path.resolve(expandHome(dir)))
80
+ );
81
+ // Validate that all directories exist and are accessible
82
+ await Promise.all(
83
+ directoryArgs.map(async (dir) => {
84
+ try {
85
+ const stats = await fs.stat(dir);
86
+ if (!stats.isDirectory()) {
87
+ errorLog(`Error: ${dir} is not a directory`);
88
+ process.exit(1);
60
89
  }
61
- catch (error) {
62
- console.error(`Error accessing directory ${dir}:`, error);
63
- process.exit(1);
64
- }
65
- }));
90
+ } catch (error) {
91
+ errorLog(`Error accessing directory ${dir}:`, error);
92
+ process.exit(1);
93
+ }
94
+ })
95
+ );
66
96
  }
97
+
67
98
  // Create server
68
- const server = new Server({
99
+ const server = new Server(
100
+ {
69
101
  name: "secure-filesystem-server",
70
- version: "0.2.11",
71
- }, {
102
+ version: "1.0.0",
103
+ },
104
+ {
72
105
  capabilities: {
73
- tools: {},
106
+ tools: {},
74
107
  },
75
- });
108
+ }
109
+ );
76
110
  // Define tools
77
111
  // ===================================================================
78
112
  // DOCUMENTATION TOOLS
@@ -80,478 +114,520 @@ const server = new Server({
80
114
  // These tools are specifically designed for working with documentation
81
115
  // files (markdown with frontmatter)
82
116
  const documentationTools = [
83
- // Read document - reads a markdown document and extracts its content and metadata
84
- {
85
- name: "read_document",
86
- description: "Read a markdown document and extract its content and metadata",
87
- schema: ToolSchemas.ReadDocumentSchema,
88
- handler: async (args) => {
89
- return await DocsHandlers.readDocument(args.path, allowedDirectories);
90
- },
117
+ // Read document - reads a markdown document and extracts its content and metadata
118
+ {
119
+ name: "read_document",
120
+ description:
121
+ "Read a markdown document and extract its content and metadata",
122
+ schema: ToolSchemas.ReadDocumentSchema,
123
+ handler: async (args) => {
124
+ return await DocsHandlers.readDocument(args.path, allowedDirectories);
91
125
  },
92
- // List documents - lists all markdown documents in a directory
93
- {
94
- name: "list_documents",
95
- description: "List all markdown documents in a directory",
96
- schema: ToolSchemas.ListDocumentsSchema,
97
- handler: async (args) => {
98
- return await DocsHandlers.listDocuments(args.basePath || "", allowedDirectories);
99
- },
126
+ },
127
+ // List documents - lists all markdown documents in a directory
128
+ {
129
+ name: "list_documents",
130
+ description: "List all markdown documents in a directory",
131
+ schema: ToolSchemas.ListDocumentsSchema,
132
+ handler: async (args) => {
133
+ return await DocsHandlers.listDocuments(
134
+ args.basePath || "",
135
+ allowedDirectories
136
+ );
100
137
  },
101
- // Get structure - gets the structure of the documentation directory
102
- {
103
- name: "get_structure",
104
- description: "Get the structure of the documentation directory",
105
- schema: ToolSchemas.GetStructureSchema,
106
- handler: async (args) => {
107
- return await DocsHandlers.getStructure(args.basePath || "", allowedDirectories);
108
- },
138
+ },
139
+ // Get structure - gets the structure of the documentation directory
140
+ {
141
+ name: "get_structure",
142
+ description: "Get the structure of the documentation directory",
143
+ schema: ToolSchemas.GetStructureSchema,
144
+ handler: async (args) => {
145
+ return await DocsHandlers.getStructure(
146
+ args.basePath || "",
147
+ allowedDirectories
148
+ );
109
149
  },
110
- // Get navigation - gets the navigation structure for the documentation
111
- {
112
- name: "get_navigation",
113
- description: "Get the navigation structure for the documentation",
114
- schema: ToolSchemas.GetNavigationSchema,
115
- handler: async (args) => {
116
- return await DocsHandlers.getNavigation(args.basePath || "", allowedDirectories);
117
- },
150
+ },
151
+ // Get navigation - gets the navigation structure for the documentation
152
+ {
153
+ name: "get_navigation",
154
+ description: "Get the navigation structure for the documentation",
155
+ schema: ToolSchemas.GetNavigationSchema,
156
+ handler: async (args) => {
157
+ return await DocsHandlers.getNavigation(
158
+ args.basePath || "",
159
+ allowedDirectories
160
+ );
118
161
  },
119
- // Get docs knowledge base - creates a comprehensive knowledge base of documentation
120
- {
121
- name: "get_docs_knowledge_base",
122
- description: "Create a comprehensive knowledge base of documentation for LLM context",
123
- schema: ToolSchemas.GetDocsKnowledgeBaseSchema,
124
- handler: async (args) => {
162
+ },
163
+ // Get docs knowledge base - creates a comprehensive knowledge base of documentation
164
+ {
165
+ name: "get_docs_knowledge_base",
166
+ description:
167
+ "Create a comprehensive knowledge base of documentation for LLM context",
168
+ schema: ToolSchemas.GetDocsKnowledgeBaseSchema,
169
+ handler: async (args) => {
170
+ try {
171
+ // First get the navigation structure
172
+ const navResult = await DocsHandlers.getNavigation(
173
+ args.basePath || "",
174
+ allowedDirectories
175
+ );
176
+ if (navResult.isError) {
177
+ return navResult;
178
+ }
179
+ // Get all documents
180
+ const docsResult = await DocsHandlers.listDocuments(
181
+ args.basePath || "",
182
+ allowedDirectories
183
+ );
184
+ if (docsResult.isError) {
185
+ return docsResult;
186
+ }
187
+ const documents = docsResult.metadata?.documents || [];
188
+ const navigation = navResult.metadata?.navigation || [];
189
+ // Create a map of path to document for quick lookup
190
+ const documentMap = new Map();
191
+ documents.forEach((doc) => {
192
+ documentMap.set(doc.path, doc);
193
+ });
194
+ // Create knowledge base structure
195
+ const knowledgeBase = {
196
+ navigation,
197
+ documents: [],
198
+ categories: {},
199
+ tags: {},
200
+ };
201
+ // Process documents to extract summaries if requested
202
+ const includeSummaries = args.includeSummaries !== false; // Default to true
203
+ const maxSummaryLength = args.maxSummaryLength || 500;
204
+ // Process all documents
205
+ for (const doc of documents) {
206
+ // Create document entry with metadata
207
+ const docEntry = {
208
+ path: doc.path,
209
+ name: doc.name,
210
+ metadata: doc.metadata || {},
211
+ };
212
+ // Add summary if requested
213
+ if (includeSummaries) {
125
214
  try {
126
- // First get the navigation structure
127
- const navResult = await DocsHandlers.getNavigation(args.basePath || "", allowedDirectories);
128
- if (navResult.isError) {
129
- return navResult;
130
- }
131
- // Get all documents
132
- const docsResult = await DocsHandlers.listDocuments(args.basePath || "", allowedDirectories);
133
- if (docsResult.isError) {
134
- return docsResult;
135
- }
136
- const documents = docsResult.metadata?.documents || [];
137
- const navigation = navResult.metadata?.navigation || [];
138
- // Create a map of path to document for quick lookup
139
- const documentMap = new Map();
140
- documents.forEach((doc) => {
141
- documentMap.set(doc.path, doc);
142
- });
143
- // Create knowledge base structure
144
- const knowledgeBase = {
145
- navigation,
146
- documents: [],
147
- categories: {},
148
- tags: {},
149
- };
150
- // Process documents to extract summaries if requested
151
- const includeSummaries = args.includeSummaries !== false; // Default to true
152
- const maxSummaryLength = args.maxSummaryLength || 500;
153
- // Process all documents
154
- for (const doc of documents) {
155
- // Create document entry with metadata
156
- const docEntry = {
157
- path: doc.path,
158
- name: doc.name,
159
- metadata: doc.metadata || {},
160
- };
161
- // Add summary if requested
162
- if (includeSummaries) {
163
- try {
164
- const docContent = await DocsHandlers.readDocument(doc.path, allowedDirectories);
165
- if (!docContent.isError && docContent.metadata?.content) {
166
- // Extract a summary (first few paragraphs)
167
- const content = docContent.metadata.content;
168
- const paragraphs = content.split("\n\n");
169
- let summary = "";
170
- // Get first few paragraphs up to maxSummaryLength
171
- for (const para of paragraphs) {
172
- if (summary.length + para.length <= maxSummaryLength) {
173
- summary += para + "\n\n";
174
- }
175
- else {
176
- // Add partial paragraph to reach maxSummaryLength
177
- const remainingLength = maxSummaryLength - summary.length;
178
- if (remainingLength > 0) {
179
- summary += para.substring(0, remainingLength) + "...";
180
- }
181
- break;
182
- }
183
- }
184
- docEntry.summary = summary.trim();
185
- }
186
- }
187
- catch (error) {
188
- // Skip summary if there's an error
189
- docEntry.summary = "Error generating summary";
190
- }
191
- }
192
- // Add to knowledge base
193
- knowledgeBase.documents.push(docEntry);
194
- // Organize by categories (based on directory structure)
195
- const dirPath = path.dirname(doc.path);
196
- if (!knowledgeBase.categories[dirPath]) {
197
- knowledgeBase.categories[dirPath] = [];
198
- }
199
- knowledgeBase.categories[dirPath].push(docEntry);
200
- // Organize by tags
201
- if (doc.metadata?.tags) {
202
- for (const tag of doc.metadata.tags) {
203
- if (!knowledgeBase.tags[tag]) {
204
- knowledgeBase.tags[tag] = [];
205
- }
206
- knowledgeBase.tags[tag].push(docEntry);
207
- }
215
+ const docContent = await DocsHandlers.readDocument(
216
+ doc.path,
217
+ allowedDirectories
218
+ );
219
+ if (!docContent.isError && docContent.metadata?.content) {
220
+ // Extract a summary (first few paragraphs)
221
+ const content = docContent.metadata.content;
222
+ const paragraphs = content.split("\n\n");
223
+ let summary = "";
224
+ // Get first few paragraphs up to maxSummaryLength
225
+ for (const para of paragraphs) {
226
+ if (summary.length + para.length <= maxSummaryLength) {
227
+ summary += para + "\n\n";
228
+ } else {
229
+ // Add partial paragraph to reach maxSummaryLength
230
+ const remainingLength = maxSummaryLength - summary.length;
231
+ if (remainingLength > 0) {
232
+ summary += para.substring(0, remainingLength) + "...";
208
233
  }
234
+ break;
235
+ }
209
236
  }
210
- return {
211
- content: [
212
- {
213
- type: "text",
214
- text: `Generated knowledge base with ${knowledgeBase.documents.length} documents`,
215
- },
216
- ],
217
- metadata: {
218
- knowledgeBase,
219
- },
220
- };
237
+ docEntry.summary = summary.trim();
238
+ }
239
+ } catch (error) {
240
+ // Skip summary if there's an error
241
+ docEntry.summary = "Error generating summary";
221
242
  }
222
- catch (error) {
223
- return {
224
- content: [
225
- {
226
- type: "text",
227
- text: `Error generating knowledge base: ${error.message}`,
228
- },
229
- ],
230
- isError: true,
231
- };
243
+ }
244
+ // Add to knowledge base
245
+ knowledgeBase.documents.push(docEntry);
246
+ // Organize by categories (based on directory structure)
247
+ const dirPath = path.dirname(doc.path);
248
+ if (!knowledgeBase.categories[dirPath]) {
249
+ knowledgeBase.categories[dirPath] = [];
250
+ }
251
+ knowledgeBase.categories[dirPath].push(docEntry);
252
+ // Organize by tags
253
+ if (doc.metadata?.tags) {
254
+ for (const tag of doc.metadata.tags) {
255
+ if (!knowledgeBase.tags[tag]) {
256
+ knowledgeBase.tags[tag] = [];
257
+ }
258
+ knowledgeBase.tags[tag].push(docEntry);
232
259
  }
233
- },
260
+ }
261
+ }
262
+ return {
263
+ content: [
264
+ {
265
+ type: "text",
266
+ text: `Generated knowledge base with ${knowledgeBase.documents.length} documents`,
267
+ },
268
+ ],
269
+ metadata: {
270
+ knowledgeBase,
271
+ },
272
+ };
273
+ } catch (error) {
274
+ return {
275
+ content: [
276
+ {
277
+ type: "text",
278
+ text: `Error generating knowledge base: ${error.message}`,
279
+ },
280
+ ],
281
+ isError: true,
282
+ };
283
+ }
234
284
  },
235
- // Write document - writes content to a markdown document with frontmatter
236
- {
237
- name: "write_document",
238
- description: "Write content to a markdown document with frontmatter",
239
- schema: ToolSchemas.WriteDocumentSchema,
240
- handler: async (args) => {
241
- try {
242
- const normalizedPath = await validatePath(args.path, allowedDirectories);
243
- // Convert metadata to frontmatter and combine with content
244
- const frontmatter = args.metadata
245
- ? matter.stringify(args.content, args.metadata)
246
- : args.content;
247
- // Ensure the directory exists
248
- const dirPath = path.dirname(normalizedPath);
249
- await fs.mkdir(dirPath, { recursive: true });
250
- // Write the file
251
- await fs.writeFile(normalizedPath, frontmatter);
252
- return {
253
- content: [{ type: "text", text: "Document written successfully" }],
254
- metadata: {
255
- path: args.path,
256
- },
257
- };
258
- }
259
- catch (error) {
260
- return {
261
- content: [
262
- { type: "text", text: `Error writing document: ${error.message}` },
263
- ],
264
- isError: true,
265
- };
266
- }
267
- },
285
+ },
286
+ // Write document - writes content to a markdown document with frontmatter
287
+ {
288
+ name: "write_document",
289
+ description: "Write content to a markdown document with frontmatter",
290
+ schema: ToolSchemas.WriteDocumentSchema,
291
+ handler: async (args) => {
292
+ try {
293
+ const normalizedPath = await validatePath(
294
+ args.path,
295
+ allowedDirectories
296
+ );
297
+ // Convert metadata to frontmatter and combine with content
298
+ const frontmatter = args.metadata
299
+ ? matter.stringify(args.content, args.metadata)
300
+ : args.content;
301
+ // Ensure the directory exists
302
+ const dirPath = path.dirname(normalizedPath);
303
+ await fs.mkdir(dirPath, { recursive: true });
304
+ // Write the file
305
+ await fs.writeFile(normalizedPath, frontmatter);
306
+ return {
307
+ content: [{ type: "text", text: "Document written successfully" }],
308
+ metadata: {
309
+ path: args.path,
310
+ },
311
+ };
312
+ } catch (error) {
313
+ return {
314
+ content: [
315
+ { type: "text", text: `Error writing document: ${error.message}` },
316
+ ],
317
+ isError: true,
318
+ };
319
+ }
268
320
  },
269
- // Edit document - applies edits to a markdown document while preserving frontmatter
270
- {
271
- name: "edit_document",
272
- description: "Apply edits to a markdown document while preserving frontmatter",
273
- schema: ToolSchemas.EditDocumentSchema,
274
- handler: async (args) => {
275
- try {
276
- // First read the document to get its current content and metadata
277
- const docResult = await DocsHandlers.readDocument(args.path, allowedDirectories);
278
- if (docResult.isError) {
279
- return docResult;
280
- }
281
- const normalizedPath = await validatePath(args.path, allowedDirectories);
282
- // Read the current content
283
- const content = await fs.readFile(normalizedPath, "utf-8");
284
- // Apply edits
285
- let newContent = content;
286
- let appliedEdits = 0;
287
- for (const edit of args.edits) {
288
- if (newContent.includes(edit.oldText)) {
289
- newContent = newContent.replace(edit.oldText, edit.newText);
290
- appliedEdits++;
291
- }
292
- }
293
- // Write the updated content
294
- await fs.writeFile(normalizedPath, newContent);
295
- return {
296
- content: [{ type: "text", text: "Document edited successfully" }],
297
- metadata: {
298
- path: args.path,
299
- appliedEdits,
300
- },
301
- };
302
- }
303
- catch (error) {
304
- return {
305
- content: [
306
- { type: "text", text: `Error editing document: ${error.message}` },
307
- ],
308
- isError: true,
309
- };
310
- }
311
- },
321
+ },
322
+ // Edit document - applies edits to a markdown document while preserving frontmatter
323
+ {
324
+ name: "edit_document",
325
+ description:
326
+ "Apply edits to a markdown document while preserving frontmatter",
327
+ schema: ToolSchemas.EditDocumentSchema,
328
+ handler: async (args) => {
329
+ try {
330
+ // First read the document to get its current content and metadata
331
+ const docResult = await DocsHandlers.readDocument(
332
+ args.path,
333
+ allowedDirectories
334
+ );
335
+ if (docResult.isError) {
336
+ return docResult;
337
+ }
338
+ const normalizedPath = await validatePath(
339
+ args.path,
340
+ allowedDirectories
341
+ );
342
+ // Read the current content
343
+ const content = await fs.readFile(normalizedPath, "utf-8");
344
+ // Apply edits
345
+ let newContent = content;
346
+ let appliedEdits = 0;
347
+ for (const edit of args.edits) {
348
+ if (newContent.includes(edit.oldText)) {
349
+ newContent = newContent.replace(edit.oldText, edit.newText);
350
+ appliedEdits++;
351
+ }
352
+ }
353
+ // Write the updated content
354
+ await fs.writeFile(normalizedPath, newContent);
355
+ return {
356
+ content: [{ type: "text", text: "Document edited successfully" }],
357
+ metadata: {
358
+ path: args.path,
359
+ appliedEdits,
360
+ },
361
+ };
362
+ } catch (error) {
363
+ return {
364
+ content: [
365
+ { type: "text", text: `Error editing document: ${error.message}` },
366
+ ],
367
+ isError: true,
368
+ };
369
+ }
312
370
  },
313
- // Delete document - deletes a markdown document
314
- {
315
- name: "delete_document",
316
- description: "Delete a markdown document",
317
- schema: ToolSchemas.DeleteDocumentSchema,
318
- handler: async (args) => {
319
- try {
320
- const normalizedPath = await validatePath(args.path, allowedDirectories);
321
- // Check if the file exists and is a markdown file
322
- const stats = await fs.stat(normalizedPath);
323
- if (!stats.isFile() || !normalizedPath.endsWith(".md")) {
324
- return {
325
- content: [
326
- {
327
- type: "text",
328
- text: `Error: ${args.path} is not a markdown document`,
329
- },
330
- ],
331
- isError: true,
332
- };
333
- }
334
- // Delete the file
335
- await fs.unlink(normalizedPath);
336
- return {
337
- content: [{ type: "text", text: "Document deleted successfully" }],
338
- metadata: {
339
- path: args.path,
340
- },
341
- };
342
- }
343
- catch (error) {
344
- return {
345
- content: [
346
- { type: "text", text: `Error deleting document: ${error.message}` },
347
- ],
348
- isError: true,
349
- };
350
- }
351
- },
371
+ },
372
+ // Delete document - deletes a markdown document
373
+ {
374
+ name: "delete_document",
375
+ description: "Delete a markdown document",
376
+ schema: ToolSchemas.DeleteDocumentSchema,
377
+ handler: async (args) => {
378
+ try {
379
+ const normalizedPath = await validatePath(
380
+ args.path,
381
+ allowedDirectories
382
+ );
383
+ // Check if the file exists and is a markdown file
384
+ const stats = await fs.stat(normalizedPath);
385
+ if (!stats.isFile() || !normalizedPath.endsWith(".md")) {
386
+ return {
387
+ content: [
388
+ {
389
+ type: "text",
390
+ text: `Error: ${args.path} is not a markdown document`,
391
+ },
392
+ ],
393
+ isError: true,
394
+ };
395
+ }
396
+ // Delete the file
397
+ await fs.unlink(normalizedPath);
398
+ return {
399
+ content: [{ type: "text", text: "Document deleted successfully" }],
400
+ metadata: {
401
+ path: args.path,
402
+ },
403
+ };
404
+ } catch (error) {
405
+ return {
406
+ content: [
407
+ { type: "text", text: `Error deleting document: ${error.message}` },
408
+ ],
409
+ isError: true,
410
+ };
411
+ }
352
412
  },
353
- // Search documents - searches for markdown documents matching criteria
354
- {
355
- name: "search_documents",
356
- description: "Search for markdown documents matching criteria",
357
- schema: ToolSchemas.SearchDocumentsSchema,
358
- handler: async (args) => {
359
- try {
360
- // Get the list of documents first
361
- const listResult = await DocsHandlers.listDocuments(args.basePath || "", allowedDirectories);
362
- if (listResult.isError) {
363
- return listResult;
364
- }
365
- let documents = listResult.metadata?.documents || [];
366
- // Filter by query if provided
367
- if (args.query) {
368
- documents = documents.filter((doc) => {
369
- // Check if query matches document path, name, or metadata
370
- const docString = JSON.stringify(doc).toLowerCase();
371
- return docString.includes(args.query.toLowerCase());
372
- });
373
- }
374
- // Filter by tags if provided
375
- if (args.tags && args.tags.length > 0) {
376
- documents = documents.filter((doc) => {
377
- const docTags = doc.metadata?.tags || [];
378
- return args.tags.some((tag) => docTags.includes(tag));
379
- });
380
- }
381
- // Filter by status if provided
382
- if (args.status) {
383
- documents = documents.filter((doc) => doc.metadata?.status === args.status);
384
- }
385
- return {
386
- content: [
387
- {
388
- type: "text",
389
- text: `Found ${documents.length} matching documents`,
390
- },
391
- ],
392
- metadata: {
393
- documents,
394
- },
395
- };
396
- }
397
- catch (error) {
398
- return {
399
- content: [
400
- {
401
- type: "text",
402
- text: `Error searching documents: ${error.message}`,
403
- },
404
- ],
405
- isError: true,
406
- };
407
- }
408
- },
413
+ },
414
+ // Search documents - searches for markdown documents matching criteria
415
+ {
416
+ name: "search_documents",
417
+ description: "Search for markdown documents matching criteria",
418
+ schema: ToolSchemas.SearchDocumentsSchema,
419
+ handler: async (args) => {
420
+ try {
421
+ // Get the list of documents first
422
+ const listResult = await DocsHandlers.listDocuments(
423
+ args.basePath || "",
424
+ allowedDirectories
425
+ );
426
+ if (listResult.isError) {
427
+ return listResult;
428
+ }
429
+ let documents = listResult.metadata?.documents || [];
430
+ // Filter by query if provided
431
+ if (args.query) {
432
+ documents = documents.filter((doc) => {
433
+ // Check if query matches document path, name, or metadata
434
+ const docString = JSON.stringify(doc).toLowerCase();
435
+ return docString.includes(args.query.toLowerCase());
436
+ });
437
+ }
438
+ // Filter by tags if provided
439
+ if (args.tags && args.tags.length > 0) {
440
+ documents = documents.filter((doc) => {
441
+ const docTags = doc.metadata?.tags || [];
442
+ return args.tags.some((tag) => docTags.includes(tag));
443
+ });
444
+ }
445
+ // Filter by status if provided
446
+ if (args.status) {
447
+ documents = documents.filter(
448
+ (doc) => doc.metadata?.status === args.status
449
+ );
450
+ }
451
+ return {
452
+ content: [
453
+ {
454
+ type: "text",
455
+ text: `Found ${documents.length} matching documents`,
456
+ },
457
+ ],
458
+ metadata: {
459
+ documents,
460
+ },
461
+ };
462
+ } catch (error) {
463
+ return {
464
+ content: [
465
+ {
466
+ type: "text",
467
+ text: `Error searching documents: ${error.message}`,
468
+ },
469
+ ],
470
+ isError: true,
471
+ };
472
+ }
409
473
  },
410
- // Check documentation health - checks the health of documentation
411
- {
412
- name: "check_documentation_health",
413
- description: "Check the health of documentation and identify issues",
414
- schema: ToolSchemas.CheckDocumentationHealthSchema,
415
- handler: async (args) => {
416
- try {
417
- // If basePath is provided, validate it
418
- let validatedBasePath = "";
419
- if (args.basePath) {
420
- try {
421
- validatedBasePath = await validatePath(args.basePath, allowedDirectories);
422
- }
423
- catch (error) {
424
- // If validation fails, use the first allowed directory
425
- console.warn(`Warning: Invalid basePath "${args.basePath}". Using default directory instead.`);
426
- validatedBasePath = allowedDirectories[0];
427
- }
428
- }
429
- return await DocsHandlers.checkDocumentationHealth(validatedBasePath, {
430
- checkLinks: args.checkLinks,
431
- checkMetadata: args.checkMetadata,
432
- checkOrphans: args.checkOrphans,
433
- requiredMetadataFields: args.requiredMetadataFields,
434
- }, allowedDirectories);
435
- }
436
- catch (error) {
437
- return {
438
- content: [
439
- {
440
- type: "text",
441
- text: `Error checking documentation health: ${error.message}`,
442
- },
443
- ],
444
- isError: true,
445
- };
446
- }
447
- },
474
+ },
475
+ // Check documentation health - checks the health of documentation
476
+ {
477
+ name: "check_documentation_health",
478
+ description: "Check the health of documentation and identify issues",
479
+ schema: ToolSchemas.CheckDocumentationHealthSchema,
480
+ handler: async (args) => {
481
+ try {
482
+ // If basePath is provided, validate it
483
+ let validatedBasePath = "";
484
+ if (args.basePath) {
485
+ try {
486
+ validatedBasePath = await validatePath(
487
+ args.basePath,
488
+ allowedDirectories
489
+ );
490
+ } catch (error) {
491
+ // If validation fails, use the first allowed directory
492
+ console.warn(
493
+ `Warning: Invalid basePath "${args.basePath}". Using default directory instead.`
494
+ );
495
+ validatedBasePath = allowedDirectories[0];
496
+ }
497
+ }
498
+ return await DocsHandlers.checkDocumentationHealth(
499
+ validatedBasePath,
500
+ {
501
+ checkLinks: args.checkLinks,
502
+ checkMetadata: args.checkMetadata,
503
+ checkOrphans: args.checkOrphans,
504
+ requiredMetadataFields: args.requiredMetadataFields,
505
+ },
506
+ allowedDirectories
507
+ );
508
+ } catch (error) {
509
+ return {
510
+ content: [
511
+ {
512
+ type: "text",
513
+ text: `Error checking documentation health: ${error.message}`,
514
+ },
515
+ ],
516
+ isError: true,
517
+ };
518
+ }
448
519
  },
520
+ },
449
521
  ];
450
522
  // Combine all tools
451
523
  const tools = [...documentationTools];
452
524
  // Register tool handlers
453
525
  server.setRequestHandler(ListToolsRequestSchema, async () => {
454
- return {
455
- tools: tools.map((tool) => ({
456
- name: tool.name,
457
- description: tool.description,
458
- inputSchema: zodToJsonSchema(tool.schema),
459
- })),
460
- };
526
+ return {
527
+ tools: tools.map((tool) => ({
528
+ name: tool.name,
529
+ description: tool.description,
530
+ inputSchema: zodToJsonSchema(tool.schema),
531
+ })),
532
+ };
461
533
  });
462
534
// Dispatch MCP tool calls: look up the named tool, validate its arguments
// against the tool's zod schema, run the handler, and normalize the result
// into the MCP content shape. Any failure is returned as an isError response
// rather than propagated, so the server stays up.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;
  // Find the requested tool
  const tool = tools.find((t) => t.name === name);
  if (!tool) {
    return {
      content: [
        {
          type: "text",
          text: `Tool not found: ${name}`,
        },
      ],
      isError: true,
    };
  }
  try {
    // Parse and validate arguments (throws ZodError on schema mismatch)
    const parsedArgs = tool.schema.parse(args);
    // Call the tool handler with the appropriate type
    const result = await tool.handler(parsedArgs);
    // Ensure the content field is always an array
    if (!result.content || !Array.isArray(result.content)) {
      result.content = [
        { type: "text", text: "Operation completed successfully" },
      ];
    }
    return result;
  } catch (error) {
    console.error(`Error executing tool ${name}:`, error);
    // The thrown value is not guaranteed to be an Error (handlers may throw
    // plain values); guard the .message access so the client never sees
    // "… undefined" in the error text.
    const message = error instanceof Error ? error.message : String(error);
    return {
      content: [
        {
          type: "text",
          text: `Error calling tool ${name}: ${message}`,
        },
      ],
      isError: true,
    };
  }
});
503
574
// If health check flag is set, run the health check against the allowed
// documentation directories, print a human-readable report, and exit with
// status 0 (healthy/complete) or 1 (check failed or threw).
if (runHealthCheck) {
  console.log("Running documentation health check...");
  try {
    const healthCheckResult = await DocsHandlers.checkDocumentationHealth(
      "", // Empty basePath — presumably the handler falls back to the first allowed directory; TODO confirm
      {
        checkLinks: true,
        checkMetadata: true,
        checkOrphans: true,
        requiredMetadataFields: ["title", "description", "status"],
      },
      allowedDirectories
    );
    if (healthCheckResult.isError) {
      console.error("Health check failed:", healthCheckResult.content[0].text);
      process.exit(1);
    }
    const metadata = healthCheckResult.metadata || {};
    console.log("\n=== DOCUMENTATION HEALTH CHECK RESULTS ===\n");
    console.log(`Overall Health Score: ${metadata.score || 0}%`);
    console.log(`Total Documents: ${metadata.totalDocuments || 0}`);
    console.log(
      `Metadata Completeness: ${metadata.metadataCompleteness || 0}%`
    );
    console.log(`Broken Links: ${metadata.brokenLinks || 0}`);
    if (metadata.issues && metadata.issues.length > 0) {
      console.log("\nIssues Found:");
      // Group issues by type
      const issuesByType = {};
      metadata.issues.forEach((issue) => {
        if (!issuesByType[issue.type]) {
          issuesByType[issue.type] = [];
        }
        issuesByType[issue.type].push(issue);
      });
      // Display issues by type, as "TYPE NAME (count)" headings.
      // replaceAll (not replace) so multi-underscore type names like
      // "missing_metadata_field" are fully converted to spaces.
      for (const [type, issues] of Object.entries(issuesByType)) {
        console.log(
          `\n${type.replaceAll("_", " ").toUpperCase()} (${issues.length}):`
        );
        issues.forEach((issue) => {
          console.log(`- ${issue.path}: ${issue.message}`);
        });
      }
    } else {
      console.log("\nNo issues found. Documentation is in good health!");
    }
    console.log("\n=== END OF HEALTH CHECK ===\n");
    // Exit with success
    process.exit(0);
  } catch (error) {
    console.error("Error running health check:", error);
    process.exit(1);
  }
}
554
630
  // Connect to transport and start the server
555
631
  const transport = new StdioServerTransport();
556
632
  await server.connect(transport);
557
- //# sourceMappingURL=index.js.map
633
+ //# sourceMappingURL=index.js.map