mcp-docs-service 0.1.1 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +43 -0
- package/LICENSE +1 -1
- package/README.md +28 -15
- package/dist/handlers/docs.d.ts +17 -0
- package/dist/handlers/docs.js +280 -0
- package/dist/handlers/docs.js.map +1 -0
- package/dist/handlers/file.d.ts +32 -0
- package/dist/handlers/file.js +222 -0
- package/dist/handlers/file.js.map +1 -0
- package/dist/handlers/index.d.ts +1 -0
- package/dist/handlers/index.js +3 -0
- package/dist/handlers/index.js.map +1 -0
- package/dist/index.d.ts +2 -24
- package/dist/index.js +431 -49
- package/dist/index.js.map +1 -1
- package/dist/schemas/index.d.ts +1 -0
- package/dist/schemas/index.js +3 -0
- package/dist/schemas/index.js.map +1 -0
- package/dist/schemas/tools.d.ts +141 -0
- package/dist/schemas/tools.js +46 -0
- package/dist/schemas/tools.js.map +1 -0
- package/dist/types/docs.d.ts +49 -0
- package/dist/types/docs.js +2 -0
- package/dist/types/docs.js.map +1 -0
- package/dist/types/file.d.ts +21 -0
- package/dist/types/file.js +2 -0
- package/dist/types/file.js.map +1 -0
- package/dist/types/index.d.ts +34 -43
- package/dist/types/index.js +3 -5
- package/dist/types/index.js.map +1 -1
- package/dist/types/tools.d.ts +11 -0
- package/dist/types/tools.js +2 -0
- package/dist/types/tools.js.map +1 -0
- package/dist/utils/file.d.ts +24 -0
- package/dist/utils/file.js +94 -0
- package/dist/utils/file.js.map +1 -0
- package/dist/utils/index.d.ts +1 -60
- package/dist/utils/index.js +2 -151
- package/dist/utils/index.js.map +1 -1
- package/dist/utils/path.d.ts +16 -0
- package/dist/utils/path.js +39 -0
- package/dist/utils/path.js.map +1 -0
- package/package.json +20 -6
- package/dist/cli/bin.d.ts +0 -6
- package/dist/cli/bin.js +0 -49
- package/dist/cli/bin.js.map +0 -1
- package/dist/cli/index.d.ts +0 -16
- package/dist/cli/index.js +0 -96
- package/dist/cli/index.js.map +0 -1
- package/dist/core/docAnalyzer.d.ts +0 -25
- package/dist/core/docAnalyzer.js +0 -118
- package/dist/core/docAnalyzer.js.map +0 -1
- package/dist/core/docManager.d.ts +0 -48
- package/dist/core/docManager.js +0 -257
- package/dist/core/docManager.js.map +0 -1
- package/dist/core/docProcessor.d.ts +0 -20
- package/dist/core/docProcessor.js +0 -127
- package/dist/core/docProcessor.js.map +0 -1
- package/dist/core/mcpDocsServer.d.ts +0 -61
- package/dist/core/mcpDocsServer.js +0 -395
- package/dist/core/mcpDocsServer.js.map +0 -1
package/CHANGELOG.md
ADDED
@@ -0,0 +1,43 @@
# Changelog

All notable changes to the MCP Docs Service will be documented in this file.

## [0.2.1] - 2023-03-12

### Added

- Additional documentation improvements
- Enhanced knowledge base generation capabilities

## [0.2.0] - 2023-03-12

### Added

- New `get_docs_knowledge_base` tool for creating comprehensive knowledge bases for LLM context
- Knowledge base generation example in `docs/examples/knowledge-base-generator.md`
- Support for document summaries in knowledge base
- Organization of documents by categories and tags in knowledge base
- Updated documentation to reflect new features

### Changed

- Improved documentation structure and organization
- Updated roadmap to include knowledge base features
- Enhanced basic usage tutorial with knowledge base examples
- Removed unnecessary file operation tools in favor of document-specific tools

### Fixed

- TypeScript type errors in knowledge base implementation
- Various documentation improvements and corrections

## [0.1.1] - 2023-03-11

### Added

- Initial release of MCP Docs Service
- Basic documentation management functionality
- Support for reading, writing, editing, and deleting markdown files with frontmatter
- Search functionality for documentation
- Navigation generation
- Documentation structure analysis
package/LICENSE
CHANGED
package/README.md
CHANGED
@@ -7,9 +7,26 @@ The MCP Documentation Service is a custom implementation of the Model Context Pr
 - **Document Management**: Create, read, update, and delete markdown documentation files
 - **Metadata Management**: Work with document frontmatter (YAML metadata)
 - **Search**: Search through documentation using keywords and filters
+- **Knowledge Base Generation**: Create comprehensive knowledge bases for LLM context
+- **Structure Analysis**: Analyze documentation structure and relationships
+- **Navigation Generation**: Generate navigation structures for documentation
+- **Tag Management**: Organize documentation by tags and categories
 - **Analytics**: Analyze documentation health and get suggestions for improvement
 - **Custom Directory Support**: Specify a custom docs directory and create it if it doesn't exist
 
+## Documentation
+
+Comprehensive documentation is available in the `docs` directory:
+
+- [Getting Started Guide](docs/guides/getting-started.md) - Introduction to the MCP Docs Manager
+- [API Overview](docs/api/overview.md) - Overview of the API and available tools
+- [Tools Reference](docs/api/tools-reference.md) - Complete reference of all available tools
+- [Basic Usage Tutorial](docs/tutorials/basic-usage.md) - Tutorial for basic usage
+- [Examples](docs/examples/) - Code examples for common tasks
+- [Navigation Generator](docs/examples/navigation-generator.md) - Example of how to generate navigation for documentation
+- [Knowledge Base Generator](docs/examples/knowledge-base-generator.md) - Example of how to generate a knowledge base for LLM context
+- [Roadmap](docs/roadmap.md) - Development roadmap and planned features
+
 ## Installation
 
 ### Via npx (Recommended)
@@ -193,21 +210,17 @@ get_document(path="architecture/overview.md")
 
 ## Available Commands
 
-###
-
-- **
-- **
-- **
-- **
-- **
-- **
-
-
-
-- **search_documents(query="search term", directory="", tags=["tag1"], status="published")**: Search documents
-- **analyze_docs(directory="")**: Analyze documentation health
-- **get_health_score()**: Get overall documentation health score
-- **get_suggestions()**: Get suggestions for improving documentation
+### Documentation Tools
+
+- **read_document(path="path/to/doc.md")**: Read a markdown document and extract its content and metadata
+- **list_documents(basePath="")**: List all markdown documents in a directory
+- **get_structure(basePath="")**: Get the structure of the documentation directory
+- **get_navigation(basePath="")**: Get the navigation structure for the documentation
+- **get_docs_knowledge_base(basePath="", includeSummaries=true, maxSummaryLength=500)**: Create a comprehensive knowledge base of documentation for LLM context
+- **write_document(path="path/to/doc.md", content="content", metadata={...})**: Write content to a markdown document with frontmatter
+- **edit_document(path="path/to/doc.md", edits=[{oldText: "...", newText: "..."}])**: Apply edits to a markdown document while preserving frontmatter
+- **delete_document(path="path/to/doc.md")**: Delete a markdown document
+- **search_documents(basePath="", query="search term", tags=["tag1"], status="published")**: Search for markdown documents matching criteria
 
 ## License
 
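Editor's note: the commands above are exposed as MCP tools, so a client invokes them by tool name with a JSON arguments object. Below is a minimal sketch using the official TypeScript SDK (`@modelcontextprotocol/sdk`); the client name, CLI arguments, and document path are illustrative assumptions, not taken from this package, and the SDK calls reflect its commonly documented client API.

```ts
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

// Spawn the docs service over stdio (command/args are assumptions for illustration).
const transport = new StdioClientTransport({
  command: "npx",
  args: ["-y", "mcp-docs-service"],
});

const client = new Client({ name: "docs-example-client", version: "0.0.1" });
await client.connect(transport);

// Call one of the documentation tools listed above by name.
const result = await client.callTool({
  name: "read_document",
  arguments: { path: "guides/getting-started.md" },
});
console.log(result.content);

await client.close();
```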
package/dist/handlers/docs.d.ts
ADDED
@@ -0,0 +1,17 @@
import { ToolResponse } from "../types/tools.js";
/**
 * Reads a markdown document and extracts its content and metadata
 */
export declare function readDocument(docPath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Lists all markdown documents in a directory
 */
export declare function listDocuments(basePath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Gets the structure of the documentation directory
 */
export declare function getStructure(basePath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Gets the navigation structure for the documentation
 */
export declare function getNavigation(basePath: string, allowedDirectories: string[]): Promise<ToolResponse>;
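Editor's note: the `ToolResponse` type imported here is declared in `package/dist/types/tools.d.ts` (+11 lines in this diff), whose content is not shown above. Judging from how the handlers below construct their return values, a shape along these lines would be consistent; this is an inferred sketch, not the package's actual declaration.

```ts
// Inferred from the handler implementations in this diff; the real declaration
// lives in package/dist/types/tools.d.ts and may differ.
interface ToolResponse {
  content: Array<{ type: "text"; text: string }>;
  metadata?: Record<string, unknown>;
  isError?: boolean;
}
```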
package/dist/handlers/docs.js
ADDED
@@ -0,0 +1,280 @@
import fs from "fs/promises";
import path from "path";
import { validatePath } from "../utils/path.js";
import matter from "gray-matter";
/**
 * Reads a markdown document and extracts its content and metadata
 */
export async function readDocument(docPath, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(docPath, allowedDirectories);
        // Read the file
        const content = await fs.readFile(normalizedPath, "utf-8");
        // Parse frontmatter
        const { data: metadata, content: markdownContent } = matter(content);
        return {
            content: [{ type: "text", text: "Document read successfully" }],
            metadata: {
                path: docPath,
                content: markdownContent,
                metadata,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error reading document: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Lists all markdown documents in a directory
 */
export async function listDocuments(basePath, allowedDirectories) {
    try {
        const normalizedBasePath = basePath
            ? await validatePath(basePath, allowedDirectories)
            : allowedDirectories[0];
        const documents = [];
        async function processDirectory(dirPath) {
            const entries = await fs.readdir(dirPath, { withFileTypes: true });
            for (const entry of entries) {
                const entryPath = path.join(dirPath, entry.name);
                if (entry.isDirectory()) {
                    await processDirectory(entryPath);
                }
                else if (entry.name.endsWith(".md")) {
                    try {
                        const content = await fs.readFile(entryPath, "utf-8");
                        const { data: metadata } = matter(content);
                        documents.push({
                            path: entryPath,
                            name: entry.name,
                            metadata: metadata,
                        });
                    }
                    catch (error) {
                        console.error(`Error processing ${entryPath}: ${error.message}`);
                    }
                }
            }
        }
        await processDirectory(normalizedBasePath);
        return {
            content: [{ type: "text", text: `Found ${documents.length} documents` }],
            metadata: {
                documents,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error listing documents: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Gets the structure of the documentation directory
 */
export async function getStructure(basePath, allowedDirectories) {
    try {
        const normalizedBasePath = basePath
            ? await validatePath(basePath, allowedDirectories)
            : allowedDirectories[0];
        async function buildStructure(dirPath, relativePath = "") {
            try {
                const entries = await fs.readdir(dirPath, { withFileTypes: true });
                const children = [];
                let metadata;
                // Check if there's an index.md file to get directory metadata
                const indexPath = path.join(dirPath, "index.md");
                try {
                    const indexStat = await fs.stat(indexPath);
                    if (indexStat.isFile()) {
                        const content = await fs.readFile(indexPath, "utf-8");
                        const { data } = matter(content);
                        metadata = data;
                    }
                }
                catch (error) {
                    // No index.md file, that's fine
                }
                // Process all entries
                for (const entry of entries) {
                    const entryPath = path.join(dirPath, entry.name);
                    const entryRelativePath = path.join(relativePath, entry.name);
                    if (entry.isDirectory()) {
                        const subDir = await buildStructure(entryPath, entryRelativePath);
                        children.push(subDir);
                    }
                    else if (entry.name.endsWith(".md") && entry.name !== "index.md") {
                        try {
                            const content = await fs.readFile(entryPath, "utf-8");
                            const { data } = matter(content);
                            children.push({
                                name: entry.name,
                                path: entryRelativePath,
                                type: "file",
                                metadata: data,
                                children: [],
                            });
                        }
                        catch (error) {
                            children.push({
                                name: entry.name,
                                path: entryRelativePath,
                                type: "file",
                                error: error.message,
                                children: [],
                            });
                        }
                    }
                }
                // Sort children by order metadata if available, then by name
                children.sort((a, b) => {
                    const orderA = a.metadata?.order ?? Infinity;
                    const orderB = b.metadata?.order ?? Infinity;
                    if (orderA !== orderB) {
                        return orderA - orderB;
                    }
                    return a.name.localeCompare(b.name);
                });
                return {
                    name: path.basename(dirPath),
                    path: relativePath,
                    type: "directory",
                    metadata,
                    children,
                };
            }
            catch (error) {
                return {
                    name: path.basename(dirPath),
                    path: relativePath,
                    type: "directory",
                    error: error.message,
                    children: [],
                };
            }
        }
        const structure = await buildStructure(normalizedBasePath);
        return {
            content: [
                {
                    type: "text",
                    text: "Documentation structure retrieved successfully",
                },
            ],
            metadata: {
                structure,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error getting structure: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Gets the navigation structure for the documentation
 */
export async function getNavigation(basePath, allowedDirectories) {
    try {
        // First get the structure
        const structureResponse = await getStructure(basePath, allowedDirectories);
        if (structureResponse.isError) {
            return structureResponse;
        }
        const structure = structureResponse.metadata?.structure;
        // Build navigation from structure
        function buildNavigation(structure) {
            const sections = [];
            function processNode(node, parentPath = []) {
                // Skip nodes with errors
                if (node.error) {
                    return;
                }
                if (node.type === "directory") {
                    // Create a section for this directory
                    const section = {
                        title: node.metadata?.title || node.name,
                        path: node.path ? `/${node.path}` : null,
                        items: [],
                        order: node.metadata?.order ?? Infinity,
                    };
                    // Process children
                    for (const child of node.children) {
                        if (child.type === "file") {
                            // Add file as an item
                            section.items.push({
                                title: child.metadata?.title || child.name.replace(/\.md$/, ""),
                                path: `/${child.path}`,
                                order: child.metadata?.order ?? Infinity,
                            });
                        }
                        else if (child.type === "directory") {
                            // Process subdirectory
                            const childSections = processNode(child, [
                                ...parentPath,
                                node.name,
                            ]);
                            if (childSections) {
                                sections.push(...childSections);
                            }
                        }
                    }
                    // Sort items by order
                    section.items.sort((a, b) => {
                        if (a.order !== b.order) {
                            return a.order - b.order;
                        }
                        return a.title.localeCompare(b.title);
                    });
                    // Only add section if it has items
                    if (section.items.length > 0) {
                        sections.push(section);
                    }
                    return sections;
                }
                return null;
            }
            processNode(structure);
            // Sort sections by order
            sections.sort((a, b) => {
                if (a.order !== b.order) {
                    return a.order - b.order;
                }
                return a.title.localeCompare(b.title);
            });
            return sections;
        }
        const navigation = buildNavigation(structure);
        return {
            content: [
                { type: "text", text: "Navigation structure retrieved successfully" },
            ],
            metadata: {
                navigation,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error getting navigation: ${error.message}` },
            ],
            isError: true,
        };
    }
}
//# sourceMappingURL=docs.js.map
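Editor's note: each handler above takes the target path plus an `allowedDirectories` allowlist and resolves the path through `validatePath` (added in `dist/utils/path.js`, not shown in this diff) before touching the filesystem; an empty `basePath` falls back to the first allowed directory. A rough usage sketch follows; the import specifier and docs directory are assumptions for illustration.

```ts
// Import path is illustrative; adjust to however the package exposes these handlers.
import { listDocuments, getNavigation } from "mcp-docs-service/dist/handlers/docs.js";

// Hypothetical docs root; the handlers reject paths outside this allowlist.
const allowedDirectories = ["/home/user/project/docs"];

// Empty basePath means "use the first allowed directory".
const listing = await listDocuments("", allowedDirectories);
if (!listing.isError) {
  console.log(listing.metadata?.documents);
}

const nav = await getNavigation("", allowedDirectories);
console.log(nav.metadata?.navigation);
```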
package/dist/handlers/docs.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"docs.js","sourceRoot":"","sources":["../../src/handlers/docs.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,aAAa,CAAC;AAC7B,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAShD,OAAO,MAAM,MAAM,aAAa,CAAC;AAEjC;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAChC,OAAe,EACf,kBAA4B;IAE5B,IAAI,CAAC;QACH,MAAM,cAAc,GAAG,MAAM,YAAY,CAAC,OAAO,EAAE,kBAAkB,CAAC,CAAC;QAEvE,gBAAgB;QAChB,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;QAE3D,oBAAoB;QACpB,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;QAErE,OAAO;YACL,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,4BAA4B,EAAE,CAAC;YAC/D,QAAQ,EAAE;gBACR,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE,eAAe;gBACxB,QAAQ;aACT;SACF,CAAC;IACJ,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,OAAO;YACL,OAAO,EAAE;gBACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,2BAA2B,KAAK,CAAC,OAAO,EAAE,EAAE;aACnE;YACD,OAAO,EAAE,IAAI;SACd,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,QAAgB,EAChB,kBAA4B;IAE5B,IAAI,CAAC;QACH,MAAM,kBAAkB,GAAG,QAAQ;YACjC,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,EAAE,kBAAkB,CAAC;YAClD,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAE1B,MAAM,SAAS,GAAoB,EAAE,CAAC;QAEtC,KAAK,UAAU,gBAAgB,CAAC,OAAe;YAC7C,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;YAEnE,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;gBAC5B,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;gBAEjD,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;oBACxB,MAAM,gBAAgB,CAAC,SAAS,CAAC,CAAC;gBACpC,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;oBACtC,IAAI,CAAC;wBACH,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;wBACtD,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;wBAE3C,SAAS,CAAC,IAAI,CAAC;4BACb,IAAI,EAAE,SAAS;4BACf,IAAI,EAAE,KAAK,CAAC,IAAI;4BAChB,QAAQ,EAAE,QAA4B;yBACvC,CAAC,CAAC;oBACL,CAAC;oBAAC,OAAO,KAAU,EAAE,CAAC;wBACpB,OAAO,CAAC,KAAK,CAAC,oBAAoB,SAAS,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;oBACnE,CAAC;gBACH,CAAC;YACH,CAAC;QACH,CAAC;QAED,MAAM,gBAAgB,CAAC,kBAAkB,CAAC,CAAC;QAE3C,OAAO;YACL,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,SAAS,SAAS,CAAC,MAAM,YAAY,EAAE,CAAC;YACxE,QAAQ,EAAE;gBACR,SAAS;aACV;SACF,CAAC;IACJ,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,OAAO;YACL,OAAO,EAAE;gBACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,4BAA4B,KAAK,CAAC,OAAO,EAAE,EAAE;aACpE;YACD,OAAO,EAAE,IAAI;SACd,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAChC,QAAgB,EAChB,kBAA4B;IAE5B,IAAI,CAAC;QACH,MAAM,kBAAkB,GAAG,QAAQ;YACjC,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,EAAE,kBAAkB,CAAC;YAClD,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAE1B,KAAK,UAAU,cAAc,CAC3B,OAAe,EACf,YAAY,GAAG,EAAE;YAEjB,IAAI,CAAC;gBACH,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;gBACnE,MAAM,QAAQ,GAAgB,EAAE,CAAC;gBAEjC,IAAI,QAAsC,CAAC;gBAE3C,8DAA8D;gBAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;gBACjD,IAAI,CAAC;oBACH,MAAM,SAAS,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC3C,IAAI,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC;wBACvB,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;wBACtD,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;wBACjC,QAAQ,GAAG,IAAwB,CAAC;oBACtC,CAAC;gBACH,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,gCAAgC;gBAClC,CAAC;gBAED,sBAAsB;gBACtB,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;oBAC5B,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;oBACjD,MAAM,iBAAiB,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;oBAE9D,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;wBACxB,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;wBAClE,QAAQ,CAAC,IAAI,CA
AC,MAAM,CAAC,CAAC;oBACxB,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACnE,IAAI,CAAC;4BACH,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;4BACtD,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;4BAEjC,QAAQ,CAAC,IAAI,CAAC;gCACZ,IAAI,EAAE,KAAK,CAAC,IAAI;gCAChB,IAAI,EAAE,iBAAiB;gCACvB,IAAI,EAAE,MAAM;gCACZ,QAAQ,EAAE,IAAwB;gCAClC,QAAQ,EAAE,EAAE;6BACb,CAAC,CAAC;wBACL,CAAC;wBAAC,OAAO,KAAU,EAAE,CAAC;4BACpB,QAAQ,CAAC,IAAI,CAAC;gCACZ,IAAI,EAAE,KAAK,CAAC,IAAI;gCAChB,IAAI,EAAE,iBAAiB;gCACvB,IAAI,EAAE,MAAM;gCACZ,KAAK,EAAE,KAAK,CAAC,OAAO;gCACpB,QAAQ,EAAE,EAAE;6BACb,CAAC,CAAC;wBACL,CAAC;oBACH,CAAC;gBACH,CAAC;gBAED,6DAA6D;gBAC7D,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;oBACrB,MAAM,MAAM,GAAG,CAAC,CAAC,QAAQ,EAAE,KAAK,IAAI,QAAQ,CAAC;oBAC7C,MAAM,MAAM,GAAG,CAAC,CAAC,QAAQ,EAAE,KAAK,IAAI,QAAQ,CAAC;oBAE7C,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;wBACtB,OAAO,MAAM,GAAG,MAAM,CAAC;oBACzB,CAAC;oBAED,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC;gBAEH,OAAO;oBACL,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC;oBAC5B,IAAI,EAAE,YAAY;oBAClB,IAAI,EAAE,WAAW;oBACjB,QAAQ;oBACR,QAAQ;iBACT,CAAC;YACJ,CAAC;YAAC,OAAO,KAAU,EAAE,CAAC;gBACpB,OAAO;oBACL,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC;oBAC5B,IAAI,EAAE,YAAY;oBAClB,IAAI,EAAE,WAAW;oBACjB,KAAK,EAAE,KAAK,CAAC,OAAO;oBACpB,QAAQ,EAAE,EAAE;iBACb,CAAC;YACJ,CAAC;QACH,CAAC;QAED,MAAM,SAAS,GAAG,MAAM,cAAc,CAAC,kBAAkB,CAAC,CAAC;QAE3D,OAAO;YACL,OAAO,EAAE;gBACP;oBACE,IAAI,EAAE,MAAM;oBACZ,IAAI,EAAE,gDAAgD;iBACvD;aACF;YACD,QAAQ,EAAE;gBACR,SAAS;aACV;SACF,CAAC;IACJ,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,OAAO;YACL,OAAO,EAAE;gBACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,4BAA4B,KAAK,CAAC,OAAO,EAAE,EAAE;aACpE;YACD,OAAO,EAAE,IAAI;SACd,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,QAAgB,EAChB,kBAA4B;IAE5B,IAAI,CAAC;QACH,0BAA0B;QAC1B,MAAM,iBAAiB,GAAG,MAAM,YAAY,CAAC,QAAQ,EAAE,kBAAkB,CAAC,CAAC;QAE3E,IAAI,iBAAiB,CAAC,OAAO,EAAE,CAAC;YAC9B,OAAO,iBAAiB,CAAC;QAC3B,CAAC;QAED,MAAM,SAAS,GAAG,iBAAiB,CAAC,QAAQ,EAAE,SAAsB,CAAC;QAErE,kCAAkC;QAClC,SAAS,eAAe,CAAC,SAAoB;YAC3C,MAAM,QAAQ,GAAwB,EAAE,CAAC;YAEzC,SAAS,WAAW,CAAC,IAAe,EAAE,aAAuB,EAAE;gBAC7D,yBAAyB;gBACzB,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;oBACf,OAAO;gBACT,CAAC;gBAED,IAAI,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;oBAC9B,sCAAsC;oBACtC,MAAM,OAAO,GAAsB;wBACjC,KAAK,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,IAAI,IAAI,CAAC,IAAI;wBACxC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,IAAI;wBACxC,KAAK,EAAE,EAAE;wBACT,KAAK,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,IAAI,QAAQ;qBACxC,CAAC;oBAEF,mBAAmB;oBACnB,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAClC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BAC1B,sBAAsB;4BACtB,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC;gCACjB,KAAK,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;gCAC/D,IAAI,EAAE,IAAI,KAAK,CAAC,IAAI,EAAE;gCACtB,KAAK,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,IAAI,QAAQ;6BACzC,CAAC,CAAC;wBACL,CAAC;6BAAM,IAAI,KAAK,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;4BACtC,uBAAuB;4BACvB,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,EAAE;gCACvC,GAAG,UAAU;gCACb,IAAI,CAAC,IAAI;6BACV,CAAC,CAAC;4BACH,IAAI,aAAa,EAAE,CAAC;gCAClB,QAAQ,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC;4BAClC,CAAC;wBACH,CAAC;oBACH,CAAC;oBAED,sBAAsB;oBACtB,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAiB,EAAE,CAAiB,EAAE,EAAE;wBAC1D,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC;4BACxB,OAAO,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;wBAC3B,CAAC;wBACD,OAAO,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;oBACxC,CAAC,CAAC,CAAC;oBAEH,mCAAmC;oBACnC,IAAI,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EA
AE,CAAC;wBAC7B,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACzB,CAAC;oBAED,OAAO,QAAQ,CAAC;gBAClB,CAAC;gBAED,OAAO,IAAI,CAAC;YACd,CAAC;YAED,WAAW,CAAC,SAAS,CAAC,CAAC;YAEvB,yBAAyB;YACzB,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBACrB,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC;oBACxB,OAAO,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;gBAC3B,CAAC;gBACD,OAAO,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;YACxC,CAAC,CAAC,CAAC;YAEH,OAAO,QAAQ,CAAC;QAClB,CAAC;QAED,MAAM,UAAU,GAAG,eAAe,CAAC,SAAS,CAAC,CAAC;QAE9C,OAAO;YACL,OAAO,EAAE;gBACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,6CAA6C,EAAE;aACtE;YACD,QAAQ,EAAE;gBACR,UAAU;aACX;SACF,CAAC;IACJ,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,OAAO;YACL,OAAO,EAAE;gBACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,6BAA6B,KAAK,CAAC,OAAO,EAAE,EAAE;aACrE;YACD,OAAO,EAAE,IAAI;SACd,CAAC;IACJ,CAAC;AACH,CAAC"}
package/dist/handlers/file.d.ts
ADDED
@@ -0,0 +1,32 @@
import { ToolResponse } from "../types/tools.js";
/**
 * Reads a file and returns its content
 */
export declare function readFile(filePath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Writes content to a file
 */
export declare function writeFile(filePath: string, content: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Lists files in a directory
 */
export declare function listFiles(dirPath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Gets information about a file
 */
export declare function getFileInfo(filePath: string, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Searches for files matching a pattern
 */
export declare function searchForFiles(rootPath: string, pattern: string, excludePatterns: string[] | undefined, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Applies edits to a file
 */
export declare function editFile(filePath: string, edits: Array<{
    oldText: string;
    newText: string;
}>, allowedDirectories: string[]): Promise<ToolResponse>;
/**
 * Gets the directory structure as a tree
 */
export declare function getDirectoryTree(dirPath: string, allowedDirectories: string[]): Promise<ToolResponse>;
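Editor's note: `editFile` takes an array of `{ oldText, newText }` replacements and returns the resulting diff via `applyFileEdits` from `dist/utils/file.js` (also added in this release). A small sketch of the call shape; the import specifier, file path, and edit strings are placeholders, not values from this package.

```ts
// Import path is illustrative; adjust to the package's actual entry points.
import { editFile } from "mcp-docs-service/dist/handlers/file.js";

const allowedDirectories = ["/home/user/project/docs"];

const response = await editFile(
  "/home/user/project/docs/guides/getting-started.md",
  [{ oldText: "## Instalation", newText: "## Installation" }],
  allowedDirectories
);

// On success the generated diff is returned in response.metadata.diff.
console.log(response.isError ? response.content[0].text : response.metadata?.diff);
```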
package/dist/handlers/file.js
ADDED
@@ -0,0 +1,222 @@
import fs from "fs/promises";
import path from "path";
import { validatePath } from "../utils/path.js";
import { getFileStats, searchFiles, applyFileEdits } from "../utils/file.js";
/**
 * Reads a file and returns its content
 */
export async function readFile(filePath, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(filePath, allowedDirectories);
        const content = await fs.readFile(normalizedPath, "utf-8");
        return {
            content: [{ type: "text", text: "File read successfully" }],
            metadata: {
                path: filePath,
                content,
            },
        };
    }
    catch (error) {
        return {
            content: [{ type: "text", text: `Error reading file: ${error.message}` }],
            isError: true,
        };
    }
}
/**
 * Writes content to a file
 */
export async function writeFile(filePath, content, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(filePath, allowedDirectories);
        // Ensure the directory exists
        const dirPath = path.dirname(normalizedPath);
        await fs.mkdir(dirPath, { recursive: true });
        // Write the file
        await fs.writeFile(normalizedPath, content);
        return {
            content: [{ type: "text", text: "File written successfully" }],
            metadata: {
                path: filePath,
            },
        };
    }
    catch (error) {
        return {
            content: [{ type: "text", text: `Error writing file: ${error.message}` }],
            isError: true,
        };
    }
}
/**
 * Lists files in a directory
 */
export async function listFiles(dirPath, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(dirPath, allowedDirectories);
        const entries = await fs.readdir(normalizedPath, { withFileTypes: true });
        const files = entries.map((entry) => ({
            name: entry.name,
            isDirectory: entry.isDirectory(),
            isFile: entry.isFile(),
        }));
        return {
            content: [
                { type: "text", text: `Listed ${files.length} files in ${dirPath}` },
            ],
            metadata: {
                path: dirPath,
                files,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error listing files: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Gets information about a file
 */
export async function getFileInfo(filePath, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(filePath, allowedDirectories);
        const fileInfo = await getFileStats(normalizedPath);
        return {
            content: [
                { type: "text", text: "File information retrieved successfully" },
            ],
            metadata: {
                path: filePath,
                info: fileInfo,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error getting file info: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Searches for files matching a pattern
 */
export async function searchForFiles(rootPath, pattern, excludePatterns = [], allowedDirectories) {
    try {
        const normalizedPath = await validatePath(rootPath, allowedDirectories);
        const files = await searchFiles(normalizedPath, pattern, excludePatterns);
        return {
            content: [
                {
                    type: "text",
                    text: `Found ${files.length} files matching pattern "${pattern}"`,
                },
            ],
            metadata: {
                rootPath,
                pattern,
                excludePatterns,
                files,
            },
        };
    }
    catch (error) {
        return {
            content: [
                { type: "text", text: `Error searching files: ${error.message}` },
            ],
            isError: true,
        };
    }
}
/**
 * Applies edits to a file
 */
export async function editFile(filePath, edits, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(filePath, allowedDirectories);
        const diff = await applyFileEdits(normalizedPath, edits, false);
        return {
            content: [{ type: "text", text: "File edited successfully" }],
            metadata: {
                path: filePath,
                diff,
            },
        };
    }
    catch (error) {
        return {
            content: [{ type: "text", text: `Error editing file: ${error.message}` }],
            isError: true,
        };
    }
}
/**
 * Gets the directory structure as a tree
 */
export async function getDirectoryTree(dirPath, allowedDirectories) {
    try {
        const normalizedPath = await validatePath(dirPath, allowedDirectories);
        async function buildTree(currentPath) {
            const entries = await fs.readdir(currentPath, { withFileTypes: true });
            const result = [];
            for (const entry of entries) {
                const entryPath = path.join(currentPath, entry.name);
                if (entry.isDirectory()) {
                    const children = await buildTree(entryPath);
                    result.push({
                        name: entry.name,
                        path: entryPath,
                        type: "directory",
                        children,
                    });
                }
                else {
                    result.push({
                        name: entry.name,
                        path: entryPath,
                        type: "file",
                    });
                }
            }
            // Sort entries: directories first, then files, both alphabetically
            result.sort((a, b) => {
                if (a.type !== b.type) {
                    return a.type === "directory" ? -1 : 1;
                }
                return a.name.localeCompare(b.name);
            });
            return result;
        }
        const tree = await buildTree(normalizedPath);
        return {
            content: [
                { type: "text", text: "Directory tree retrieved successfully" },
            ],
            metadata: {
                path: dirPath,
                tree,
            },
        };
    }
    catch (error) {
        return {
            content: [
                {
                    type: "text",
                    text: `Error getting directory tree: ${error.message}`,
                },
            ],
            isError: true,
        };
    }
}
//# sourceMappingURL=file.js.map
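Editor's note: `getDirectoryTree` returns its result under `metadata.tree` as nested nodes (directories first, then files, each sorted alphabetically). The node shape below is inferred from the code above rather than taken from a published type declaration.

```ts
// Inferred node shape for getDirectoryTree results; directory nodes carry
// a children array, file nodes do not.
interface TreeNode {
  name: string;
  path: string;
  type: "directory" | "file";
  children?: TreeNode[];
}
```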