@qodo/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +118 -0
- package/README.md +121 -0
- package/dist/api/agent.d.ts +69 -0
- package/dist/api/agent.d.ts.map +1 -0
- package/dist/api/agent.js +1034 -0
- package/dist/api/agent.js.map +1 -0
- package/dist/api/analytics.d.ts +43 -0
- package/dist/api/analytics.d.ts.map +1 -0
- package/dist/api/analytics.js +163 -0
- package/dist/api/analytics.js.map +1 -0
- package/dist/api/http.d.ts +5 -0
- package/dist/api/http.d.ts.map +1 -0
- package/dist/api/http.js +59 -0
- package/dist/api/http.js.map +1 -0
- package/dist/api/index.d.ts +12 -0
- package/dist/api/index.d.ts.map +1 -0
- package/dist/api/index.js +17 -0
- package/dist/api/index.js.map +1 -0
- package/dist/api/taskTracking.d.ts +54 -0
- package/dist/api/taskTracking.d.ts.map +1 -0
- package/dist/api/taskTracking.js +208 -0
- package/dist/api/taskTracking.js.map +1 -0
- package/dist/api/types.d.ts +92 -0
- package/dist/api/types.d.ts.map +1 -0
- package/dist/api/types.js +2 -0
- package/dist/api/types.js.map +1 -0
- package/dist/api/utils.d.ts +8 -0
- package/dist/api/utils.d.ts.map +1 -0
- package/dist/api/utils.js +54 -0
- package/dist/api/utils.js.map +1 -0
- package/dist/api/websocket.d.ts +74 -0
- package/dist/api/websocket.d.ts.map +1 -0
- package/dist/api/websocket.js +685 -0
- package/dist/api/websocket.js.map +1 -0
- package/dist/auth/index.d.ts +25 -0
- package/dist/auth/index.d.ts.map +1 -0
- package/dist/auth/index.js +85 -0
- package/dist/auth/index.js.map +1 -0
- package/dist/clients/index.d.ts +8 -0
- package/dist/clients/index.d.ts.map +1 -0
- package/dist/clients/index.js +7 -0
- package/dist/clients/index.js.map +1 -0
- package/dist/clients/info/InfoClient.d.ts +37 -0
- package/dist/clients/info/InfoClient.d.ts.map +1 -0
- package/dist/clients/info/InfoClient.js +69 -0
- package/dist/clients/info/InfoClient.js.map +1 -0
- package/dist/clients/info/index.d.ts +4 -0
- package/dist/clients/info/index.d.ts.map +1 -0
- package/dist/clients/info/index.js +2 -0
- package/dist/clients/info/index.js.map +1 -0
- package/dist/clients/info/types.d.ts +21 -0
- package/dist/clients/info/types.d.ts.map +1 -0
- package/dist/clients/info/types.js +2 -0
- package/dist/clients/info/types.js.map +1 -0
- package/dist/clients/sessions/SessionsClient.d.ts +34 -0
- package/dist/clients/sessions/SessionsClient.d.ts.map +1 -0
- package/dist/clients/sessions/SessionsClient.js +71 -0
- package/dist/clients/sessions/SessionsClient.js.map +1 -0
- package/dist/clients/sessions/index.d.ts +4 -0
- package/dist/clients/sessions/index.d.ts.map +1 -0
- package/dist/clients/sessions/index.js +2 -0
- package/dist/clients/sessions/index.js.map +1 -0
- package/dist/clients/sessions/types.d.ts +20 -0
- package/dist/clients/sessions/types.d.ts.map +1 -0
- package/dist/clients/sessions/types.js +2 -0
- package/dist/clients/sessions/types.js.map +1 -0
- package/dist/config/ConfigManager.d.ts +43 -0
- package/dist/config/ConfigManager.d.ts.map +1 -0
- package/dist/config/ConfigManager.js +472 -0
- package/dist/config/ConfigManager.js.map +1 -0
- package/dist/config/index.d.ts +6 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +7 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config/urlConfig.d.ts +15 -0
- package/dist/config/urlConfig.d.ts.map +1 -0
- package/dist/config/urlConfig.js +75 -0
- package/dist/config/urlConfig.js.map +1 -0
- package/dist/constants/errors.d.ts +2 -0
- package/dist/constants/errors.d.ts.map +1 -0
- package/dist/constants/errors.js +2 -0
- package/dist/constants/errors.js.map +1 -0
- package/dist/constants/index.d.ts +7 -0
- package/dist/constants/index.d.ts.map +1 -0
- package/dist/constants/index.js +11 -0
- package/dist/constants/index.js.map +1 -0
- package/dist/constants/tools.d.ts +4 -0
- package/dist/constants/tools.d.ts.map +1 -0
- package/dist/constants/tools.js +4 -0
- package/dist/constants/tools.js.map +1 -0
- package/dist/constants/versions.d.ts +2 -0
- package/dist/constants/versions.d.ts.map +1 -0
- package/dist/constants/versions.js +2 -0
- package/dist/constants/versions.js.map +1 -0
- package/dist/context/buildUserContext.d.ts +18 -0
- package/dist/context/buildUserContext.d.ts.map +1 -0
- package/dist/context/buildUserContext.js +34 -0
- package/dist/context/buildUserContext.js.map +1 -0
- package/dist/context/index.d.ts +9 -0
- package/dist/context/index.d.ts.map +1 -0
- package/dist/context/index.js +9 -0
- package/dist/context/index.js.map +1 -0
- package/dist/context/messageManager.d.ts +42 -0
- package/dist/context/messageManager.d.ts.map +1 -0
- package/dist/context/messageManager.js +322 -0
- package/dist/context/messageManager.js.map +1 -0
- package/dist/context/taskFocus.d.ts +2 -0
- package/dist/context/taskFocus.d.ts.map +1 -0
- package/dist/context/taskFocus.js +26 -0
- package/dist/context/taskFocus.js.map +1 -0
- package/dist/context/userInput.d.ts +3 -0
- package/dist/context/userInput.d.ts.map +1 -0
- package/dist/context/userInput.js +20 -0
- package/dist/context/userInput.js.map +1 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +21 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp/MCPManager.d.ts +125 -0
- package/dist/mcp/MCPManager.d.ts.map +1 -0
- package/dist/mcp/MCPManager.js +616 -0
- package/dist/mcp/MCPManager.js.map +1 -0
- package/dist/mcp/approvedTools.d.ts +4 -0
- package/dist/mcp/approvedTools.d.ts.map +1 -0
- package/dist/mcp/approvedTools.js +19 -0
- package/dist/mcp/approvedTools.js.map +1 -0
- package/dist/mcp/baseServer.d.ts +75 -0
- package/dist/mcp/baseServer.d.ts.map +1 -0
- package/dist/mcp/baseServer.js +107 -0
- package/dist/mcp/baseServer.js.map +1 -0
- package/dist/mcp/builtinServers.d.ts +15 -0
- package/dist/mcp/builtinServers.d.ts.map +1 -0
- package/dist/mcp/builtinServers.js +155 -0
- package/dist/mcp/builtinServers.js.map +1 -0
- package/dist/mcp/dynamicBEServer.d.ts +20 -0
- package/dist/mcp/dynamicBEServer.d.ts.map +1 -0
- package/dist/mcp/dynamicBEServer.js +52 -0
- package/dist/mcp/dynamicBEServer.js.map +1 -0
- package/dist/mcp/index.d.ts +19 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +24 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/mcp/mcpInitialization.d.ts +2 -0
- package/dist/mcp/mcpInitialization.d.ts.map +1 -0
- package/dist/mcp/mcpInitialization.js +56 -0
- package/dist/mcp/mcpInitialization.js.map +1 -0
- package/dist/mcp/servers/filesystem.d.ts +75 -0
- package/dist/mcp/servers/filesystem.d.ts.map +1 -0
- package/dist/mcp/servers/filesystem.js +992 -0
- package/dist/mcp/servers/filesystem.js.map +1 -0
- package/dist/mcp/servers/gerrit.d.ts +19 -0
- package/dist/mcp/servers/gerrit.d.ts.map +1 -0
- package/dist/mcp/servers/gerrit.js +515 -0
- package/dist/mcp/servers/gerrit.js.map +1 -0
- package/dist/mcp/servers/git.d.ts +18 -0
- package/dist/mcp/servers/git.d.ts.map +1 -0
- package/dist/mcp/servers/git.js +441 -0
- package/dist/mcp/servers/git.js.map +1 -0
- package/dist/mcp/servers/ripgrep.d.ts +34 -0
- package/dist/mcp/servers/ripgrep.d.ts.map +1 -0
- package/dist/mcp/servers/ripgrep.js +517 -0
- package/dist/mcp/servers/ripgrep.js.map +1 -0
- package/dist/mcp/servers/shell.d.ts +20 -0
- package/dist/mcp/servers/shell.d.ts.map +1 -0
- package/dist/mcp/servers/shell.js +603 -0
- package/dist/mcp/servers/shell.js.map +1 -0
- package/dist/mcp/serversRegistry.d.ts +55 -0
- package/dist/mcp/serversRegistry.d.ts.map +1 -0
- package/dist/mcp/serversRegistry.js +410 -0
- package/dist/mcp/serversRegistry.js.map +1 -0
- package/dist/mcp/toolProcessor.d.ts +42 -0
- package/dist/mcp/toolProcessor.d.ts.map +1 -0
- package/dist/mcp/toolProcessor.js +200 -0
- package/dist/mcp/toolProcessor.js.map +1 -0
- package/dist/mcp/types.d.ts +29 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/mcp/types.js +2 -0
- package/dist/mcp/types.js.map +1 -0
- package/dist/parser/index.d.ts +72 -0
- package/dist/parser/index.d.ts.map +1 -0
- package/dist/parser/index.js +967 -0
- package/dist/parser/index.js.map +1 -0
- package/dist/parser/types.d.ts +153 -0
- package/dist/parser/types.d.ts.map +1 -0
- package/dist/parser/types.js +6 -0
- package/dist/parser/types.js.map +1 -0
- package/dist/parser/utils.d.ts +18 -0
- package/dist/parser/utils.d.ts.map +1 -0
- package/dist/parser/utils.js +64 -0
- package/dist/parser/utils.js.map +1 -0
- package/dist/sdk/QodoSDK.d.ts +152 -0
- package/dist/sdk/QodoSDK.d.ts.map +1 -0
- package/dist/sdk/QodoSDK.js +786 -0
- package/dist/sdk/QodoSDK.js.map +1 -0
- package/dist/sdk/bootstrap.d.ts +16 -0
- package/dist/sdk/bootstrap.d.ts.map +1 -0
- package/dist/sdk/bootstrap.js +21 -0
- package/dist/sdk/bootstrap.js.map +1 -0
- package/dist/sdk/builders.d.ts +54 -0
- package/dist/sdk/builders.d.ts.map +1 -0
- package/dist/sdk/builders.js +117 -0
- package/dist/sdk/builders.js.map +1 -0
- package/dist/sdk/defaults.d.ts +11 -0
- package/dist/sdk/defaults.d.ts.map +1 -0
- package/dist/sdk/defaults.js +39 -0
- package/dist/sdk/defaults.js.map +1 -0
- package/dist/sdk/discovery.d.ts +2 -0
- package/dist/sdk/discovery.d.ts.map +1 -0
- package/dist/sdk/discovery.js +25 -0
- package/dist/sdk/discovery.js.map +1 -0
- package/dist/sdk/events.d.ts +168 -0
- package/dist/sdk/events.d.ts.map +1 -0
- package/dist/sdk/events.js +52 -0
- package/dist/sdk/events.js.map +1 -0
- package/dist/sdk/index.d.ts +17 -0
- package/dist/sdk/index.d.ts.map +1 -0
- package/dist/sdk/index.js +17 -0
- package/dist/sdk/index.js.map +1 -0
- package/dist/sdk/runner/AgentRunner.d.ts +22 -0
- package/dist/sdk/runner/AgentRunner.d.ts.map +1 -0
- package/dist/sdk/runner/AgentRunner.js +222 -0
- package/dist/sdk/runner/AgentRunner.js.map +1 -0
- package/dist/sdk/runner/finalize.d.ts +9 -0
- package/dist/sdk/runner/finalize.d.ts.map +1 -0
- package/dist/sdk/runner/finalize.js +115 -0
- package/dist/sdk/runner/finalize.js.map +1 -0
- package/dist/sdk/runner/formats.d.ts +7 -0
- package/dist/sdk/runner/formats.d.ts.map +1 -0
- package/dist/sdk/runner/formats.js +91 -0
- package/dist/sdk/runner/formats.js.map +1 -0
- package/dist/sdk/runner/index.d.ts +9 -0
- package/dist/sdk/runner/index.d.ts.map +1 -0
- package/dist/sdk/runner/index.js +9 -0
- package/dist/sdk/runner/index.js.map +1 -0
- package/dist/sdk/runner/progress.d.ts +3 -0
- package/dist/sdk/runner/progress.d.ts.map +1 -0
- package/dist/sdk/runner/progress.js +16 -0
- package/dist/sdk/runner/progress.js.map +1 -0
- package/dist/sdk/schemas.d.ts +50 -0
- package/dist/sdk/schemas.d.ts.map +1 -0
- package/dist/sdk/schemas.js +145 -0
- package/dist/sdk/schemas.js.map +1 -0
- package/dist/session/SessionContext.d.ts +86 -0
- package/dist/session/SessionContext.d.ts.map +1 -0
- package/dist/session/SessionContext.js +395 -0
- package/dist/session/SessionContext.js.map +1 -0
- package/dist/session/environment.d.ts +42 -0
- package/dist/session/environment.d.ts.map +1 -0
- package/dist/session/environment.js +27 -0
- package/dist/session/environment.js.map +1 -0
- package/dist/session/history.d.ts +3 -0
- package/dist/session/history.d.ts.map +1 -0
- package/dist/session/history.js +67 -0
- package/dist/session/history.js.map +1 -0
- package/dist/session/index.d.ts +10 -0
- package/dist/session/index.d.ts.map +1 -0
- package/dist/session/index.js +9 -0
- package/dist/session/index.js.map +1 -0
- package/dist/session/serverData.d.ts +38 -0
- package/dist/session/serverData.d.ts.map +1 -0
- package/dist/session/serverData.js +241 -0
- package/dist/session/serverData.js.map +1 -0
- package/dist/tracking/Tracker.d.ts +55 -0
- package/dist/tracking/Tracker.d.ts.map +1 -0
- package/dist/tracking/Tracker.js +217 -0
- package/dist/tracking/Tracker.js.map +1 -0
- package/dist/tracking/index.d.ts +8 -0
- package/dist/tracking/index.d.ts.map +1 -0
- package/dist/tracking/index.js +8 -0
- package/dist/tracking/index.js.map +1 -0
- package/dist/tracking/schemas.d.ts +292 -0
- package/dist/tracking/schemas.d.ts.map +1 -0
- package/dist/tracking/schemas.js +91 -0
- package/dist/tracking/schemas.js.map +1 -0
- package/dist/types.d.ts +4 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/extractSetFlags.d.ts +6 -0
- package/dist/utils/extractSetFlags.d.ts.map +1 -0
- package/dist/utils/extractSetFlags.js +16 -0
- package/dist/utils/extractSetFlags.js.map +1 -0
- package/dist/utils/formatTimeAgo.d.ts +2 -0
- package/dist/utils/formatTimeAgo.d.ts.map +1 -0
- package/dist/utils/formatTimeAgo.js +20 -0
- package/dist/utils/formatTimeAgo.js.map +1 -0
- package/dist/utils/index.d.ts +12 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +12 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/machineId.d.ts +14 -0
- package/dist/utils/machineId.d.ts.map +1 -0
- package/dist/utils/machineId.js +66 -0
- package/dist/utils/machineId.js.map +1 -0
- package/dist/utils/pathUtils.d.ts +22 -0
- package/dist/utils/pathUtils.d.ts.map +1 -0
- package/dist/utils/pathUtils.js +54 -0
- package/dist/utils/pathUtils.js.map +1 -0
- package/dist/version.d.ts +2 -0
- package/dist/version.d.ts.map +1 -0
- package/dist/version.js +23 -0
- package/dist/version.js.map +1 -0
- package/package.json +93 -0
package/dist/mcp/servers/filesystem.js
@@ -0,0 +1,992 @@
import * as fsSync from 'fs';
import { promises as fs } from 'fs';
import * as diff from 'diff';
import * as path from 'path';
import { canonicalizePathSync, normalizeDriveLetter } from '../../utils/index.js';
const DEFAULT_MAX_DEPTH = 3;
const DEFAULT_MAX_DIRECTORIES = 2000;
const DEFAULT_EXCLUDED_DIRS = [
    '.git',
    'node_modules',
    '.idea',
    '.vscode',
    'dist',
    'build',
    'coverage',
    '.next',
    '.turbo',
    '.cache',
    'out',
    'target',
    'venv',
    '.venv',
];
function matchesPattern(name, pattern) {
    const lowerName = name.toLowerCase();
    const lowerPattern = pattern.toLowerCase();
    if (!lowerPattern) {
        return false;
    }
    if (lowerPattern === '*') {
        return true;
    }
    if (lowerPattern.includes('*')) {
        const parts = lowerPattern.split('*').filter(Boolean);
        let currentIndex = 0;
        for (const part of parts) {
            const index = lowerName.indexOf(part, currentIndex);
            if (index === -1) {
                return false;
            }
            currentIndex = index + part.length;
        }
        return true;
    }
    return lowerName === lowerPattern;
}
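// Usage sketch (illustrative annotation, not part of the published file):
// matchesPattern does case-insensitive matching with simple '*' wildcards -
// each '*'-separated fragment must appear in order - rather than full glob
// semantics, so calls like these behave as indicated:
//
//   matchesPattern('node_modules', 'node_*')  // true: 'node_' found in order
//   matchesPattern('.GIT', '.git')            // true: comparison is lowercased
//   matchesPattern('dist', '*')               // true: bare '*' matches anything
//   matchesPattern('src', 'dist')             // false: exact names must match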
export class TextNotFoundError extends Error {
    editIndex;
    oldText;
    constructor(message, editIndex, oldText) {
        super(message);
        this.name = 'TextNotFoundError';
        this.editIndex = editIndex;
        this.oldText = oldText;
    }
}
export class FilesystemServerEnhanced {
    name = "filesystem";
    version = "1.0.0";
    allowedDirectories = [];
    sessionLog;
    changeCounter = 0;
    constructor(allowedDirectories = []) {
        // Canonicalize the allowed directories up front so that different
        // representations of the same physical path (e.g. Windows `subst`
        // drives like `S:\\repo` vs `C:\\long\\path\\repo`) are treated as
        // identical roots. This also normalizes Windows drive letters.
        this.allowedDirectories = allowedDirectories.map(dir => canonicalizePathSync(dir));
        this.sessionLog = {
            sessionId: this.generateSessionId(),
            startTime: new Date().toISOString(),
            entries: []
        };
    }
    async initialize() {
        try {
            // Validate that all allowed directories exist and are accessible
            for (const dir of this.allowedDirectories) {
                try {
                    await fs.access(dir, fs.constants.R_OK);
                }
                catch (error) {
                    console.warn(`Warning: Directory ${dir} is not accessible:`, error);
                }
            }
            return true;
        }
        catch (error) {
            console.error("Failed to initialize filesystem server:", error);
            return false;
        }
    }
    async listTools() {
        const tools = [
            {
                name: "read_files",
                description: "Read the contents of one or more files, optionally selecting line ranges per file (range, head, tail). Returns the content of each file along with its path. If reading multiple files, failed reads for individual files won't stop the operation - you'll get results for the files that could be read successfully.",
                inputSchema: {
                    type: "object",
                    properties: {
                        paths: {
                            type: "array",
                            items: {
                                oneOf: [
                                    { type: "string" },
                                    {
                                        type: "object",
                                        properties: {
                                            path: { type: "string" },
                                            startLine: { type: "number", description: "1-based inclusive start line for range selection" },
                                            endLine: { type: "number", description: "1-based inclusive end line for range selection" },
                                            head: { type: "number", description: "Number of lines from the beginning of the file" },
                                            tail: { type: "number", description: "Number of lines from the end of the file" }
                                        },
                                        required: ["path"],
                                        additionalProperties: false
                                    }
                                ]
                            },
                            description: "Array of file specs to read. Each item can be either a string path or an object with { path, startLine?, endLine?, head?, tail? } to select specific lines. Examples: ['src/index.ts', { path: 'README.md', head: 50 }, { path: 'logs/app.log', tail: 100 }, { path: 'src/app.ts', startLine: 120, endLine: 200 }]. Each item can have different ranges."
                        }
                    },
                    required: ["paths"]
                },
                autoApproved: false
            },
            {
                name: "write_file",
                description: "Create a new file or completely overwrite an existing file with new content. Use with caution as it will overwrite existing files without warning. Handles text content with proper encoding.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Path where to create/write the file. Examples: 'src/components/Button.tsx', 'docs/README.md', 'config/settings.json', 'scripts/build.sh'"
                        },
                        content: {
                            type: "string",
                            description: "Content to write to the file. Examples: TypeScript component code, markdown documentation, JSON configuration, shell script commands"
                        }
                    },
                    required: ["path", "content"]
                },
                autoApproved: false
            },
            {
                name: "edit_file",
                description: "Make precise line-based edits to a text file by replacing exact text matches. Each edit operation finds and replaces specific text sequences. Returns a git-style diff showing the changes made. Use this for targeted modifications rather than rewriting entire files.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Path to the file to edit. Examples: 'src/index.ts', 'package.json', 'README.md', 'config/app.json'"
                        },
                        edits: {
                            type: "array",
                            items: {
                                type: "object",
                                properties: {
                                    oldText: {
                                        type: "string",
                                        description: "Exact text to search for and replace - must match exactly including whitespace and line breaks. Examples: 'console.log(\"Hello World\");', 'const version = \"1.0.0\";', '# Old Title'"
                                    },
                                    newText: {
                                        type: "string",
                                        description: "Text to replace the oldText with. Examples: 'console.log(\"Hello Universe!\");', 'const version = \"1.1.0\";', '# New Improved Title'"
                                    }
                                },
                                required: ["oldText", "newText"]
                            },
                            description: "Array of edit operations to perform. Each operation replaces oldText with newText. Multiple edits are applied in sequence. Examples: [{'oldText': 'const port = 3000;', 'newText': 'const port = process.env.PORT || 3000;'}] for single edit, [{'oldText': 'import React from \\'react\\';', 'newText': 'import React, { useState } from \\'react\\';'}, {'oldText': 'function App() {', 'newText': 'function App() {\\n const [count, setCount] = useState(0);'}] for multiple edits"
                        },
                    },
                    required: ["path", "edits"]
                },
                autoApproved: false
            },
            {
                name: "create_directory",
                description: "Create a new directory or ensure a directory exists. Can create multiple nested directories in one operation. If the directory already exists, this operation will succeed silently. Perfect for setting up directory structures for projects or ensuring required paths exist.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Path of the directory to create. Can be nested paths that will be created recursively. Examples: 'src/components', 'docs/api', 'tests/unit/helpers', 'build/assets/images'"
                        }
                    },
                    required: ["path"]
                },
                autoApproved: false
            },
            {
                name: "list_files_in_directories",
                description: "List files and immediate subdirectories in one or more directories (non-recursive). Returns the direct children of each specified path, with each directory's contents clearly labeled and files/directories distinguished with [FILE] and [DIR] prefixes. If listing multiple directories, failed reads for individual directories won't stop the operation - you'll get results for the directories that could be read successfully. Typically, you should call directory_tree with a shallow depth before using this tool to get an overview of the directory layout.",
                inputSchema: {
                    type: "object",
                    properties: {
                        paths: {
                            type: "array",
                            items: { type: "string" },
                            description: "Array of directory paths to list. Examples: ['.', 'src'] for current and src directories, ['src/components', 'src/utils'] for multiple source folders, ['docs'] for single directory, ['config', 'scripts', 'tests'] for project organization folders"
                        }
                    },
                    required: ["paths"]
                },
                autoApproved: true
            },
            {
                name: "directory_tree",
                description: "Get a recursive tree view of the folder structure only (directories, not files) as a JSON structure. Traversal is intentionally shallow and capped for performance on large repositories. Each directory entry includes 'name', 'type' (always 'directory'), and 'children' array containing subdirectories. The root object also includes a 'meta' field with truncation details (e.g. maxDepth, maxDirectories, excluded, truncated). Use this to understand the high-level directory hierarchy without file clutter.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Path of the directory to traverse recursively for folder structure. Examples: '.' for current directory structure, 'src' for source folder hierarchy, 'docs' for documentation folders, 'tests' for test directory structure"
                        },
                        maxDepth: {
                            type: "number",
                            description: "Maximum directory depth to traverse (1 = only this directory). Defaults to 3 if not provided."
                        },
                        maxDirectories: {
                            type: "number",
                            description: "Global cap on the total number of directories to include in the tree. Defaults to 2000 if not provided."
                        },
                        exclude: {
                            type: "array",
                            items: { type: "string" },
                            description: "Directory names or simple glob-like patterns to skip (e.g. 'node_modules', '.git'). Defaults to a built-in list of common build/output directories if not provided."
                        }
                    },
                    required: ["path"]
                },
                autoApproved: true
            },
            {
                name: "move_file",
                description: "Move or rename files and directories. Can move files between directories and rename them in a single operation. If the destination exists, the operation will fail. Works across different directories and can be used for simple renaming within the same directory. Use this to reorganize project structure or rename files.",
                inputSchema: {
                    type: "object",
                    properties: {
                        source: {
                            type: "string",
                            description: "Source path of the file or directory to move/rename. Examples: 'src/oldComponent.tsx', 'docs/old-readme.md', 'temp/data.json', 'src/utils' (directory)"
                        },
                        destination: {
                            type: "string",
                            description: "Destination path where the file/directory should be moved or renamed to. Examples: 'src/components/NewComponent.tsx', 'docs/README.md', 'data/processed/data.json', 'src/shared/utils'"
                        }
                    },
                    required: ["source", "destination"]
                },
                autoApproved: false
            },
            {
                name: "get_file_info",
                description: "Retrieve detailed metadata about a file or directory. Returns comprehensive information including size, creation time, last modified time, permissions, and type. This tool is perfect for understanding file characteristics without reading the actual content. Use this to check file properties, sizes, or modification dates.",
                inputSchema: {
                    type: "object",
                    properties: {
                        path: {
                            type: "string",
                            description: "Path to the file or directory to get information about. Examples: 'package.json', 'src/index.ts', 'docs' (directory), 'build/output.js', 'README.md'"
                        }
                    },
                    required: ["path"]
                },
                autoApproved: true
            },
            {
                name: "list_allowed_directories",
                description: "Get the list of directories that the filesystem server is allowed to access. Use this tool to understand which directories are available for file operations before attempting to access files or directories. This helps you understand the scope of filesystem access and avoid permission errors.",
                inputSchema: {
                    type: "object",
                    properties: {},
                    required: []
                },
                autoApproved: true
            },
            {
                name: "delete_files",
                description: "Delete files and/or directories simultaneously. Use with caution as this operation cannot be undone. For directories, this will recursively delete all contents. Failed deletions for individual files won't stop the entire operation - you'll get a report of what was successfully deleted and what failed.",
                inputSchema: {
                    type: "object",
                    properties: {
                        paths: {
                            type: "array",
                            items: { type: "string" },
                            description: "Array of file and directory paths to delete. Examples: ['temp/cache.json'] for single file, ['build', 'dist', 'node_modules/.cache'] for multiple directories, ['src/deprecated.ts', 'docs/old-guide.md'] for multiple files, ['logs/debug.log', 'logs/error.log'] for log files. Array examples: ['temp/cache.json'], ['build', 'dist'], ['src/deprecated.ts', 'docs/old-guide.md', 'temp/old-data.json']"
                        }
                    },
                    required: ["paths"]
                },
                autoApproved: false
            }
        ];
        return { tools };
    }
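    // Usage sketch (illustrative annotation, not part of the published file):
    // given the read_files inputSchema above, a callTool request that mixes a
    // whole-file read with head/tail/range selections could look like the
    // following (the `server` instance name is assumed):
    //
    //   await server.callTool({
    //       name: "read_files",
    //       arguments: {
    //           paths: [
    //               "package.json",
    //               { path: "README.md", head: 50 },
    //               { path: "logs/app.log", tail: 100 },
    //               { path: "src/app.ts", startLine: 120, endLine: 200 }
    //           ]
    //       }
    //   });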
    async callTool(request, _extra) {
        try {
            switch (request.name) {
                case "read_files":
                    return await this.readMultipleFiles(request.arguments.paths);
                case "write_file":
                    return await this.writeFile(request.arguments.path, request.arguments.content);
                case "edit_file":
                    return await this.editFile(request.arguments.path, request.arguments.edits);
                case "create_directory":
                    return await this.createDirectory(request.arguments.path);
                case "list_files_in_directories":
                    return await this.listFilesInDirectories(request.arguments.paths);
                case "directory_tree":
                    return await this.directoryTree(request.arguments.path, request.arguments);
                case "move_file":
                    return await this.moveFile(request.arguments.source, request.arguments.destination);
                case "get_file_info":
                    return await this.getFileInfo(request.arguments.path);
                case "list_allowed_directories":
                    return await this.listAllowedDirectories();
                case "delete_files":
                    return await this.deleteMultipleFiles(request.arguments.paths);
                default:
                    throw new Error(`Unknown tool: ${request.name}`);
            }
        }
        catch (error) {
            return {
                content: [{ type: "text", text: error.message }],
                isError: true,
            };
        }
    }
    resolvePath(filePath) {
        // If already absolute, just normalize
        if (path.isAbsolute(filePath)) {
            return path.resolve(filePath);
        }
        // Prefer the first validated allowed directory as the base for relative paths.
        // BuiltInServers ensures this list is ordered as:
        // [executionCwd?, projectRoots..., process.cwd(), ...]
        const baseDir = this.allowedDirectories[0] || normalizeDriveLetter(process.cwd());
        return path.resolve(baseDir, filePath);
    }
    validatePath(filePath) {
        // Resolve the path to get the absolute path and handle any relative path components
        const resolvedPath = this.resolvePath(filePath);
        // Check if the path is within any allowed directory (including the directory itself and all subdirectories)
        const isAllowed = this.allowedDirectories.some(allowedDir => {
            // Canonicalize the allowed root so that aliasing (e.g. Windows `subst`
            // virtual drives or symlinks) does not break sandbox checks. This also
            // normalizes drive letters.
            const normalizedAllowedDir = canonicalizePathSync(allowedDir);
            // For the target path we need to be careful with non-existent files on
            // Windows subst/virtual drives. realpathSync will fail for a file that
            // doesn't exist yet (e.g. a brand-new file to be created), but the
            // parent directory *does* exist and can be canonicalized to the
            // underlying physical path. To handle this, we canonicalize the parent
            // directory when the full path does not exist and then re-join the
            // basename so that paths like:
            // allowedDir = "S:\\repo" (subst of C:\\long\\path\\repo)
            // resolvedPath = "S:\\repo\\src\\newFile.ts" (not yet created)
            // become:
            // normalizedAllowedDir = "C:\\long\\path\\repo"
            // normalizedResolvedPath = "C:\\long\\path\\repo\\src\\newFile.ts".
            let normalizedResolvedPath;
            try {
                if (fsSync.existsSync(resolvedPath)) {
                    normalizedResolvedPath = canonicalizePathSync(resolvedPath);
                }
                else {
                    const parent = path.dirname(resolvedPath);
                    const parentCanonical = canonicalizePathSync(parent);
                    normalizedResolvedPath = normalizeDriveLetter(path.join(parentCanonical, path.basename(resolvedPath)));
                }
            }
            catch {
                normalizedResolvedPath = canonicalizePathSync(resolvedPath);
            }
            // Allow access to the allowed directory itself
            if (normalizedResolvedPath === normalizedAllowedDir) {
                return true;
            }
            // Allow access to any subdirectory or file within the allowed directory
            // Use path.relative to check if the resolved path is within the allowed directory
            const relativePath = path.relative(normalizedAllowedDir, normalizedResolvedPath);
            // If the relative path doesn't start with '..' and isn't empty, it's within the allowed directory
            return relativePath && !relativePath.startsWith('..') && !path.isAbsolute(relativePath);
        });
        if (!isAllowed) {
            throw new Error(`Access denied: Path ${resolvedPath} is not within allowed directories`);
        }
        return resolvedPath;
    }
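    // Containment sketch (illustrative annotation, not part of the published
    // file): the path.relative check above treats a path as inside an allowed
    // root only when the relative path is non-empty, does not climb out via
    // '..', and is not absolute. For an assumed allowed root of '/work/repo'
    // on a POSIX system:
    //
    //   path.relative('/work/repo', '/work/repo/src/a.ts')  // 'src/a.ts'      -> allowed
    //   path.relative('/work/repo', '/work/other/b.ts')     // '../other/b.ts' -> rejected
    //   path.relative('/work/repo', '/work/repo')           // ''              -> handled by the equality branch above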
    generateSessionId() {
        return `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
    }
    generateChangeId() {
        this.changeCounter++;
        return `change_${this.changeCounter.toString().padStart(4, '0')}`;
    }
    logChange(operation, path, details) {
        // Check if there's already an entry for this file path
        const existingEntryIndex = this.sessionLog.entries.findIndex(entry => entry.path === path);
        if (existingEntryIndex !== -1) {
            // Merge with existing entry
            const existingEntry = this.sessionLog.entries[existingEntryIndex];
            // Update timestamp to the latest operation
            existingEntry.timestamp = new Date().toISOString();
            // Merge operations based on type
            if (operation === 'edit' && (existingEntry.operation === 'edit' || existingEntry.operation === 'write' || existingEntry.operation === 'create')) {
                // For edit operations, we want to show the cumulative diff
                existingEntry.operation = operation;
                // Update with cumulative diff and edit count
                existingEntry.details.diff = details.diff;
                existingEntry.details.editCount = details.editCount;
                // Keep the original content from the first operation
                if (!existingEntry.details.originalContent && details.originalContent) {
                    existingEntry.details.originalContent = details.originalContent;
                }
                // Update summary to show cumulative edits
                const totalEdits = details.editCount || 1;
                existingEntry.details.summary = `Applied ${totalEdits} edit${totalEdits > 1 ? 's' : ''} to file (cumulative)`;
            }
            else if (operation === 'write' && (existingEntry.operation === 'create' || existingEntry.operation === 'write' || existingEntry.operation === 'edit')) {
                // Write operation overwrites everything, so replace the entry
                existingEntry.operation = operation;
                existingEntry.details = details;
            }
            else if (operation === 'create' && existingEntry.operation === 'delete') {
                // Delete followed by create - treat as write
                existingEntry.operation = 'write';
                existingEntry.details = details;
            }
            else if (operation === 'delete') {
                // Delete operation - replace the entry since file is gone
                existingEntry.operation = operation;
                existingEntry.details = details;
            }
            else if (operation === 'move') {
                // Move operation - update the entry
                existingEntry.operation = operation;
                existingEntry.details = { ...existingEntry.details, ...details };
            }
            else {
                // For other cases, just update the details
                existingEntry.details = { ...existingEntry.details, ...details };
                existingEntry.details.summary = this.mergeSummaries(existingEntry.details.summary, details.summary);
            }
        }
        else {
            // Create new entry
            const entry = {
                id: this.generateChangeId(),
                timestamp: new Date().toISOString(),
                operation,
                path,
                details
            };
            this.sessionLog.entries.push(entry);
        }
    }
    mergeSummaries(existing, newSummary) {
        // Create a combined summary that shows the progression
        return `${existing} → ${newSummary}`;
    }
    async findRepositoryRoot(startPath) {
        let currentPath = path.resolve(startPath);
        // If startPath is a file, start from its directory
        try {
            const stat = await fs.stat(currentPath);
            if (stat.isFile()) {
                currentPath = path.dirname(currentPath);
            }
        }
        catch {
            // If we can't stat the path, assume it's a directory
        }
        // Traverse up the directory tree looking for .git
        while (currentPath !== path.dirname(currentPath)) { // Stop at filesystem root
            try {
                const gitPath = path.join(currentPath, '.git');
                await fs.access(gitPath);
                return currentPath; // Found .git directory
            }
            catch {
                // .git not found, continue up
            }
            currentPath = path.dirname(currentPath);
        }
        return null; // No repository root found
    }
    getRelativePathFromRepo(filePath, repoRoot) {
        if (!repoRoot) {
            return filePath; // Return original path if no repo root found
        }
        const relativePath = path.relative(repoRoot, filePath);
        return relativePath || filePath;
    }
    async readMultipleFiles(fileSpecs) {
        const results = [];
        for (const spec of fileSpecs) {
            const filePath = typeof spec === 'string' ? spec : spec.path;
            try {
                const validatedPath = this.validatePath(filePath);
                const content = await fs.readFile(validatedPath, 'utf-8');
                const lines = content.split(/\r?\n/);
                let selectedContent = content;
                let rangeApplied = { type: 'all', totalLines: lines.length };
                if (typeof spec !== 'string') {
                    const startProvided = typeof spec.startLine === 'number';
                    const endProvided = typeof spec.endLine === 'number';
                    const headProvided = typeof spec.head === 'number' && spec.head > 0;
                    const tailProvided = typeof spec.tail === 'number' && spec.tail > 0;
                    if (startProvided || endProvided) {
                        const start = Math.max(1, Math.floor(startProvided ? spec.startLine : 1));
                        const end = Math.max(1, Math.floor(endProvided ? spec.endLine : lines.length));
                        const normalizedStart = Math.min(start, lines.length);
                        const normalizedEnd = Math.min(Math.max(normalizedStart, end), lines.length);
                        selectedContent = lines.slice(normalizedStart - 1, normalizedEnd).join('\n');
                        rangeApplied = { type: 'range', startLine: normalizedStart, endLine: normalizedEnd, totalLines: lines.length };
                    }
                    else if (headProvided) {
                        const count = Math.min(Math.floor(spec.head), lines.length);
                        selectedContent = lines.slice(0, count).join('\n');
                        rangeApplied = { type: 'head', head: count, totalLines: lines.length };
                    }
                    else if (tailProvided) {
                        const count = Math.min(Math.floor(spec.tail), lines.length);
                        selectedContent = lines.slice(Math.max(0, lines.length - count)).join('\n');
                        rangeApplied = { type: 'tail', tail: count, totalLines: lines.length };
                    }
                }
                results.push({ path: filePath, content: selectedContent, rangeApplied: rangeApplied.type === 'all' ? undefined : rangeApplied });
            }
            catch (error) {
                results.push({ path: filePath, error: error.message });
            }
        }
        return {
            content: [{ type: 'text', text: JSON.stringify(results, null, 2) }],
            isError: false
        };
    }
    async writeFile(filePath, content) {
        const validatedPath = this.validatePath(filePath);
        try {
            // Check if file exists to determine if this is create or write
            let isCreate = false;
            let originalContent = '';
            try {
                originalContent = await fs.readFile(validatedPath, 'utf-8');
            }
            catch {
                isCreate = true;
            }
            // Ensure directory exists
            await fs.mkdir(path.dirname(validatedPath), { recursive: true });
            await fs.writeFile(validatedPath, content, 'utf-8');
            // Generate patch for both create and write operations
            const patch = diff.createPatch(filePath, originalContent, // Empty string for new files
            content, isCreate ? 'new file' : 'original', isCreate ? 'created' : 'modified');
            // Log the change with patch
            this.logChange(isCreate ? 'create' : 'write', filePath, {
                diff: patch,
                size: Buffer.byteLength(content, 'utf-8'),
                originalContent: isCreate ? '' : originalContent,
                summary: isCreate
                    ? `Created new file with ${content.split('\n').length} lines`
                    : `Overwrote file with ${content.split('\n').length} lines`
            });
            // Find repository root and calculate relative path
            const repoRoot = await this.findRepositoryRoot(validatedPath);
            const relativePathFromRepo = this.getRelativePathFromRepo(validatedPath, repoRoot);
            const extraContent = {
                beforeContent: originalContent,
                afterContent: content,
                relativePath: relativePathFromRepo,
                repositoryRoot: repoRoot
            };
            return {
                content: [{ type: "text", text: `Successfully wrote file: ${filePath}` }],
                extraContent: extraContent,
                isError: false,
            };
        }
        catch (error) {
            throw new Error(`Failed to write file ${filePath}: ${error.message}`);
        }
    }
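    // Usage sketch (illustrative annotation, not part of the published file):
    // createPatch here is the jsdiff API from the 'diff' package, i.e.
    // createPatch(fileName, oldStr, newStr, oldHeader, newHeader). For a
    // brand-new file the original content is the empty string, so the patch
    // lists every line of the new content as added, e.g.:
    //
    //   const patch = diff.createPatch('notes.txt', '', 'hello\n', 'new file', 'created');
    //   // patch is a unified diff whose single hunk contains a "+hello" line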
    static async createPatch(filePath, originalContent, modifiedContent) {
        return diff.createPatch(filePath, originalContent, modifiedContent, '', '');
    }
    static async getDiff(filePath, edits) {
        // Align preview path resolution with the runtime server behavior:
        // prefer execution CWD / project roots when available, otherwise fall back
        // to process.cwd()-relative resolution.
        let resolvedPath = path.resolve(filePath);
        if (!path.isAbsolute(filePath)) {
            try {
                const { SessionContext } = await import('../../session/SessionContext.js');
                const session = SessionContext.getInstance();
                const execCwd = session?.getExecutionCwd?.();
                const roots = session?.getProjectRootPaths?.();
                const baseDir = execCwd || (Array.isArray(roots) && roots.length > 0 ? roots[0] : process.cwd());
                resolvedPath = path.resolve(baseDir, filePath);
            }
            catch {
                // SessionContext not available (e.g., very early init); fall back to process.cwd
                resolvedPath = path.resolve(filePath);
            }
        }
        const originalContent = await fs.readFile(resolvedPath, 'utf-8');
        let modifiedContent = originalContent;
        // Apply edits
        for (let i = 0; i < edits.length; i++) {
            const edit = edits[i];
            if (!modifiedContent.includes(edit.oldText)) {
                throw new TextNotFoundError(`Text not found: "${edit.oldText}"`, i, edit.oldText);
            }
            modifiedContent = modifiedContent.replace(edit.oldText, edit.newText);
        }
        return diff.createPatch(filePath, originalContent, modifiedContent, '', '');
    }
    async editFile(filePath, edits) {
        const validatedPath = this.validatePath(filePath);
        try {
            const currentContent = await fs.readFile(validatedPath, 'utf-8');
            let modifiedContent = currentContent;
            // Apply edits
            for (let i = 0; i < edits.length; i++) {
                const edit = edits[i];
                if (!modifiedContent.includes(edit.oldText)) {
                    throw new TextNotFoundError(`Text not found: "${edit.oldText}"`, i, edit.oldText);
                }
                modifiedContent = modifiedContent.replace(edit.oldText, edit.newText);
            }
            // Apply changes
            await fs.writeFile(validatedPath, modifiedContent, 'utf-8');
            // For logging, we need to determine the original content for cumulative diff
            // Check if there's already an entry for this file to get the original content
            const existingEntry = this.sessionLog.entries.find(entry => entry.path === filePath);
            const originalContent = existingEntry?.details.originalContent || currentContent;
            // Create diff from original content to final content
            const cumulativeDiff = diff.createPatch(filePath, originalContent, modifiedContent, 'original', 'modified');
            // Log the change with cumulative information
            this.logChange('edit', filePath, {
                diff: cumulativeDiff,
                originalContent: originalContent,
                editCount: (existingEntry?.details.editCount || 0) + 1,
                summary: `Applied ${edits.length} edit${edits.length > 1 ? 's' : ''} to file`
            });
            // Return the current edit diff for immediate feedback
            const currentDiff = diff.createPatch(filePath, currentContent, modifiedContent, '', '');
            const repoRoot = await this.findRepositoryRoot(validatedPath);
            const relativePathFromRepo = this.getRelativePathFromRepo(validatedPath, repoRoot);
            const extraContent = {
                beforeContent: currentContent,
                afterContent: modifiedContent,
                relativePath: relativePathFromRepo,
                repositoryRoot: repoRoot
            };
            return {
                content: [{ type: "text", text: `File edited successfully:\n\n${currentDiff}` }],
                extraContent: extraContent,
                isError: false
            };
        }
        catch (error) {
            throw new Error(`Failed to edit file ${filePath}: ${error.message}`);
        }
    }
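    // Usage sketch (illustrative annotation, not part of the published file):
    // edits are applied in order, each replacing the first exact occurrence of
    // oldText; a missing oldText raises TextNotFoundError internally, which
    // editFile re-reports as a generic "Failed to edit file ..." error and
    // callTool returns as an isError result. A single-edit call (using the
    // port example from the schema description) might look like:
    //
    //   await server.callTool({
    //       name: "edit_file",
    //       arguments: {
    //           path: "src/config.ts",
    //           edits: [
    //               { oldText: "const port = 3000;", newText: "const port = process.env.PORT || 3000;" }
    //           ]
    //       }
    //   });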
    async createDirectory(dirPath) {
        const validatedPath = this.validatePath(dirPath);
        try {
            // Check if directory already exists
            let alreadyExists = false;
            try {
                const stat = await fs.stat(validatedPath);
                alreadyExists = stat.isDirectory();
            }
            catch {
                // Directory doesn't exist, which is what we want
            }
            await fs.mkdir(validatedPath, { recursive: true });
            // Log the change only if directory was actually created
            if (!alreadyExists) {
                this.logChange('mkdir', dirPath, {
                    summary: `Created directory`
                });
            }
            return {
                content: [{ type: "text", text: `Successfully created directory: ${dirPath}` }],
                isError: false
            };
        }
        catch (error) {
            throw new Error(`Failed to create directory ${dirPath}: ${error.message}`);
        }
    }
    async listFilesInDirectories(dirPaths) {
        const results = [];
        const errors = [];
        for (const dirPath of dirPaths) {
            try {
                const validatedPath = this.validatePath(dirPath);
                const entries = await fs.readdir(validatedPath, { withFileTypes: true });
                // Add directory header
                results.push(`\n=== ${dirPath} ===`);
                if (entries.length === 0) {
                    results.push("(empty directory)");
                }
                else {
                    const listing = entries.map(entry => {
                        const prefix = entry.isDirectory() ? '[DIR]' : '[FILE]';
                        return `${prefix} ${entry.name}`;
                    }).join('\n');
                    results.push(listing);
                }
            }
            catch (error) {
                errors.push(`Failed to list ${dirPath}: ${error.message}`);
            }
        }
        // Combine results and errors
        let finalText = results.join('\n');
        if (errors.length > 0) {
            finalText += '\n\n=== Errors ===\n' + errors.join('\n');
        }
        return {
            content: [{ type: "text", text: finalText.trim() }],
            isError: false
        };
    }
    async directoryTree(dirPath, args) {
        const validatedPath = this.validatePath(dirPath);
        const options = {
            maxDepth: typeof args?.maxDepth === 'number' && args.maxDepth > 0
                ? Math.floor(args.maxDepth)
                : DEFAULT_MAX_DEPTH,
            maxDirectories: typeof args?.maxDirectories === 'number' && args.maxDirectories > 0
                ? Math.floor(args.maxDirectories)
                : DEFAULT_MAX_DIRECTORIES,
            exclude: Array.isArray(args?.exclude) && args.exclude.length > 0
                ? args.exclude.map((value) => typeof value === 'string' ? value : String(value))
                : DEFAULT_EXCLUDED_DIRS,
        };
        const state = { directoriesVisited: 0, truncated: false };
        try {
            const tree = await this.buildDirectoryTree(validatedPath, options, 1, state);
            const result = {
                name: tree?.name ?? path.basename(validatedPath),
                type: 'directory',
                children: tree?.children ?? [],
                meta: {
                    truncated: state.truncated,
                    maxDepth: options.maxDepth,
                    maxDirectories: options.maxDirectories,
                    excluded: options.exclude,
                },
            };
            return {
                content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
                isError: false
            };
        }
        catch (error) {
            throw new Error(`Failed to build directory tree for ${dirPath}: ${error.message}`);
        }
    }
    async buildDirectoryTree(dirPath, options, depth, state) {
        const name = path.basename(dirPath);
        // Exclude directories by name or simple pattern matching
        if (options.exclude.some(pattern => matchesPattern(name, pattern))) {
            return null;
        }
        let stat;
        try {
            stat = await fs.stat(dirPath);
        }
        catch {
            // If we can't stat the path, mark as truncated and return an empty node
            state.truncated = true;
            return { name, type: 'directory', children: [] };
        }
        if (!stat.isDirectory()) {
            // Not a directory; nothing to traverse
            return null;
        }
        if (depth > options.maxDepth || state.directoriesVisited >= options.maxDirectories) {
            state.truncated = true;
            return { name, type: 'directory', children: [] };
        }
        state.directoriesVisited++;
        let entries;
        try {
            entries = await fs.readdir(dirPath, { withFileTypes: true });
        }
        catch {
            state.truncated = true;
            return { name, type: 'directory', children: [] };
        }
        const children = [];
        for (const entry of entries) {
            if (!entry.isDirectory()) {
                continue;
            }
            if (state.directoriesVisited >= options.maxDirectories) {
                state.truncated = true;
                break;
            }
            const entryPath = path.join(dirPath, entry.name);
            try {
                const child = await this.buildDirectoryTree(entryPath, options, depth + 1, state);
                if (child) {
                    children.push(child);
                }
                if (state.truncated) {
                    break;
                }
            }
            catch {
                // Skip directories we can't access
            }
        }
        return { name, type: 'directory', children };
    }
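    // Result sketch (illustrative annotation, not part of the published file):
    // directory_tree returns only directories, so for a small project under
    // the default limits the JSON text payload has roughly this shape (names
    // are hypothetical):
    //
    //   {
    //       "name": "repo",
    //       "type": "directory",
    //       "children": [
    //           { "name": "src", "type": "directory", "children": [] },
    //           { "name": "docs", "type": "directory", "children": [] }
    //       ],
    //       "meta": { "truncated": false, "maxDepth": 3, "maxDirectories": 2000, "excluded": [".git", "node_modules", "..."] }
    //   }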
    async moveFile(source, destination) {
        const validatedSource = this.validatePath(source);
        const validatedDestination = this.validatePath(destination);
        try {
            // Check if destination exists
            try {
                await fs.access(validatedDestination);
                throw new Error(`Destination already exists: ${destination}`);
            }
            catch (error) {
                // Good, destination doesn't exist
                if (error.code !== 'ENOENT') {
                    throw error;
                }
            }
            // Ensure destination directory exists
            await fs.mkdir(path.dirname(validatedDestination), { recursive: true });
            await fs.rename(validatedSource, validatedDestination);
            // Log the change
            this.logChange('move', destination, {
                oldPath: source,
                summary: `Moved from ${source} to ${destination}`
            });
            return {
                content: [{ type: "text", text: `Successfully moved ${source} to ${destination}` }],
                isError: false
            };
        }
        catch (error) {
            throw new Error(`Failed to move ${source} to ${destination}: ${error.message}`);
        }
    }
    async searchFilesRecursive(dirPath, pattern, excludePatterns) {
        const results = [];
        const entries = await fs.readdir(dirPath, { withFileTypes: true });
        for (const entry of entries) {
            const entryPath = path.join(dirPath, entry.name);
            // Check if entry matches exclude patterns
            const shouldExclude = excludePatterns.some(excludePattern => {
                return entry.name.toLowerCase().includes(excludePattern.toLowerCase());
            });
            if (shouldExclude) {
                continue;
            }
            // Check if entry matches search pattern
            if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
                results.push(entryPath);
            }
            // Recursively search directories
            if (entry.isDirectory()) {
                try {
                    const subResults = await this.searchFilesRecursive(entryPath, pattern, excludePatterns);
                    results.push(...subResults);
                }
                catch (error) {
                }
            }
        }
        return results;
    }
    async getFileInfo(filePath) {
        const validatedPath = this.validatePath(filePath);
        try {
            const stat = await fs.stat(validatedPath);
            const fileInfo = {
                name: path.basename(validatedPath),
                type: stat.isDirectory() ? 'directory' : 'file',
                size: stat.size,
                modified: stat.mtime.toISOString(),
                created: stat.birthtime.toISOString(),
                permissions: '0' + (stat.mode & parseInt('777', 8)).toString(8)
            };
            return {
                content: [{ type: "text", text: JSON.stringify(fileInfo, null, 2) }],
                isError: false
            };
        }
        catch (error) {
            throw new Error(`Failed to get file info for ${filePath}: ${error.message}`);
        }
    }
    async listAllowedDirectories() {
        return {
            content: [{ type: "text", text: JSON.stringify(this.allowedDirectories, null, 2) }],
            isError: false
        };
    }
    async deleteMultipleFiles(filePaths) {
        const results = [];
        let successCount = 0;
        let errorCount = 0;
        for (const filePath of filePaths) {
            try {
                const validatedPath = this.validatePath(filePath);
                // Check if file/directory exists and get its info
                const stat = await fs.stat(validatedPath);
                const isDirectory = stat.isDirectory();
                const itemType = isDirectory ? 'directory' : 'file';
                // For directories, we need to use recursive removal
                if (isDirectory) {
                    await fs.rm(validatedPath, { recursive: true, force: true });
                }
                else {
                    await fs.unlink(validatedPath);
                }
                // Log the change
                this.logChange('delete', filePath, {
                    summary: `Deleted ${itemType}: ${path.basename(filePath)}`
                });
                results.push({ path: filePath, success: true, itemType });
                successCount++;
            }
            catch (error) {
                let errorMessage = error.message;
                if (error.code === 'ENOENT') {
                    errorMessage = `File or directory not found: ${filePath}`;
                }
                results.push({ path: filePath, error: errorMessage });
                errorCount++;
            }
        }
        // Create summary message
        const summary = `Deletion completed: ${successCount} successful, ${errorCount} failed out of ${filePaths.length} total items`;
        const detailedResults = JSON.stringify(results, null, 2);
        return {
            content: [{ type: "text", text: `${summary}\n\nDetailed results:\n${detailedResults}` }],
            isError: errorCount > 0 && successCount === 0 // Only mark as error if ALL deletions failed
        };
    }
    getSessionLog() {
        return this.sessionLog;
    }
    getSessionLogSummary() {
        // Create a summary view that shows the final state of each file
        return {
            sessionId: this.sessionLog.sessionId,
            startTime: this.sessionLog.startTime,
            totalOperations: this.sessionLog.entries.length,
            fileChanges: this.sessionLog.entries.map(entry => ({
                path: entry.path,
                operation: entry.operation,
                timestamp: entry.timestamp,
                summary: entry.details.summary,
                editCount: entry.details.editCount,
                hasOriginalContent: !!entry.details.originalContent,
                diffPreview: entry.details.diff ? entry.details.diff.split('\n').slice(0, 10).join('\n') + (entry.details.diff.split('\n').length > 10 ? '\n...' : '') : undefined
            }))
        };
    }
    async clearSessionLog() {
        try {
            this.sessionLog = {
                sessionId: this.generateSessionId(),
                startTime: new Date().toISOString(),
                entries: []
            };
            this.changeCounter = 0;
        }
        catch (error) {
            throw new Error(`Failed to clear session log: ${error.message}`);
        }
    }
}
//# sourceMappingURL=filesystem.js.map
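For orientation, a minimal end-to-end sketch of driving this server directly in Node.js (ESM). The import specifier below is an assumption based on the dist path shown above; the package's public entry point may expose the class differently, and the paths used are hypothetical.

    import { FilesystemServerEnhanced } from '@qodo/sdk/dist/mcp/servers/filesystem.js';

    // Restrict the server to a single project root; relative tool paths are
    // resolved against (and must stay inside) this directory.
    const server = new FilesystemServerEnhanced(['/work/repo']);
    await server.initialize();

    // Discover the tool surface (read_files, write_file, edit_file, ...).
    const { tools } = await server.listTools();
    console.log(tools.map(t => t.name));

    // Shallow directory overview, then a scoped file read.
    const tree = await server.callTool({
        name: 'directory_tree',
        arguments: { path: '.', maxDepth: 2 },
    });
    console.log(tree.content[0].text);

    const files = await server.callTool({
        name: 'read_files',
        arguments: { paths: [{ path: 'package.json', head: 20 }] },
    });
    console.log(files.content[0].text);

    // Everything written/edited/deleted during the session is recorded here.
    console.log(server.getSessionLogSummary());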