code-graph-context 1.1.0 → 2.0.0
This diff shows the changes between publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.
- package/README.md +221 -101
- package/dist/core/config/fairsquare-framework-schema.js +47 -60
- package/dist/core/config/nestjs-framework-schema.js +11 -1
- package/dist/core/config/schema.js +1 -1
- package/dist/core/config/timeouts.js +27 -0
- package/dist/core/embeddings/embeddings.service.js +122 -2
- package/dist/core/embeddings/natural-language-to-cypher.service.js +416 -17
- package/dist/core/parsers/parser-factory.js +5 -3
- package/dist/core/parsers/typescript-parser.js +614 -45
- package/dist/core/parsers/workspace-parser.js +553 -0
- package/dist/core/utils/edge-factory.js +37 -0
- package/dist/core/utils/file-change-detection.js +105 -0
- package/dist/core/utils/file-utils.js +20 -0
- package/dist/core/utils/index.js +3 -0
- package/dist/core/utils/path-utils.js +75 -0
- package/dist/core/utils/progress-reporter.js +112 -0
- package/dist/core/utils/project-id.js +176 -0
- package/dist/core/utils/retry.js +41 -0
- package/dist/core/workspace/index.js +4 -0
- package/dist/core/workspace/workspace-detector.js +221 -0
- package/dist/mcp/constants.js +153 -5
- package/dist/mcp/handlers/cross-file-edge.helpers.js +19 -0
- package/dist/mcp/handlers/file-change-detection.js +105 -0
- package/dist/mcp/handlers/graph-generator.handler.js +97 -32
- package/dist/mcp/handlers/incremental-parse.handler.js +146 -0
- package/dist/mcp/handlers/streaming-import.handler.js +210 -0
- package/dist/mcp/handlers/traversal.handler.js +130 -71
- package/dist/mcp/mcp.server.js +45 -6
- package/dist/mcp/service-init.js +79 -0
- package/dist/mcp/services/job-manager.js +165 -0
- package/dist/mcp/services/watch-manager.js +376 -0
- package/dist/mcp/services.js +2 -2
- package/dist/mcp/tools/check-parse-status.tool.js +64 -0
- package/dist/mcp/tools/impact-analysis.tool.js +84 -18
- package/dist/mcp/tools/index.js +13 -1
- package/dist/mcp/tools/list-projects.tool.js +62 -0
- package/dist/mcp/tools/list-watchers.tool.js +51 -0
- package/dist/mcp/tools/natural-language-to-cypher.tool.js +34 -8
- package/dist/mcp/tools/parse-typescript-project.tool.js +318 -58
- package/dist/mcp/tools/search-codebase.tool.js +56 -16
- package/dist/mcp/tools/start-watch-project.tool.js +100 -0
- package/dist/mcp/tools/stop-watch-project.tool.js +49 -0
- package/dist/mcp/tools/traverse-from-node.tool.js +68 -9
- package/dist/mcp/utils.js +35 -13
- package/dist/mcp/workers/parse-worker.js +198 -0
- package/dist/storage/neo4j/neo4j.service.js +147 -48
- package/package.json +4 -2
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Watch Manager Service
|
|
3
|
+
* Manages file watchers for incremental graph updates across projects
|
|
4
|
+
* Uses @parcel/watcher for high-performance file watching
|
|
5
|
+
*/
|
|
6
|
+
import * as watcher from '@parcel/watcher';
|
|
7
|
+
import { debugLog } from '../utils.js';
|
|
8
|
+
// Glob patterns excluded from watching by default. Compiled output
// ('**/*.js', '**/*.js.map') and declaration files are skipped because the
// event callback only acts on .ts/.tsx sources anyway.
const DEFAULT_EXCLUDE_PATTERNS = [
    '**/node_modules/**',
    '**/dist/**',
    '**/build/**',
    '**/.git/**',
    '**/*.d.ts',
    '**/*.js.map',
    '**/*.js',
];
// Quiet period after the last file event before an incremental parse runs.
const DEFAULT_DEBOUNCE_MS = 1000;
// Hard cap on simultaneously watched projects.
const MAX_WATCHERS = 10;
// Upper bound on buffered file events per watcher; beyond this the oldest
// half of the buffer is dropped (see handleFileEvent).
const MAX_PENDING_EVENTS = 1000;
|
|
20
|
+
/**
 * Manages file watchers for incremental graph updates across projects.
 *
 * One instance (the module singleton below) tracks up to MAX_WATCHERS
 * projects via @parcel/watcher subscriptions. File events are buffered and
 * debounced per project, then handed to the injected incremental parse
 * handler; progress is reported to the client through MCP logging
 * notifications when a server is attached.
 */
class WatchManager {
    // projectId -> internal watcher state object
    watchers = new Map();
    // MCP server used for sendLoggingMessage notifications (optional)
    mcpServer = null;
    // async (projectPath, projectId, tsconfigPath) => { nodesUpdated, edgesUpdated }
    incrementalParseHandler = null;
    /**
     * Set the MCP server instance for sending notifications
     */
    setMcpServer(server) {
        this.mcpServer = server;
    }
    /**
     * Set the incremental parse handler function
     */
    setIncrementalParseHandler(handler) {
        this.incrementalParseHandler = handler;
    }
    /**
     * Send a notification via MCP logging (if supported).
     * Notification types containing 'failed' are logged at 'error' level.
     */
    sendNotification(notification) {
        if (!this.mcpServer) {
            return;
        }
        // sendLoggingMessage returns a Promise - use .catch() to handle rejection
        this.mcpServer
            .sendLoggingMessage({
            level: notification.type.includes('failed') ? 'error' : 'info',
            logger: 'file-watcher',
            data: notification,
        })
            .catch(() => {
            // MCP logging not supported - silently ignore
            // This is expected if the client doesn't support logging capability
        });
    }
    /**
     * Start watching a project for file changes.
     *
     * Returns watcher info for the project. If the project is already being
     * watched, the existing watcher's info is returned.
     *
     * @throws if MAX_WATCHERS watchers are already registered, or if the
     *         underlying @parcel/watcher subscription fails.
     */
    async startWatching(config) {
        // Check if already watching this project
        if (this.watchers.has(config.projectId)) {
            const existing = this.watchers.get(config.projectId);
            return this.getWatcherInfoFromState(existing);
        }
        // Enforce maximum watcher limit
        if (this.watchers.size >= MAX_WATCHERS) {
            throw new Error(`Maximum watcher limit (${MAX_WATCHERS}) reached. ` + `Stop an existing watcher before starting a new one.`);
        }
        const fullConfig = {
            projectPath: config.projectPath,
            projectId: config.projectId,
            tsconfigPath: config.tsconfigPath,
            debounceMs: config.debounceMs ?? DEFAULT_DEBOUNCE_MS,
            excludePatterns: config.excludePatterns ?? DEFAULT_EXCLUDE_PATTERNS,
        };
        // Create state object first (subscription will be added after)
        const state = {
            projectId: fullConfig.projectId,
            projectPath: fullConfig.projectPath,
            tsconfigPath: fullConfig.tsconfigPath,
            subscription: null, // Will be set after successful subscription
            config: fullConfig,
            pendingEvents: [],
            debounceTimer: null,
            isProcessing: false,
            isStopping: false,
            lastUpdateTime: null,
            status: 'active',
        };
        // BUGFIX: reserve the map slot BEFORE any await. Previously the state
        // was only registered after `watcher.subscribe` resolved, so two
        // concurrent startWatching calls for the same projectId both passed
        // the has() check above and created duplicate subscriptions (and
        // could bypass the MAX_WATCHERS limit the same way).
        this.watchers.set(fullConfig.projectId, state);
        try {
            await debugLog('Creating @parcel/watcher subscription', {
                watchPath: fullConfig.projectPath,
                ignored: fullConfig.excludePatterns,
            });
            // Subscribe to file changes using @parcel/watcher
            const subscription = await watcher.subscribe(fullConfig.projectPath, (err, events) => {
                if (err) {
                    this.handleWatcherError(state, err);
                    return;
                }
                for (const event of events) {
                    try {
                        // Filter for TypeScript files
                        if (!event.path.endsWith('.ts') && !event.path.endsWith('.tsx')) {
                            continue;
                        }
                        // Map parcel event types ('create'/'delete'/other) to ours
                        let eventType;
                        if (event.type === 'create') {
                            eventType = 'add';
                        }
                        else if (event.type === 'delete') {
                            eventType = 'unlink';
                        }
                        else {
                            eventType = 'change';
                        }
                        this.handleFileEvent(state, eventType, event.path);
                    }
                    catch (error) {
                        // Fire-and-forget: debugLog is async but this callback is sync
                        debugLog('Error processing file event', { error: String(error), path: event.path });
                    }
                }
            }, {
                ignore: fullConfig.excludePatterns,
            });
            state.subscription = subscription;
            await debugLog('Watcher started', { projectId: fullConfig.projectId, projectPath: fullConfig.projectPath });
            // Check for changes that occurred while watcher was off (run in background)
            this.syncMissedChanges(state);
            return this.getWatcherInfoFromState(state);
        }
        catch (error) {
            // Roll back the reservation unless something else replaced it
            if (this.watchers.get(fullConfig.projectId) === state) {
                this.watchers.delete(fullConfig.projectId);
            }
            await debugLog('Failed to start watcher', { error: String(error) });
            throw error;
        }
    }
    /**
     * Handle a single file system event: buffer it, notify the client, and
     * (re)arm the debounce timer that eventually triggers processEvents().
     */
    handleFileEvent(state, type, filePath) {
        // Fire-and-forget logging: this method is synchronous, debugLog is async
        debugLog('File event received', { type, filePath, projectId: state.projectId, status: state.status });
        // Ignore events if watcher is stopping or not active
        if (state.isStopping || state.status !== 'active') {
            debugLog('Ignoring event - watcher not active or stopping', {
                status: state.status,
                isStopping: state.isStopping,
            });
            return;
        }
        const event = {
            type,
            filePath,
            timestamp: Date.now(),
        };
        // Prevent unbounded event accumulation - drop oldest events if buffer is full
        if (state.pendingEvents.length >= MAX_PENDING_EVENTS) {
            debugLog('Event buffer full, dropping oldest events', { projectId: state.projectId });
            state.pendingEvents = state.pendingEvents.slice(-Math.floor(MAX_PENDING_EVENTS / 2));
        }
        state.pendingEvents.push(event);
        debugLog('Event added to pending', { pendingCount: state.pendingEvents.length });
        // Clear existing debounce timer
        if (state.debounceTimer) {
            clearTimeout(state.debounceTimer);
        }
        // Send immediate notification about file change detected
        this.sendNotification({
            type: 'file_change_detected',
            projectId: state.projectId,
            projectPath: state.projectPath,
            data: {
                filesChanged: state.pendingEvents.filter((e) => e.type === 'change').map((e) => e.filePath),
                filesAdded: state.pendingEvents.filter((e) => e.type === 'add').map((e) => e.filePath),
                filesDeleted: state.pendingEvents.filter((e) => e.type === 'unlink').map((e) => e.filePath),
            },
            timestamp: new Date().toISOString(),
        });
        // Set new debounce timer
        state.debounceTimer = setTimeout(() => {
            this.processEvents(state).catch((error) => {
                console.error('[WatchManager] Error in processEvents:', error);
            });
        }, state.config.debounceMs);
    }
    /**
     * Process accumulated file events after the debounce period: drain the
     * buffer, run the incremental parse handler once, and notify the client
     * of start/completion/failure.
     */
    async processEvents(state) {
        // Don't process if already processing, no events, or watcher is stopping
        if (state.isProcessing || state.pendingEvents.length === 0 || state.isStopping)
            return;
        state.isProcessing = true;
        const events = [...state.pendingEvents];
        state.pendingEvents = [];
        state.debounceTimer = null;
        const startTime = Date.now();
        this.sendNotification({
            type: 'incremental_parse_started',
            projectId: state.projectId,
            projectPath: state.projectPath,
            data: {
                filesChanged: events.filter((e) => e.type === 'change').map((e) => e.filePath),
                filesAdded: events.filter((e) => e.type === 'add').map((e) => e.filePath),
                filesDeleted: events.filter((e) => e.type === 'unlink').map((e) => e.filePath),
            },
            timestamp: new Date().toISOString(),
        });
        try {
            if (!this.incrementalParseHandler) {
                throw new Error('Incremental parse handler not configured');
            }
            const result = await this.incrementalParseHandler(state.projectPath, state.projectId, state.tsconfigPath);
            state.lastUpdateTime = new Date();
            const elapsedMs = Date.now() - startTime;
            this.sendNotification({
                type: 'incremental_parse_completed',
                projectId: state.projectId,
                projectPath: state.projectPath,
                data: {
                    filesChanged: events.filter((e) => e.type === 'change').map((e) => e.filePath),
                    filesAdded: events.filter((e) => e.type === 'add').map((e) => e.filePath),
                    filesDeleted: events.filter((e) => e.type === 'unlink').map((e) => e.filePath),
                    nodesUpdated: result.nodesUpdated,
                    edgesUpdated: result.edgesUpdated,
                    elapsedMs,
                },
                timestamp: new Date().toISOString(),
            });
            console.log(`[WatchManager] Incremental parse completed for ${state.projectId}: ` +
                `${result.nodesUpdated} nodes, ${result.edgesUpdated} edges in ${elapsedMs}ms`);
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.sendNotification({
                type: 'incremental_parse_failed',
                projectId: state.projectId,
                projectPath: state.projectPath,
                data: {
                    error: errorMessage,
                    elapsedMs: Date.now() - startTime,
                },
                timestamp: new Date().toISOString(),
            });
            console.error(`[WatchManager] Incremental parse failed for ${state.projectId}:`, error);
        }
        finally {
            state.isProcessing = false;
        }
    }
    /**
     * Handle a watcher-level error from @parcel/watcher: record it and tear
     * the watcher down so it doesn't linger in an error state.
     */
    handleWatcherError(state, error) {
        state.status = 'error';
        state.errorMessage = error instanceof Error ? error.message : String(error);
        debugLog('Watcher error', { projectId: state.projectId, error: state.errorMessage });
        // Clean up the failed watcher to prevent it from staying in error state indefinitely
        this.stopWatching(state.projectId).catch((cleanupError) => {
            console.error(`[WatchManager] Failed to cleanup errored watcher ${state.projectId}:`, cleanupError);
        });
    }
    /**
     * Sync any changes that occurred while the watcher was off.
     * Runs in the background without blocking watcher startup.
     * Promise is tracked on state to allow cleanup during stop.
     */
    syncMissedChanges(state) {
        if (!this.incrementalParseHandler)
            return;
        // Track the promise on state so stopWatching can wait for it
        state.syncPromise = this.incrementalParseHandler(state.projectPath, state.projectId, state.tsconfigPath)
            .then((result) => {
            if (result.nodesUpdated > 0 || result.edgesUpdated > 0) {
                console.log(`[WatchManager] Synced missed changes for ${state.projectId}: ` +
                    `${result.nodesUpdated} nodes, ${result.edgesUpdated} edges`);
            }
        })
            .catch((error) => {
            // Only log if watcher hasn't been stopped
            if (!state.isStopping) {
                console.error(`[WatchManager] Failed to sync missed changes for ${state.projectId}:`, error);
            }
        })
            .finally(() => {
            state.syncPromise = undefined;
        });
    }
    /**
     * Stop watching a project.
     * Waits for any in-progress processing to complete before cleanup.
     *
     * @returns true if a watcher existed and was stopped, false otherwise.
     */
    async stopWatching(projectId) {
        const state = this.watchers.get(projectId);
        if (!state) {
            return false;
        }
        // Mark as stopping to prevent new event processing
        state.isStopping = true;
        state.status = 'paused';
        // Clear debounce timer
        if (state.debounceTimer) {
            clearTimeout(state.debounceTimer);
            state.debounceTimer = null;
        }
        // Wait for any in-progress processing to complete (with timeout)
        const maxWaitMs = 30000; // 30 second timeout
        const startTime = Date.now();
        while (state.isProcessing && Date.now() - startTime < maxWaitMs) {
            await new Promise((resolve) => setTimeout(resolve, 100));
        }
        // Wait for sync promise if it exists (with timeout)
        if (state.syncPromise) {
            try {
                await Promise.race([
                    state.syncPromise,
                    new Promise((_, reject) => setTimeout(() => reject(new Error('Sync timeout')), 5000)),
                ]);
            }
            catch {
                // Timeout or error - continue with cleanup
            }
        }
        // Unsubscribe from @parcel/watcher (only if subscription exists)
        if (state.subscription) {
            try {
                await state.subscription.unsubscribe();
            }
            catch (error) {
                console.warn(`[WatchManager] Error unsubscribing watcher for ${projectId}:`, error);
            }
        }
        this.watchers.delete(projectId);
        console.log(`[WatchManager] Stopped watching project: ${projectId}`);
        return true;
    }
    /**
     * Get watcher info for a project, or undefined if it is not watched.
     */
    getWatcherInfo(projectId) {
        const state = this.watchers.get(projectId);
        if (!state)
            return undefined;
        return this.getWatcherInfoFromState(state);
    }
    /**
     * List all active watchers
     */
    listWatchers() {
        return Array.from(this.watchers.values()).map((state) => this.getWatcherInfoFromState(state));
    }
    /**
     * Stop all watchers (for shutdown)
     */
    async stopAllWatchers() {
        const projectIds = Array.from(this.watchers.keys());
        await Promise.all(projectIds.map((id) => this.stopWatching(id)));
        console.log(`[WatchManager] Stopped all ${projectIds.length} watchers`);
    }
    /**
     * Convert internal state to public info
     */
    getWatcherInfoFromState(state) {
        return {
            projectId: state.projectId,
            projectPath: state.projectPath,
            status: state.status,
            lastUpdateTime: state.lastUpdateTime?.toISOString() ?? null,
            pendingChanges: state.pendingEvents.length,
            debounceMs: state.config.debounceMs,
            errorMessage: state.errorMessage,
        };
    }
}
// Singleton instance
export const watchManager = new WatchManager();
|
package/dist/mcp/services.js
CHANGED
|
@@ -31,12 +31,12 @@ const discoverSchemaFromGraph = async (neo4jService) => {
|
|
|
31
31
|
nodeTypes: nodeTypes.map((r) => ({
|
|
32
32
|
label: r.label,
|
|
33
33
|
count: typeof r.nodeCount === 'object' ? r.nodeCount.toNumber() : r.nodeCount,
|
|
34
|
-
properties: r.sampleProperties
|
|
34
|
+
properties: r.sampleProperties ?? [],
|
|
35
35
|
})),
|
|
36
36
|
relationshipTypes: relationshipTypes.map((r) => ({
|
|
37
37
|
type: r.relationshipType,
|
|
38
38
|
count: typeof r.relCount === 'object' ? r.relCount.toNumber() : r.relCount,
|
|
39
|
-
connections: r.connections
|
|
39
|
+
connections: r.connections ?? [],
|
|
40
40
|
})),
|
|
41
41
|
semanticTypes: semanticTypes.map((r) => ({
|
|
42
42
|
type: r.semanticType,
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Check Parse Status Tool
|
|
3
|
+
* Returns the status of an async parsing job
|
|
4
|
+
*/
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
|
|
7
|
+
import { jobManager } from '../services/job-manager.js';
|
|
8
|
+
import { createErrorResponse, createSuccessResponse } from '../utils.js';
|
|
9
|
+
/**
 * Render a human-readable, multi-line progress report for an in-flight job.
 * During the 'resolving' phase the file counters track edges, so the unit
 * label switches accordingly. A zero filesTotal reports 0%.
 */
const formatProgress = (job) => {
    const p = job.progress;
    let pct = 0;
    if (p.filesTotal > 0) {
        pct = Math.round((p.filesProcessed / p.filesTotal) * 100);
    }
    const unit = p.phase === 'resolving' ? 'edges' : 'files';
    const out = [];
    out.push(`Status: ${job.status}`);
    out.push(`Phase: ${p.phase}`);
    out.push(`Progress: ${pct}% (${p.filesProcessed}/${p.filesTotal} ${unit})`);
    // Chunk line only appears for chunked imports
    if (p.totalChunks > 0) {
        out.push(`Chunk: ${p.currentChunk}/${p.totalChunks}`);
    }
    out.push(`Nodes: ${p.nodesImported}`, `Edges: ${p.edgesImported}`);
    return out.join('\n');
};
|
|
25
|
+
/**
 * Render the final summary for a successfully completed parse job.
 * Falls back to a short message when the job carries no result payload.
 */
const formatCompleted = (job) => {
    const result = job.result;
    if (!result) {
        return 'Parsing completed (no result data)';
    }
    const seconds = (result.elapsedMs / 1000).toFixed(2);
    return `Parsing completed!\n\nNodes: ${result.nodesImported}\nEdges: ${result.edgesImported}\nTime: ${seconds}s\nProject ID: ${job.projectId}`;
};
|
|
38
|
+
// Render the failure summary; a missing/nullish error becomes 'Unknown error'.
const formatFailed = (job) => `Parsing failed: ${job.error ?? 'Unknown error'}`;
|
|
41
|
+
/**
 * Register the check_parse_status tool on the given MCP server.
 * The tool looks up an async parse job by ID and reports either its final
 * outcome (completed/failed) or its current progress.
 */
export const createCheckParseStatusTool = (server) => {
    const meta = TOOL_METADATA[TOOL_NAMES.checkParseStatus];
    const handler = async ({ jobId }) => {
        const job = jobManager.getJob(jobId);
        if (!job) {
            return createErrorResponse(`Job not found: ${jobId}\n\nJobs are automatically cleaned up after 1 hour.`);
        }
        if (job.status === 'completed') {
            return createSuccessResponse(formatCompleted(job));
        }
        if (job.status === 'failed') {
            return createErrorResponse(formatFailed(job));
        }
        // 'pending', 'running', and any unrecognized status report progress
        return createSuccessResponse(formatProgress(job));
    };
    server.registerTool(TOOL_NAMES.checkParseStatus, {
        title: meta.title,
        description: meta.description,
        inputSchema: {
            jobId: z.string().describe('Job ID returned from async parse_typescript_project'),
        },
    }, handler);
};
|
|
@@ -6,7 +6,7 @@
|
|
|
6
6
|
import { z } from 'zod';
|
|
7
7
|
import { Neo4jService, QUERIES } from '../../storage/neo4j/neo4j.service.js';
|
|
8
8
|
import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
|
|
9
|
-
import { createErrorResponse, createSuccessResponse, debugLog } from '../utils.js';
|
|
9
|
+
import { createErrorResponse, createSuccessResponse, debugLog, resolveProjectIdOrError } from '../utils.js';
|
|
10
10
|
// Default relationship weights for core AST relationships
|
|
11
11
|
const DEFAULT_RELATIONSHIP_WEIGHTS = {
|
|
12
12
|
// Critical - inheritance/interface contracts
|
|
@@ -35,6 +35,7 @@ export const createImpactAnalysisTool = (server) => {
|
|
|
35
35
|
title: TOOL_METADATA[TOOL_NAMES.impactAnalysis].title,
|
|
36
36
|
description: TOOL_METADATA[TOOL_NAMES.impactAnalysis].description,
|
|
37
37
|
inputSchema: {
|
|
38
|
+
projectId: z.string().describe('Project ID, name, or path (e.g., "backend" or "proj_a1b2c3d4e5f6")'),
|
|
38
39
|
nodeId: z
|
|
39
40
|
.string()
|
|
40
41
|
.optional()
|
|
@@ -53,13 +54,24 @@ export const createImpactAnalysisTool = (server) => {
|
|
|
53
54
|
.default(4),
|
|
54
55
|
frameworkConfig: FrameworkConfigSchema.optional().describe('Framework-specific configuration for risk scoring. Includes relationshipWeights (e.g., {"INJECTS": 0.9}), highRiskTypes (e.g., ["Controller", "Service"]), and optional name.'),
|
|
55
56
|
},
|
|
56
|
-
}, async ({ nodeId, filePath, maxDepth = 4, frameworkConfig }) => {
|
|
57
|
+
}, async ({ projectId, nodeId, filePath, maxDepth = 4, frameworkConfig }) => {
|
|
58
|
+
const neo4jService = new Neo4jService();
|
|
57
59
|
try {
|
|
60
|
+
// Resolve project ID from name, path, or ID
|
|
61
|
+
const projectResult = await resolveProjectIdOrError(projectId, neo4jService);
|
|
62
|
+
if (!projectResult.success)
|
|
63
|
+
return projectResult.error;
|
|
64
|
+
const resolvedProjectId = projectResult.projectId;
|
|
58
65
|
if (!nodeId && !filePath) {
|
|
59
66
|
return createErrorResponse('Either nodeId or filePath must be provided');
|
|
60
67
|
}
|
|
61
|
-
await debugLog('Impact analysis started', {
|
|
62
|
-
|
|
68
|
+
await debugLog('Impact analysis started', {
|
|
69
|
+
projectId: resolvedProjectId,
|
|
70
|
+
nodeId,
|
|
71
|
+
filePath,
|
|
72
|
+
maxDepth,
|
|
73
|
+
frameworkConfig,
|
|
74
|
+
});
|
|
63
75
|
// Merge default weights with framework-specific weights
|
|
64
76
|
const weights = { ...DEFAULT_RELATIONSHIP_WEIGHTS, ...frameworkConfig?.relationshipWeights };
|
|
65
77
|
const highRiskTypes = new Set(frameworkConfig?.highRiskTypes ?? []);
|
|
@@ -67,9 +79,9 @@ export const createImpactAnalysisTool = (server) => {
|
|
|
67
79
|
let directDependents;
|
|
68
80
|
if (nodeId) {
|
|
69
81
|
// Get target node info
|
|
70
|
-
const targetResult = await neo4jService.run(QUERIES.GET_NODE_BY_ID, { nodeId });
|
|
82
|
+
const targetResult = await neo4jService.run(QUERIES.GET_NODE_BY_ID, { nodeId, projectId: resolvedProjectId });
|
|
71
83
|
if (targetResult.length === 0) {
|
|
72
|
-
return createErrorResponse(`Node with ID "${nodeId}" not found`);
|
|
84
|
+
return createErrorResponse(`Node with ID "${nodeId}" not found in project "${resolvedProjectId}"`);
|
|
73
85
|
}
|
|
74
86
|
const target = targetResult[0];
|
|
75
87
|
targetInfo = {
|
|
@@ -79,25 +91,76 @@ export const createImpactAnalysisTool = (server) => {
|
|
|
79
91
|
filePath: target.filePath ?? '',
|
|
80
92
|
};
|
|
81
93
|
// Get direct dependents using cross-file edge pattern
|
|
82
|
-
const directResult = await neo4jService.run(QUERIES.GET_NODE_IMPACT, {
|
|
94
|
+
const directResult = await neo4jService.run(QUERIES.GET_NODE_IMPACT, {
|
|
95
|
+
nodeId,
|
|
96
|
+
projectId: resolvedProjectId,
|
|
97
|
+
});
|
|
83
98
|
directDependents = normalizeDependents(directResult);
|
|
84
99
|
}
|
|
85
100
|
else {
|
|
86
|
-
// File-based analysis
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
101
|
+
// File-based analysis - find all Class/Function/Interface entities in the file
|
|
102
|
+
// and aggregate their impact analysis results
|
|
103
|
+
const entitiesQuery = `
|
|
104
|
+
MATCH (n)
|
|
105
|
+
WHERE n.projectId = $projectId
|
|
106
|
+
AND (n.filePath = $filePath OR n.filePath ENDS WITH '/' + $filePath)
|
|
107
|
+
AND (n:Class OR n:Function OR n:Interface)
|
|
108
|
+
RETURN n.id AS nodeId, n.name AS name, labels(n) AS labels,
|
|
109
|
+
n.semanticType AS semanticType, n.coreType AS coreType
|
|
110
|
+
`;
|
|
111
|
+
const entities = await neo4jService.run(entitiesQuery, {
|
|
112
|
+
filePath,
|
|
113
|
+
projectId: resolvedProjectId,
|
|
114
|
+
});
|
|
115
|
+
if (entities.length === 0) {
|
|
116
|
+
// No exportable entities found
|
|
117
|
+
targetInfo = {
|
|
118
|
+
id: filePath,
|
|
119
|
+
name: filePath.split('/').pop() ?? filePath,
|
|
120
|
+
type: 'SourceFile',
|
|
121
|
+
filePath: filePath,
|
|
122
|
+
};
|
|
123
|
+
directDependents = [];
|
|
124
|
+
}
|
|
125
|
+
else {
|
|
126
|
+
// Use first entity as the primary target for display
|
|
127
|
+
const primaryEntity = entities[0];
|
|
128
|
+
targetInfo = {
|
|
129
|
+
id: primaryEntity.nodeId,
|
|
130
|
+
name: primaryEntity.name ?? filePath.split('/').pop() ?? filePath,
|
|
131
|
+
type: primaryEntity.semanticType ?? primaryEntity.coreType ?? 'Class',
|
|
132
|
+
filePath: filePath,
|
|
133
|
+
};
|
|
134
|
+
// Aggregate impact from all entities in the file
|
|
135
|
+
const allDependentsMap = new Map();
|
|
136
|
+
for (const entity of entities) {
|
|
137
|
+
const entityResult = await neo4jService.run(QUERIES.GET_NODE_IMPACT, {
|
|
138
|
+
nodeId: entity.nodeId,
|
|
139
|
+
projectId: resolvedProjectId,
|
|
140
|
+
});
|
|
141
|
+
for (const dep of normalizeDependents(entityResult)) {
|
|
142
|
+
// Dedupe by nodeId, keeping highest weight
|
|
143
|
+
const existing = allDependentsMap.get(dep.nodeId);
|
|
144
|
+
if (!existing || dep.weight > existing.weight) {
|
|
145
|
+
allDependentsMap.set(dep.nodeId, dep);
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
directDependents = Array.from(allDependentsMap.values());
|
|
150
|
+
// Update nodeId for transitive analysis if we have dependents
|
|
151
|
+
if (directDependents.length > 0 && entities.length > 0) {
|
|
152
|
+
// Use first entity's nodeId for transitive analysis
|
|
153
|
+
nodeId = primaryEntity.nodeId;
|
|
154
|
+
}
|
|
155
|
+
}
|
|
96
156
|
}
|
|
97
157
|
// Get transitive dependents if nodeId provided
|
|
98
158
|
let transitiveDependents = [];
|
|
99
159
|
if (nodeId && maxDepth > 1) {
|
|
100
|
-
const transitiveResult = await neo4jService.run(QUERIES.GET_TRANSITIVE_DEPENDENTS(maxDepth), {
|
|
160
|
+
const transitiveResult = await neo4jService.run(QUERIES.GET_TRANSITIVE_DEPENDENTS(maxDepth), {
|
|
161
|
+
nodeId,
|
|
162
|
+
projectId: resolvedProjectId,
|
|
163
|
+
});
|
|
101
164
|
transitiveDependents = normalizeTransitiveDependents(transitiveResult);
|
|
102
165
|
// Filter out direct dependents from transitive
|
|
103
166
|
const directIds = new Set(directDependents.map((d) => d.nodeId));
|
|
@@ -147,6 +210,9 @@ export const createImpactAnalysisTool = (server) => {
|
|
|
147
210
|
await debugLog('Impact analysis error', { nodeId, filePath, error });
|
|
148
211
|
return createErrorResponse(error);
|
|
149
212
|
}
|
|
213
|
+
finally {
|
|
214
|
+
await neo4jService.close();
|
|
215
|
+
}
|
|
150
216
|
});
|
|
151
217
|
};
|
|
152
218
|
// Helper functions
|
package/dist/mcp/tools/index.js
CHANGED
|
@@ -2,11 +2,16 @@
|
|
|
2
2
|
* MCP Tool Factory
|
|
3
3
|
* Centralized tool creation and registration
|
|
4
4
|
*/
|
|
5
|
+
import { createCheckParseStatusTool } from './check-parse-status.tool.js';
|
|
5
6
|
import { createHelloTool } from './hello.tool.js';
|
|
6
7
|
import { createImpactAnalysisTool } from './impact-analysis.tool.js';
|
|
8
|
+
import { createListProjectsTool } from './list-projects.tool.js';
|
|
9
|
+
import { createListWatchersTool } from './list-watchers.tool.js';
|
|
7
10
|
import { createNaturalLanguageToCypherTool } from './natural-language-to-cypher.tool.js';
|
|
8
11
|
import { createParseTypescriptProjectTool } from './parse-typescript-project.tool.js';
|
|
9
12
|
import { createSearchCodebaseTool } from './search-codebase.tool.js';
|
|
13
|
+
import { createStartWatchProjectTool } from './start-watch-project.tool.js';
|
|
14
|
+
import { createStopWatchProjectTool } from './stop-watch-project.tool.js';
|
|
10
15
|
import { createTestNeo4jConnectionTool } from './test-neo4j-connection.tool.js';
|
|
11
16
|
import { createTraverseFromNodeTool } from './traverse-from-node.tool.js';
|
|
12
17
|
/**
|
|
@@ -21,6 +26,13 @@ export const registerAllTools = (server) => {
|
|
|
21
26
|
createTraverseFromNodeTool(server);
|
|
22
27
|
createNaturalLanguageToCypherTool(server);
|
|
23
28
|
createImpactAnalysisTool(server);
|
|
24
|
-
// Register project parsing
|
|
29
|
+
// Register project parsing tools
|
|
25
30
|
createParseTypescriptProjectTool(server);
|
|
31
|
+
createCheckParseStatusTool(server);
|
|
32
|
+
// Register project management tools
|
|
33
|
+
createListProjectsTool(server);
|
|
34
|
+
// Register file watch tools
|
|
35
|
+
createStartWatchProjectTool(server);
|
|
36
|
+
createStopWatchProjectTool(server);
|
|
37
|
+
createListWatchersTool(server);
|
|
26
38
|
};
|