code-graph-context 1.0.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +221 -101
- package/dist/core/config/fairsquare-framework-schema.js +47 -60
- package/dist/core/config/nestjs-framework-schema.js +11 -1
- package/dist/core/config/schema.js +1 -1
- package/dist/core/config/timeouts.js +27 -0
- package/dist/core/embeddings/embeddings.service.js +122 -2
- package/dist/core/embeddings/natural-language-to-cypher.service.js +428 -30
- package/dist/core/parsers/parser-factory.js +6 -6
- package/dist/core/parsers/typescript-parser.js +639 -44
- package/dist/core/parsers/workspace-parser.js +553 -0
- package/dist/core/utils/edge-factory.js +37 -0
- package/dist/core/utils/file-change-detection.js +105 -0
- package/dist/core/utils/file-utils.js +20 -0
- package/dist/core/utils/index.js +3 -0
- package/dist/core/utils/path-utils.js +75 -0
- package/dist/core/utils/progress-reporter.js +112 -0
- package/dist/core/utils/project-id.js +176 -0
- package/dist/core/utils/retry.js +41 -0
- package/dist/core/workspace/index.js +4 -0
- package/dist/core/workspace/workspace-detector.js +221 -0
- package/dist/mcp/constants.js +172 -7
- package/dist/mcp/handlers/cross-file-edge.helpers.js +19 -0
- package/dist/mcp/handlers/file-change-detection.js +105 -0
- package/dist/mcp/handlers/graph-generator.handler.js +97 -32
- package/dist/mcp/handlers/incremental-parse.handler.js +146 -0
- package/dist/mcp/handlers/streaming-import.handler.js +210 -0
- package/dist/mcp/handlers/traversal.handler.js +130 -71
- package/dist/mcp/mcp.server.js +46 -7
- package/dist/mcp/service-init.js +79 -0
- package/dist/mcp/services/job-manager.js +165 -0
- package/dist/mcp/services/watch-manager.js +376 -0
- package/dist/mcp/services.js +48 -127
- package/dist/mcp/tools/check-parse-status.tool.js +64 -0
- package/dist/mcp/tools/impact-analysis.tool.js +319 -0
- package/dist/mcp/tools/index.js +15 -1
- package/dist/mcp/tools/list-projects.tool.js +62 -0
- package/dist/mcp/tools/list-watchers.tool.js +51 -0
- package/dist/mcp/tools/natural-language-to-cypher.tool.js +34 -8
- package/dist/mcp/tools/parse-typescript-project.tool.js +325 -60
- package/dist/mcp/tools/search-codebase.tool.js +57 -23
- package/dist/mcp/tools/start-watch-project.tool.js +100 -0
- package/dist/mcp/tools/stop-watch-project.tool.js +49 -0
- package/dist/mcp/tools/traverse-from-node.tool.js +68 -9
- package/dist/mcp/utils.js +35 -12
- package/dist/mcp/workers/parse-worker.js +198 -0
- package/dist/storage/neo4j/neo4j.service.js +273 -34
- package/package.json +4 -2
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Watch Manager Service
|
|
3
|
+
* Manages file watchers for incremental graph updates across projects
|
|
4
|
+
* Uses @parcel/watcher for high-performance file watching
|
|
5
|
+
*/
|
|
6
|
+
import * as watcher from '@parcel/watcher';
|
|
7
|
+
import { debugLog } from '../utils.js';
|
|
8
|
+
// Glob patterns excluded from watching by default. Compiled output and
// declaration/map files are skipped because the graph is built from .ts/.tsx
// sources only (the subscribe callback also filters by extension).
const DEFAULT_EXCLUDE_PATTERNS = [
    '**/node_modules/**',
    '**/dist/**',
    '**/build/**',
    '**/.git/**',
    '**/*.d.ts',
    '**/*.js.map',
    '**/*.js',
];
// Quiet period after the last file event before a batch of changes is parsed.
const DEFAULT_DEBOUNCE_MS = 1000;
// Hard cap on concurrently watched projects; startWatching throws beyond this.
const MAX_WATCHERS = 10;
// Cap on buffered events per watcher; oldest half is dropped when exceeded.
const MAX_PENDING_EVENTS = 1000;
|
|
20
|
+
/**
 * Watch Manager Service
 *
 * Manages one @parcel/watcher subscription per project (keyed by projectId,
 * capped at MAX_WATCHERS). File events for .ts/.tsx sources are buffered,
 * debounced, and then handed to the configured incremental parse handler.
 * Progress is reported best-effort through MCP logging notifications.
 *
 * Fix vs. previous revision: fire-and-forget `debugLog(...)` calls now carry
 * a `.catch(() => {})` — debugLog returns a Promise, and an unhandled
 * rejection would otherwise crash modern Node.
 */
class WatchManager {
    // projectId -> internal watcher state object
    watchers = new Map();
    // MCP server used for best-effort notifications (stays null until set)
    mcpServer = null;
    // async (projectPath, projectId, tsconfigPath) => { nodesUpdated, edgesUpdated }
    incrementalParseHandler = null;
    /**
     * Set the MCP server instance for sending notifications
     */
    setMcpServer(server) {
        this.mcpServer = server;
    }
    /**
     * Set the incremental parse handler function
     */
    setIncrementalParseHandler(handler) {
        this.incrementalParseHandler = handler;
    }
    /**
     * Send a notification via MCP logging (if supported).
     * No-op when no MCP server has been attached.
     */
    sendNotification(notification) {
        if (!this.mcpServer) {
            return;
        }
        // sendLoggingMessage returns a Promise - use .catch() to handle rejection
        this.mcpServer
            .sendLoggingMessage({
                level: notification.type.includes('failed') ? 'error' : 'info',
                logger: 'file-watcher',
                data: notification,
            })
            .catch(() => {
                // MCP logging not supported - silently ignore
                // This is expected if the client doesn't support logging capability
            });
    }
    /**
     * Start watching a project for file changes.
     * Idempotent per projectId: returns info about the existing watcher if one
     * is already running. Throws when the MAX_WATCHERS limit is reached or the
     * underlying subscription fails.
     *
     * @param {{projectPath: string, projectId: string, tsconfigPath?: string,
     *          debounceMs?: number, excludePatterns?: string[]}} config
     * @returns {Promise<object>} public watcher info (see getWatcherInfoFromState)
     */
    async startWatching(config) {
        // Check if already watching this project
        if (this.watchers.has(config.projectId)) {
            const existing = this.watchers.get(config.projectId);
            return this.getWatcherInfoFromState(existing);
        }
        // Enforce maximum watcher limit
        if (this.watchers.size >= MAX_WATCHERS) {
            throw new Error(`Maximum watcher limit (${MAX_WATCHERS}) reached. ` + `Stop an existing watcher before starting a new one.`);
        }
        const fullConfig = {
            projectPath: config.projectPath,
            projectId: config.projectId,
            tsconfigPath: config.tsconfigPath,
            debounceMs: config.debounceMs ?? DEFAULT_DEBOUNCE_MS,
            excludePatterns: config.excludePatterns ?? DEFAULT_EXCLUDE_PATTERNS,
        };
        await debugLog('Creating @parcel/watcher subscription', {
            watchPath: fullConfig.projectPath,
            ignored: fullConfig.excludePatterns,
        });
        // Create state object first (subscription will be added after)
        const state = {
            projectId: fullConfig.projectId,
            projectPath: fullConfig.projectPath,
            tsconfigPath: fullConfig.tsconfigPath,
            subscription: null, // Will be set after successful subscription
            config: fullConfig,
            pendingEvents: [],
            debounceTimer: null,
            isProcessing: false,
            isStopping: false,
            lastUpdateTime: null,
            status: 'active',
        };
        try {
            // Subscribe to file changes using @parcel/watcher
            const subscription = await watcher.subscribe(fullConfig.projectPath, (err, events) => {
                if (err) {
                    this.handleWatcherError(state, err);
                    return;
                }
                for (const event of events) {
                    try {
                        // Filter for TypeScript files
                        if (!event.path.endsWith('.ts') && !event.path.endsWith('.tsx')) {
                            continue;
                        }
                        // Map parcel event types ('create'/'delete'/'update')
                        // to our event types ('add'/'unlink'/'change')
                        let eventType;
                        if (event.type === 'create') {
                            eventType = 'add';
                        }
                        else if (event.type === 'delete') {
                            eventType = 'unlink';
                        }
                        else {
                            eventType = 'change';
                        }
                        this.handleFileEvent(state, eventType, event.path);
                    }
                    catch (error) {
                        // Fire-and-forget; swallow log failures so one bad event
                        // cannot raise an unhandled rejection
                        debugLog('Error processing file event', { error: String(error), path: event.path }).catch(() => { });
                    }
                }
            }, {
                ignore: fullConfig.excludePatterns,
            });
            state.subscription = subscription;
            this.watchers.set(fullConfig.projectId, state);
            await debugLog('Watcher started', { projectId: fullConfig.projectId, projectPath: fullConfig.projectPath });
            // Check for changes that occurred while watcher was off (run in background)
            this.syncMissedChanges(state);
            return this.getWatcherInfoFromState(state);
        }
        catch (error) {
            await debugLog('Failed to start watcher', { error: String(error) });
            throw error;
        }
    }
    /**
     * Handle a single file system event: buffer it (bounded by
     * MAX_PENDING_EVENTS), notify the client immediately, and (re)arm the
     * debounce timer that triggers batch processing.
     */
    handleFileEvent(state, type, filePath) {
        // Fire-and-forget debug logging; ignore failures
        debugLog('File event received', { type, filePath, projectId: state.projectId, status: state.status }).catch(() => { });
        // Ignore events if watcher is stopping or not active
        if (state.isStopping || state.status !== 'active') {
            debugLog('Ignoring event - watcher not active or stopping', {
                status: state.status,
                isStopping: state.isStopping,
            }).catch(() => { });
            return;
        }
        const event = {
            type,
            filePath,
            timestamp: Date.now(),
        };
        // Prevent unbounded event accumulation - drop oldest events if buffer is full
        if (state.pendingEvents.length >= MAX_PENDING_EVENTS) {
            debugLog('Event buffer full, dropping oldest events', { projectId: state.projectId }).catch(() => { });
            state.pendingEvents = state.pendingEvents.slice(-Math.floor(MAX_PENDING_EVENTS / 2));
        }
        state.pendingEvents.push(event);
        debugLog('Event added to pending', { pendingCount: state.pendingEvents.length }).catch(() => { });
        // Clear existing debounce timer
        if (state.debounceTimer) {
            clearTimeout(state.debounceTimer);
        }
        // Send immediate notification about file change detected
        this.sendNotification({
            type: 'file_change_detected',
            projectId: state.projectId,
            projectPath: state.projectPath,
            data: {
                filesChanged: state.pendingEvents.filter((e) => e.type === 'change').map((e) => e.filePath),
                filesAdded: state.pendingEvents.filter((e) => e.type === 'add').map((e) => e.filePath),
                filesDeleted: state.pendingEvents.filter((e) => e.type === 'unlink').map((e) => e.filePath),
            },
            timestamp: new Date().toISOString(),
        });
        // Set new debounce timer
        state.debounceTimer = setTimeout(() => {
            this.processEvents(state).catch((error) => {
                console.error('[WatchManager] Error in processEvents:', error);
            });
        }, state.config.debounceMs);
    }
    /**
     * Process accumulated file events after the debounce period: drain the
     * buffer, run the incremental parse handler, and report the outcome.
     * Guarded against re-entrancy via state.isProcessing.
     */
    async processEvents(state) {
        // Don't process if already processing, no events, or watcher is stopping
        if (state.isProcessing || state.pendingEvents.length === 0 || state.isStopping)
            return;
        state.isProcessing = true;
        const events = [...state.pendingEvents];
        state.pendingEvents = [];
        state.debounceTimer = null;
        const startTime = Date.now();
        this.sendNotification({
            type: 'incremental_parse_started',
            projectId: state.projectId,
            projectPath: state.projectPath,
            data: {
                filesChanged: events.filter((e) => e.type === 'change').map((e) => e.filePath),
                filesAdded: events.filter((e) => e.type === 'add').map((e) => e.filePath),
                filesDeleted: events.filter((e) => e.type === 'unlink').map((e) => e.filePath),
            },
            timestamp: new Date().toISOString(),
        });
        try {
            if (!this.incrementalParseHandler) {
                throw new Error('Incremental parse handler not configured');
            }
            const result = await this.incrementalParseHandler(state.projectPath, state.projectId, state.tsconfigPath);
            state.lastUpdateTime = new Date();
            const elapsedMs = Date.now() - startTime;
            this.sendNotification({
                type: 'incremental_parse_completed',
                projectId: state.projectId,
                projectPath: state.projectPath,
                data: {
                    filesChanged: events.filter((e) => e.type === 'change').map((e) => e.filePath),
                    filesAdded: events.filter((e) => e.type === 'add').map((e) => e.filePath),
                    filesDeleted: events.filter((e) => e.type === 'unlink').map((e) => e.filePath),
                    nodesUpdated: result.nodesUpdated,
                    edgesUpdated: result.edgesUpdated,
                    elapsedMs,
                },
                timestamp: new Date().toISOString(),
            });
            console.log(`[WatchManager] Incremental parse completed for ${state.projectId}: ` +
                `${result.nodesUpdated} nodes, ${result.edgesUpdated} edges in ${elapsedMs}ms`);
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.sendNotification({
                type: 'incremental_parse_failed',
                projectId: state.projectId,
                projectPath: state.projectPath,
                data: {
                    error: errorMessage,
                    elapsedMs: Date.now() - startTime,
                },
                timestamp: new Date().toISOString(),
            });
            console.error(`[WatchManager] Incremental parse failed for ${state.projectId}:`, error);
        }
        finally {
            state.isProcessing = false;
        }
    }
    /**
     * Handle a watcher-level error: mark the state, record the message, and
     * tear the watcher down so it does not linger in an error state.
     */
    handleWatcherError(state, error) {
        state.status = 'error';
        state.errorMessage = error instanceof Error ? error.message : String(error);
        // Fire-and-forget; ignore log failures
        debugLog('Watcher error', { projectId: state.projectId, error: state.errorMessage }).catch(() => { });
        // Clean up the failed watcher to prevent it from staying in error state indefinitely
        this.stopWatching(state.projectId).catch((cleanupError) => {
            console.error(`[WatchManager] Failed to cleanup errored watcher ${state.projectId}:`, cleanupError);
        });
    }
    /**
     * Sync any changes that occurred while the watcher was off.
     * Runs in the background without blocking watcher startup.
     * The promise is tracked on state so stopWatching can await it.
     */
    syncMissedChanges(state) {
        if (!this.incrementalParseHandler)
            return;
        // Track the promise on state so stopWatching can wait for it
        state.syncPromise = this.incrementalParseHandler(state.projectPath, state.projectId, state.tsconfigPath)
            .then((result) => {
                if (result.nodesUpdated > 0 || result.edgesUpdated > 0) {
                    console.log(`[WatchManager] Synced missed changes for ${state.projectId}: ` +
                        `${result.nodesUpdated} nodes, ${result.edgesUpdated} edges`);
                }
            })
            .catch((error) => {
                // Only log if watcher hasn't been stopped
                if (!state.isStopping) {
                    console.error(`[WatchManager] Failed to sync missed changes for ${state.projectId}:`, error);
                }
            })
            .finally(() => {
                state.syncPromise = undefined;
            });
    }
    /**
     * Stop watching a project.
     * Waits (bounded) for in-progress processing and background sync to finish
     * before unsubscribing and removing the state.
     *
     * @returns {Promise<boolean>} false when no watcher exists for projectId
     */
    async stopWatching(projectId) {
        const state = this.watchers.get(projectId);
        if (!state) {
            return false;
        }
        // Mark as stopping to prevent new event processing
        state.isStopping = true;
        state.status = 'paused';
        // Clear debounce timer
        if (state.debounceTimer) {
            clearTimeout(state.debounceTimer);
            state.debounceTimer = null;
        }
        // Wait for any in-progress processing to complete (with timeout)
        const maxWaitMs = 30000; // 30 second timeout
        const startTime = Date.now();
        while (state.isProcessing && Date.now() - startTime < maxWaitMs) {
            await new Promise((resolve) => setTimeout(resolve, 100));
        }
        // Wait for sync promise if it exists (with timeout)
        if (state.syncPromise) {
            try {
                await Promise.race([
                    state.syncPromise,
                    new Promise((_, reject) => setTimeout(() => reject(new Error('Sync timeout')), 5000)),
                ]);
            }
            catch {
                // Timeout or error - continue with cleanup
            }
        }
        // Unsubscribe from @parcel/watcher (only if subscription exists)
        if (state.subscription) {
            try {
                await state.subscription.unsubscribe();
            }
            catch (error) {
                console.warn(`[WatchManager] Error unsubscribing watcher for ${projectId}:`, error);
            }
        }
        this.watchers.delete(projectId);
        console.log(`[WatchManager] Stopped watching project: ${projectId}`);
        return true;
    }
    /**
     * Get watcher info for a project, or undefined when not watched.
     */
    getWatcherInfo(projectId) {
        const state = this.watchers.get(projectId);
        if (!state)
            return undefined;
        return this.getWatcherInfoFromState(state);
    }
    /**
     * List all active watchers
     */
    listWatchers() {
        return Array.from(this.watchers.values()).map((state) => this.getWatcherInfoFromState(state));
    }
    /**
     * Stop all watchers (for shutdown)
     */
    async stopAllWatchers() {
        const projectIds = Array.from(this.watchers.keys());
        await Promise.all(projectIds.map((id) => this.stopWatching(id)));
        console.log(`[WatchManager] Stopped all ${projectIds.length} watchers`);
    }
    /**
     * Convert internal state to the public info shape returned by tools.
     */
    getWatcherInfoFromState(state) {
        return {
            projectId: state.projectId,
            projectPath: state.projectPath,
            status: state.status,
            lastUpdateTime: state.lastUpdateTime?.toISOString() ?? null,
            pendingChanges: state.pendingEvents.length,
            debounceMs: state.config.debounceMs,
            errorMessage: state.errorMessage,
        };
    }
}
// Singleton instance
export const watchManager = new WatchManager();
|
package/dist/mcp/services.js
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
*/
|
|
5
5
|
import fs from 'fs/promises';
|
|
6
6
|
import { join } from 'path';
|
|
7
|
-
import { Neo4jService } from '../storage/neo4j/neo4j.service.js';
|
|
7
|
+
import { Neo4jService, QUERIES } from '../storage/neo4j/neo4j.service.js';
|
|
8
8
|
import { FILE_PATHS, LOG_CONFIG } from './constants.js';
|
|
9
9
|
import { initializeNaturalLanguageService } from './tools/natural-language-to-cypher.tool.js';
|
|
10
10
|
import { debugLog } from './utils.js';
|
|
@@ -15,141 +15,62 @@ export const initializeServices = async () => {
|
|
|
15
15
|
await Promise.all([initializeNeo4jSchema(), initializeNaturalLanguageService()]);
|
|
16
16
|
};
|
|
17
17
|
/**
|
|
18
|
-
*
|
|
18
|
+
* Dynamically discover schema from the actual graph contents.
|
|
19
|
+
* This is framework-agnostic - it discovers what's actually in the graph.
|
|
19
20
|
*/
|
|
20
|
-
const
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
},
|
|
57
|
-
VendorClient: {
|
|
58
|
-
description: 'External service integration clients',
|
|
59
|
-
purpose: 'Interface with third-party APIs and services',
|
|
60
|
-
commonProperties: ['name', 'filePath'],
|
|
61
|
-
exampleQuery: 'MATCH (v:VendorClient)<-[:INJECTS]-(s) RETURN v.name, collect(s.name) as usedBy',
|
|
62
|
-
},
|
|
63
|
-
RouteDefinition: {
|
|
64
|
-
description: 'Explicit route definitions from route files. CRITICAL: Individual route details (method, path, authenticated, handler, controllerName) are stored in the "context" property as a JSON string.',
|
|
65
|
-
purpose: 'Map HTTP paths and methods to controller handlers',
|
|
66
|
-
commonProperties: ['name', 'context', 'filePath', 'sourceCode'],
|
|
67
|
-
contextStructure: 'The context property contains JSON with structure: {"routes": [{"method": "POST", "path": "/v1/endpoint", "controllerName": "SomeController", "handler": "methodName", "authenticated": true}]}',
|
|
68
|
-
parsingInstructions: 'To get individual routes: (1) Parse JSON with apoc.convert.fromJsonMap(rd.context) (2) UNWIND the routes array (3) Access route.method, route.path, route.handler, route.authenticated, route.controllerName',
|
|
69
|
-
exampleQuery: 'MATCH (rd:RouteDefinition) WITH rd, apoc.convert.fromJsonMap(rd.context) AS ctx UNWIND ctx.routes AS route RETURN route.method, route.path, route.controllerName, route.handler, route.authenticated ORDER BY route.path',
|
|
70
|
-
},
|
|
71
|
-
HttpEndpoint: {
|
|
72
|
-
description: 'Methods that handle HTTP requests',
|
|
73
|
-
purpose: 'Process incoming HTTP requests and return responses',
|
|
74
|
-
commonProperties: ['name', 'filePath', 'sourceCode'],
|
|
75
|
-
exampleQuery: 'MATCH (e:HttpEndpoint)<-[r:ROUTES_TO_HANDLER]-(rd) WHERE apoc.convert.fromJsonMap(r.context).authenticated = true RETURN e.name, apoc.convert.fromJsonMap(r.context).path as path',
|
|
76
|
-
},
|
|
77
|
-
},
|
|
78
|
-
relationships: {
|
|
79
|
-
INJECTS: {
|
|
80
|
-
description: 'Dependency injection relationship from @Injectable decorator',
|
|
81
|
-
direction: 'OUTGOING',
|
|
82
|
-
example: 'Controller -[:INJECTS]-> Service',
|
|
83
|
-
commonPatterns: ['Controller -> Service', 'Service -> Repository', 'Service -> VendorClient'],
|
|
84
|
-
},
|
|
85
|
-
USES_DAL: {
|
|
86
|
-
description: 'Repository uses Data Access Layer for database operations',
|
|
87
|
-
direction: 'OUTGOING',
|
|
88
|
-
example: 'Repository -[:USES_DAL]-> DAL',
|
|
89
|
-
commonPatterns: ['Repository -> DAL'],
|
|
90
|
-
},
|
|
91
|
-
ROUTES_TO: {
|
|
92
|
-
description: 'Route definition points to a Controller',
|
|
93
|
-
direction: 'OUTGOING',
|
|
94
|
-
example: 'RouteDefinition -[:ROUTES_TO]-> Controller',
|
|
95
|
-
commonPatterns: ['RouteDefinition -> Controller'],
|
|
96
|
-
},
|
|
97
|
-
ROUTES_TO_HANDLER: {
|
|
98
|
-
description: 'Route definition points to a specific handler method',
|
|
99
|
-
direction: 'OUTGOING',
|
|
100
|
-
example: 'RouteDefinition -[:ROUTES_TO_HANDLER]-> HttpEndpoint',
|
|
101
|
-
contextProperties: ['path', 'method', 'authenticated', 'handler', 'controllerName'],
|
|
102
|
-
contextNote: 'IMPORTANT: context is stored as a JSON string. Access properties using apoc.convert.fromJsonMap(r.context).propertyName',
|
|
103
|
-
commonPatterns: ['RouteDefinition -> HttpEndpoint (Method)'],
|
|
104
|
-
},
|
|
105
|
-
PROTECTED_BY: {
|
|
106
|
-
description: 'Controller is protected by a PermissionManager',
|
|
107
|
-
direction: 'OUTGOING',
|
|
108
|
-
example: 'Controller -[:PROTECTED_BY]-> PermissionManager',
|
|
109
|
-
commonPatterns: ['Controller -> PermissionManager'],
|
|
110
|
-
},
|
|
111
|
-
},
|
|
112
|
-
commonQueryPatterns: [
|
|
113
|
-
{
|
|
114
|
-
intent: 'Find all HTTP endpoints',
|
|
115
|
-
query: 'MATCH (e:HttpEndpoint) RETURN e.name, e.filePath',
|
|
116
|
-
},
|
|
117
|
-
{
|
|
118
|
-
intent: 'Find service dependency chain',
|
|
119
|
-
query: 'MATCH path = (c:Controller)-[:INJECTS*1..3]->(s) RETURN [n in nodes(path) | n.name] as chain',
|
|
120
|
-
},
|
|
121
|
-
{
|
|
122
|
-
intent: 'Find all authenticated routes',
|
|
123
|
-
query: 'MATCH (rd:RouteDefinition)-[r:ROUTES_TO_HANDLER]->(m) WHERE apoc.convert.fromJsonMap(r.context).authenticated = true RETURN apoc.convert.fromJsonMap(r.context).path as path, apoc.convert.fromJsonMap(r.context).method as method, m.name',
|
|
124
|
-
},
|
|
125
|
-
{
|
|
126
|
-
intent: 'Find controllers without permission managers',
|
|
127
|
-
query: 'MATCH (c:Controller) WHERE NOT (c)-[:PROTECTED_BY]->(:PermissionManager) RETURN c.name',
|
|
128
|
-
},
|
|
129
|
-
{
|
|
130
|
-
intent: 'Find what services a controller uses',
|
|
131
|
-
query: 'MATCH (c:Controller {name: $controllerName})-[:INJECTS]->(s:Service) RETURN s.name',
|
|
132
|
-
},
|
|
133
|
-
{
|
|
134
|
-
intent: 'Find complete execution path from controller to database',
|
|
135
|
-
query: 'MATCH path = (c:Controller)-[:INJECTS*1..3]->(r:Repository)-[:USES_DAL]->(d:DAL) WHERE c.name = $controllerName RETURN [n in nodes(path) | n.name] as executionPath',
|
|
136
|
-
},
|
|
137
|
-
],
|
|
138
|
-
},
|
|
139
|
-
};
|
|
21
|
+
/**
 * Convert a value that may be a neo4j Integer (an object exposing toNumber())
 * into a plain JS number; plain numbers pass through unchanged.
 * Extracted because the conversion ternary was duplicated four times below.
 */
const toJsNumber = (value) => (typeof value === 'object' ? value.toNumber() : value);
/**
 * Dynamically discover schema from the actual graph contents.
 * This is framework-agnostic - it discovers what's actually in the graph.
 *
 * Runs the four discovery queries in parallel and normalizes the rows into a
 * plain-object schema summary. Returns null (after logging) on any failure so
 * callers can fall back to the raw schema alone.
 *
 * @param {object} neo4jService - service exposing run(query) => Promise<rows>
 * @returns {Promise<object|null>} discovered schema, or null on failure
 */
const discoverSchemaFromGraph = async (neo4jService) => {
    try {
        // Discover actual node types, relationships, and patterns from the graph
        const [nodeTypes, relationshipTypes, semanticTypes, commonPatterns] = await Promise.all([
            neo4jService.run(QUERIES.DISCOVER_NODE_TYPES),
            neo4jService.run(QUERIES.DISCOVER_RELATIONSHIP_TYPES),
            neo4jService.run(QUERIES.DISCOVER_SEMANTIC_TYPES),
            neo4jService.run(QUERIES.DISCOVER_COMMON_PATTERNS),
        ]);
        return {
            nodeTypes: nodeTypes.map((r) => ({
                label: r.label,
                count: toJsNumber(r.nodeCount),
                properties: r.sampleProperties ?? [],
            })),
            relationshipTypes: relationshipTypes.map((r) => ({
                type: r.relationshipType,
                count: toJsNumber(r.relCount),
                connections: r.connections ?? [],
            })),
            semanticTypes: semanticTypes.map((r) => ({
                type: r.semanticType,
                count: toJsNumber(r.count),
            })),
            commonPatterns: commonPatterns.map((r) => ({
                from: r.fromType,
                relationship: r.relType,
                to: r.toType,
                count: toJsNumber(r.count),
            })),
        };
    }
    catch (error) {
        await debugLog('Failed to discover schema from graph', error);
        return null;
    }
};
|
|
141
58
|
/**
|
|
142
|
-
* Initialize Neo4j schema by fetching and
|
|
59
|
+
* Initialize Neo4j schema by fetching from APOC and discovering actual graph structure
|
|
143
60
|
*/
|
|
144
61
|
const initializeNeo4jSchema = async () => {
|
|
145
62
|
try {
|
|
146
63
|
const neo4jService = new Neo4jService();
|
|
147
64
|
const rawSchema = await neo4jService.getSchema();
|
|
148
|
-
//
|
|
149
|
-
const
|
|
65
|
+
// Dynamically discover what's actually in the graph
|
|
66
|
+
const discoveredSchema = await discoverSchemaFromGraph(neo4jService);
|
|
67
|
+
const schema = {
|
|
68
|
+
rawSchema,
|
|
69
|
+
discoveredSchema,
|
|
70
|
+
};
|
|
150
71
|
const schemaPath = join(process.cwd(), FILE_PATHS.schemaOutput);
|
|
151
|
-
await fs.writeFile(schemaPath, JSON.stringify(
|
|
152
|
-
await debugLog('Neo4j schema cached successfully
|
|
72
|
+
await fs.writeFile(schemaPath, JSON.stringify(schema, null, LOG_CONFIG.jsonIndentation));
|
|
73
|
+
await debugLog('Neo4j schema cached successfully', { schemaPath });
|
|
153
74
|
}
|
|
154
75
|
catch (error) {
|
|
155
76
|
await debugLog('Failed to initialize Neo4j schema', error);
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Check Parse Status Tool
|
|
3
|
+
* Returns the status of an async parsing job
|
|
4
|
+
*/
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
|
|
7
|
+
import { jobManager } from '../services/job-manager.js';
|
|
8
|
+
import { createErrorResponse, createSuccessResponse } from '../utils.js';
|
|
9
|
+
/**
 * Render a human-readable progress report for a pending/running parse job.
 * Percentage is 0 when no files are counted yet; the unit label switches to
 * 'edges' during the resolving phase; the chunk line appears only when
 * chunked import is in use.
 */
const formatProgress = (job) => {
    const { status, progress } = job;
    const { phase, filesProcessed, filesTotal, currentChunk, totalChunks, nodesImported, edgesImported } = progress;
    const pct = filesTotal > 0 ? Math.round((filesProcessed / filesTotal) * 100) : 0;
    const unit = phase === 'resolving' ? 'edges' : 'files';
    let report = `Status: ${status}\nPhase: ${phase}\nProgress: ${pct}% (${filesProcessed}/${filesTotal} ${unit})`;
    if (totalChunks > 0) {
        report += `\nChunk: ${currentChunk}/${totalChunks}`;
    }
    report += `\nNodes: ${nodesImported}\nEdges: ${edgesImported}`;
    return report;
};
|
|
25
|
+
/**
 * Render the success summary for a completed parse job.
 * Falls back to a short placeholder when the job carries no result payload.
 */
const formatCompleted = (job) => {
    const { result } = job;
    if (!result) {
        return 'Parsing completed (no result data)';
    }
    const seconds = (result.elapsedMs / 1000).toFixed(2);
    return `Parsing completed!\n\nNodes: ${result.nodesImported}\nEdges: ${result.edgesImported}\nTime: ${seconds}s\nProject ID: ${job.projectId}`;
};
|
|
38
|
+
/**
 * Render the failure message for a failed parse job, substituting a generic
 * reason when the job carries no error text.
 */
const formatFailed = (job) => {
    const reason = job.error ?? 'Unknown error';
    return `Parsing failed: ${reason}`;
};
|
|
41
|
+
/**
 * Register the check_parse_status tool on the given MCP server.
 * The tool looks up an async parse job by id and reports either its final
 * outcome (completed/failed) or its live progress (pending/running).
 */
export const createCheckParseStatusTool = (server) => {
    const toolName = TOOL_NAMES.checkParseStatus;
    const { title, description } = TOOL_METADATA[toolName];
    const handler = async ({ jobId }) => {
        const job = jobManager.getJob(jobId);
        if (!job) {
            return createErrorResponse(`Job not found: ${jobId}\n\nJobs are automatically cleaned up after 1 hour.`);
        }
        if (job.status === 'completed') {
            return createSuccessResponse(formatCompleted(job));
        }
        if (job.status === 'failed') {
            return createErrorResponse(formatFailed(job));
        }
        // 'pending', 'running', and any unknown status report live progress
        return createSuccessResponse(formatProgress(job));
    };
    server.registerTool(toolName, {
        title,
        description,
        inputSchema: {
            jobId: z.string().describe('Job ID returned from async parse_typescript_project'),
        },
    }, handler);
};
|