code-graph-context 1.1.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +221 -101
- package/dist/core/config/fairsquare-framework-schema.js +47 -60
- package/dist/core/config/nestjs-framework-schema.js +71 -44
- package/dist/core/config/schema.js +1 -1
- package/dist/core/config/timeouts.js +27 -0
- package/dist/core/embeddings/embeddings.service.js +122 -2
- package/dist/core/embeddings/natural-language-to-cypher.service.js +416 -17
- package/dist/core/parsers/parser-factory.js +5 -3
- package/dist/core/parsers/typescript-parser.js +618 -50
- package/dist/core/parsers/workspace-parser.js +554 -0
- package/dist/core/utils/edge-factory.js +37 -0
- package/dist/core/utils/file-change-detection.js +105 -0
- package/dist/core/utils/file-utils.js +20 -0
- package/dist/core/utils/index.js +3 -0
- package/dist/core/utils/path-utils.js +75 -0
- package/dist/core/utils/progress-reporter.js +112 -0
- package/dist/core/utils/project-id.js +176 -0
- package/dist/core/utils/retry.js +41 -0
- package/dist/core/workspace/index.js +4 -0
- package/dist/core/workspace/workspace-detector.js +221 -0
- package/dist/mcp/constants.js +153 -5
- package/dist/mcp/handlers/cross-file-edge.helpers.js +19 -0
- package/dist/mcp/handlers/file-change-detection.js +105 -0
- package/dist/mcp/handlers/graph-generator.handler.js +97 -32
- package/dist/mcp/handlers/incremental-parse.handler.js +146 -0
- package/dist/mcp/handlers/streaming-import.handler.js +210 -0
- package/dist/mcp/handlers/traversal.handler.js +130 -71
- package/dist/mcp/mcp.server.js +45 -6
- package/dist/mcp/service-init.js +79 -0
- package/dist/mcp/services/job-manager.js +165 -0
- package/dist/mcp/services/watch-manager.js +376 -0
- package/dist/mcp/services.js +2 -2
- package/dist/mcp/tools/check-parse-status.tool.js +64 -0
- package/dist/mcp/tools/impact-analysis.tool.js +84 -18
- package/dist/mcp/tools/index.js +13 -1
- package/dist/mcp/tools/list-projects.tool.js +62 -0
- package/dist/mcp/tools/list-watchers.tool.js +51 -0
- package/dist/mcp/tools/natural-language-to-cypher.tool.js +34 -8
- package/dist/mcp/tools/parse-typescript-project.tool.js +318 -58
- package/dist/mcp/tools/search-codebase.tool.js +56 -16
- package/dist/mcp/tools/start-watch-project.tool.js +100 -0
- package/dist/mcp/tools/stop-watch-project.tool.js +49 -0
- package/dist/mcp/tools/traverse-from-node.tool.js +68 -9
- package/dist/mcp/utils.js +35 -13
- package/dist/mcp/workers/parse-worker.js +198 -0
- package/dist/storage/neo4j/neo4j.service.js +147 -48
- package/package.json +4 -2
package/dist/mcp/tools/start-watch-project.tool.js
ADDED
@@ -0,0 +1,100 @@
+/**
+ * Start Watch Project Tool
+ * Starts file watching for incremental graph updates
+ */
+import { constants as fsConstants } from 'fs';
+import { access, stat } from 'fs/promises';
+import { resolve } from 'path';
+import { z } from 'zod';
+import { resolveProjectId } from '../../core/utils/project-id.js';
+import { Neo4jService } from '../../storage/neo4j/neo4j.service.js';
+import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
+import { watchManager } from '../services/watch-manager.js';
+import { createErrorResponse, createSuccessResponse, debugLog } from '../utils.js';
+const inputSchema = z.object({
+    projectPath: z.string().describe('Path to the TypeScript project root directory'),
+    tsconfigPath: z.string().describe('Path to TypeScript project tsconfig.json file'),
+    projectId: z.string().optional().describe('Optional project ID override (auto-generated from path if omitted)'),
+    debounceMs: z.number().optional().default(1000).describe('Debounce delay in milliseconds (default: 1000)'),
+});
+export const createStartWatchProjectTool = (server) => {
+    server.registerTool(TOOL_NAMES.startWatchProject, {
+        title: TOOL_METADATA[TOOL_NAMES.startWatchProject].title,
+        description: TOOL_METADATA[TOOL_NAMES.startWatchProject].description,
+        inputSchema: inputSchema.shape,
+    }, async (args) => {
+        try {
+            const { projectPath, tsconfigPath, debounceMs } = args;
+            await debugLog('Starting file watcher', { projectPath, tsconfigPath, debounceMs });
+            // Validate project path exists and is a directory
+            const resolvedProjectPath = resolve(projectPath);
+            try {
+                await access(resolvedProjectPath, fsConstants.R_OK);
+                const projectStats = await stat(resolvedProjectPath);
+                if (!projectStats.isDirectory()) {
+                    return createErrorResponse(new Error(`Project path exists but is not a directory: ${resolvedProjectPath}`));
+                }
+            }
+            catch (error) {
+                if (error.code === 'ENOENT') {
+                    return createErrorResponse(new Error(`Project path does not exist: ${resolvedProjectPath}`));
+                }
+                throw error;
+            }
+            // Validate tsconfig exists and is a file
+            const resolvedTsconfigPath = resolve(tsconfigPath);
+            try {
+                await access(resolvedTsconfigPath, fsConstants.R_OK);
+                const tsconfigStats = await stat(resolvedTsconfigPath);
+                if (!tsconfigStats.isFile()) {
+                    return createErrorResponse(new Error(`tsconfig path exists but is not a file: ${resolvedTsconfigPath}`));
+                }
+            }
+            catch (error) {
+                if (error.code === 'ENOENT') {
+                    return createErrorResponse(new Error(`tsconfig.json not found at: ${resolvedTsconfigPath}`));
+                }
+                throw error;
+            }
+            // Resolve project ID
+            const projectId = args.projectId ?? (await resolveProjectId(resolvedProjectPath));
+            // Check if project has been indexed
+            const neo4jService = new Neo4jService();
+            try {
+                const result = await neo4jService.run('MATCH (p:Project {projectId: $projectId}) RETURN p.projectId AS projectId', { projectId });
+                if (result.length === 0) {
+                    return createErrorResponse(new Error(`Project has not been indexed yet. Run parse_typescript_project first to create the initial graph, then start the watcher for incremental updates.`));
+                }
+            }
+            finally {
+                await neo4jService.close();
+            }
+            // Start watching
+            const watcherInfo = await watchManager.startWatching({
+                projectPath: resolvedProjectPath,
+                projectId,
+                tsconfigPath: resolvedTsconfigPath,
+                debounceMs,
+            });
+            await debugLog('File watcher started', { projectId, status: watcherInfo.status });
+            const output = [
+                `File watcher started successfully!`,
+                ``,
+                `Project: ${watcherInfo.projectPath}`,
+                `Project ID: ${watcherInfo.projectId}`,
+                `Status: ${watcherInfo.status}`,
+                `Debounce: ${watcherInfo.debounceMs}ms`,
+                ``,
+                `The graph will be automatically updated when TypeScript files change.`,
+                `Use stop_watch_project to stop watching.`,
+                `Use list_watchers to see all active watchers.`,
+            ].join('\n');
+            return createSuccessResponse(output);
+        }
+        catch (error) {
+            console.error('Start watch project error:', error);
+            await debugLog('Start watch project error', { error });
+            return createErrorResponse(error);
+        }
+    });
+};
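For orientation, here is a minimal sketch of how an MCP client could invoke the new tool once the server is running. The Client/transport setup and the launch command are illustrative assumptions, not documented setup; the argument names follow the zod inputSchema above, and the tool name matches the start_watch_project name referenced in the tool's own output messages.

```js
// Hypothetical client-side call to start_watch_project (paths and the
// server launch command are placeholders).
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';

const transport = new StdioClientTransport({
  command: 'npx',
  args: ['code-graph-context'], // assumed command for launching the MCP server
});
const client = new Client({ name: 'example-client', version: '1.0.0' });
await client.connect(transport);

const result = await client.callTool({
  name: 'start_watch_project',
  arguments: {
    projectPath: '/path/to/my-project',
    tsconfigPath: '/path/to/my-project/tsconfig.json',
    debounceMs: 2000, // optional, defaults to 1000
  },
});
console.log(result.content);
```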
package/dist/mcp/tools/stop-watch-project.tool.js
ADDED
@@ -0,0 +1,49 @@
+/**
+ * Stop Watch Project Tool
+ * Stops file watching for a project
+ */
+import { z } from 'zod';
+import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
+import { watchManager } from '../services/watch-manager.js';
+import { createErrorResponse, createSuccessResponse, debugLog } from '../utils.js';
+const inputSchema = z.object({
+    projectId: z.string().describe('Project ID to stop watching'),
+});
+export const createStopWatchProjectTool = (server) => {
+    server.registerTool(TOOL_NAMES.stopWatchProject, {
+        title: TOOL_METADATA[TOOL_NAMES.stopWatchProject].title,
+        description: TOOL_METADATA[TOOL_NAMES.stopWatchProject].description,
+        inputSchema: inputSchema.shape,
+    }, async (args) => {
+        try {
+            const { projectId } = args;
+            await debugLog('Stopping file watcher', { projectId });
+            // Get watcher info before stopping (for response)
+            const watcherInfo = watchManager.getWatcherInfo(projectId);
+            if (!watcherInfo) {
+                return createErrorResponse(new Error(`No active watcher found for project: ${projectId}. Use list_watchers to see active watchers.`));
+            }
+            // Stop watching
+            const stopped = await watchManager.stopWatching(projectId);
+            if (!stopped) {
+                return createErrorResponse(new Error(`Failed to stop watcher for project: ${projectId}`));
+            }
+            await debugLog('File watcher stopped', { projectId });
+            const output = [
+                `File watcher stopped successfully!`,
+                ``,
+                `Project: ${watcherInfo.projectPath}`,
+                `Project ID: ${watcherInfo.projectId}`,
+                ``,
+                `The graph will no longer be automatically updated for this project.`,
+                `Use start_watch_project to start watching again.`,
+            ].join('\n');
+            return createSuccessResponse(output);
+        }
+        catch (error) {
+            console.error('Stop watch project error:', error);
+            await debugLog('Stop watch project error', { error });
+            return createErrorResponse(error);
+        }
+    });
+};
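Stopping a watcher is symmetric. Continuing the client sketch above (the projectId value is a placeholder; in practice you would use the ID reported by start_watch_project or list_watchers):

```js
// Stop the watcher started earlier; `client` is the MCP client from the
// previous sketch, and the projectId is illustrative.
await client.callTool({
  name: 'stop_watch_project',
  arguments: { projectId: 'proj_a1b2c3d4e5f6' },
});
```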
package/dist/mcp/tools/traverse-from-node.tool.js
CHANGED
@@ -7,13 +7,21 @@ import { MAX_TRAVERSAL_DEPTH } from '../../constants.js';
 import { Neo4jService } from '../../storage/neo4j/neo4j.service.js';
 import { TOOL_NAMES, TOOL_METADATA, DEFAULTS } from '../constants.js';
 import { TraversalHandler } from '../handlers/traversal.handler.js';
-import { createErrorResponse, sanitizeNumericInput, debugLog } from '../utils.js';
+import { createErrorResponse, sanitizeNumericInput, debugLog, resolveProjectIdOrError } from '../utils.js';
 export const createTraverseFromNodeTool = (server) => {
     server.registerTool(TOOL_NAMES.traverseFromNode, {
         title: TOOL_METADATA[TOOL_NAMES.traverseFromNode].title,
         description: TOOL_METADATA[TOOL_NAMES.traverseFromNode].description,
         inputSchema: {
-
+            projectId: z.string().describe('Project ID, name, or path (e.g., "backend" or "proj_a1b2c3d4e5f6")'),
+            nodeId: z
+                .string()
+                .optional()
+                .describe('The node ID to start traversal from (required if filePath not provided)'),
+            filePath: z
+                .string()
+                .optional()
+                .describe('File path to start traversal from (alternative to nodeId - finds the SourceFile node)'),
             maxDepth: z
                 .number()
                 .int()
@@ -26,6 +34,12 @@ export const createTraverseFromNodeTool = (server) => {
                 .optional()
                 .describe(`Number of results to skip for pagination (default: ${DEFAULTS.skipOffset})`)
                 .default(DEFAULTS.skipOffset),
+            limit: z
+                .number()
+                .int()
+                .optional()
+                .describe('Maximum results per page (default: 50). Use with skip for pagination.')
+                .default(50),
             direction: z
                 .enum(['OUTGOING', 'INCOMING', 'BOTH'])
                 .optional()
@@ -57,27 +71,68 @@ export const createTraverseFromNodeTool = (server) => {
                 .optional()
                 .describe(`Code snippet character length when includeCode is true (default: ${DEFAULTS.codeSnippetLength})`)
                 .default(DEFAULTS.codeSnippetLength),
+            maxTotalNodes: z
+                .number()
+                .int()
+                .optional()
+                .describe('Maximum total unique nodes to return across all depths (default: 50). Limits output size.')
+                .default(50),
         },
-    }, async ({ nodeId, maxDepth = DEFAULTS.traversalDepth, skip = DEFAULTS.skipOffset, direction = 'BOTH', relationshipTypes, includeCode = true, maxNodesPerChain = 5, summaryOnly = false, snippetLength = DEFAULTS.codeSnippetLength, }) => {
+    }, async ({ projectId, nodeId, filePath, maxDepth = DEFAULTS.traversalDepth, skip = DEFAULTS.skipOffset, limit = 50, direction = 'BOTH', relationshipTypes, includeCode = true, maxNodesPerChain = 5, summaryOnly = false, snippetLength = DEFAULTS.codeSnippetLength, maxTotalNodes = 50, }) => {
+        // Validate that either nodeId or filePath is provided
+        if (!nodeId && !filePath) {
+            return createErrorResponse('Either nodeId or filePath must be provided.');
+        }
+        const neo4jService = new Neo4jService();
         try {
+            // Resolve project ID from name, path, or ID
+            const projectResult = await resolveProjectIdOrError(projectId, neo4jService);
+            if (!projectResult.success)
+                return projectResult.error;
+            const resolvedProjectId = projectResult.projectId;
+            const traversalHandler = new TraversalHandler(neo4jService);
+            // If filePath is provided, resolve it to a nodeId
+            let resolvedNodeId = nodeId;
+            if (!resolvedNodeId && filePath) {
+                const fileNodeId = await traversalHandler.resolveNodeIdFromFilePath(filePath, resolvedProjectId);
+                if (!fileNodeId) {
+                    // Try to provide helpful suggestions
+                    const fileName = filePath.split('/').pop() ?? filePath;
+                    return createErrorResponse(`No SourceFile node found for "${filePath}" in project "${resolvedProjectId}".\n\n` +
+                        `Suggestions:\n` +
+                        `- Use the full absolute path (e.g., /Users/.../src/file.ts)\n` +
+                        `- Use just the filename (e.g., "${fileName}")\n` +
+                        `- Use search_codebase to find the correct node ID first\n` +
+                        `- Run list_projects to verify the project exists`);
+                }
+                resolvedNodeId = fileNodeId;
+            }
             const sanitizedMaxDepth = sanitizeNumericInput(maxDepth, DEFAULTS.traversalDepth, MAX_TRAVERSAL_DEPTH);
             const sanitizedSkip = sanitizeNumericInput(skip, DEFAULTS.skipOffset);
             await debugLog('Node traversal started', {
-
+                projectId: resolvedProjectId,
+                nodeId: resolvedNodeId,
+                filePath,
                 maxDepth: sanitizedMaxDepth,
                 skip: sanitizedSkip,
+                limit,
                 direction,
                 relationshipTypes,
                 includeCode,
                 maxNodesPerChain,
                 summaryOnly,
                 snippetLength,
+                maxTotalNodes,
             });
-
-
-
+            // Safety check - resolvedNodeId should be set at this point
+            if (!resolvedNodeId) {
+                return createErrorResponse('Could not resolve node ID from provided parameters.');
+            }
+            return await traversalHandler.traverseFromNode(resolvedNodeId, [], {
+                projectId: resolvedProjectId,
                 maxDepth: sanitizedMaxDepth,
                 skip: sanitizedSkip,
+                limit,
                 direction,
                 relationshipTypes,
                 includeStartNodeDetails: true,
@@ -85,13 +140,17 @@ export const createTraverseFromNodeTool = (server) => {
                 maxNodesPerChain,
                 summaryOnly,
                 snippetLength,
-
+                maxTotalNodes,
+                title: filePath ? `File Traversal from: ${filePath}` : `Node Traversal from: ${resolvedNodeId}`,
             });
         }
         catch (error) {
             console.error('Node traversal error:', error);
-            await debugLog('Node traversal error', { nodeId, error });
+            await debugLog('Node traversal error', { nodeId, filePath, error });
             return createErrorResponse(error);
         }
+        finally {
+            await neo4jService.close();
+        }
     });
 };
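The reworked traversal tool can now start from a file path instead of a node ID and scope the query to a project. A hedged sketch of a call using the new parameters, continuing the client from the earlier examples (the registered tool name is assumed to follow the same snake_case convention as the other tools; the project name and file path are placeholders):

```js
// Traverse outward from a SourceFile node located by path, using the new
// pagination and output caps. Tool name and argument values are illustrative.
await client.callTool({
  name: 'traverse_from_node',
  arguments: {
    projectId: 'backend',            // accepts a project ID, name, or path
    filePath: 'src/app.module.ts',   // alternative to nodeId
    maxDepth: 2,
    limit: 25,                       // page size, combined with skip
    maxTotalNodes: 50,               // cap on unique nodes across all depths
    summaryOnly: true,
  },
});
```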
package/dist/mcp/utils.js
CHANGED
@@ -2,8 +2,23 @@
  * MCP Server Utility Functions
  * Common utility functions used across the MCP server
  */
+import { resolveProjectIdFromInput } from '../core/utils/project-id.js';
 import { MESSAGES } from './constants.js';
-export { debugLog } from '../utils/file-utils.js';
+export { debugLog } from '../core/utils/file-utils.js';
+/**
+ * Resolve project ID with standardized error handling
+ * Returns either the resolved projectId or an error response ready for tool return
+ */
+export const resolveProjectIdOrError = async (projectId, neo4jService) => {
+    try {
+        const resolved = await resolveProjectIdFromInput(projectId, neo4jService);
+        return { success: true, projectId: resolved };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { success: false, error: createErrorResponse(message) };
+    }
+};
 /**
  * Standard error response format for MCP tools
  */
@@ -31,6 +46,20 @@ export const createSuccessResponse = (text) => {
         ],
     };
 };
+/**
+ * Truncate code to specified max length, showing first and last portions
+ */
+export const truncateCode = (code, maxLength) => {
+    if (code.length <= maxLength) {
+        return { text: code };
+    }
+    const half = Math.floor(maxLength / 2);
+    return {
+        text: code.substring(0, half) + '\n\n... [truncated] ...\n\n' + code.substring(code.length - half),
+        hasMore: true,
+        truncated: code.length - maxLength,
+    };
+};
 /**
  * Format node information as structured data
  */
@@ -47,18 +76,11 @@ export const formatNodeInfo = (value, key) => {
     }
     // Include source code if available and not a SourceFile
     if (value.properties.sourceCode && value.properties.coreType !== 'SourceFile') {
-        const
-
-        if (
-            result.
-
-        else {
-            // Show first 500 and last 500 characters
-            const half = Math.floor(maxLength / 2);
-            result.sourceCode =
-                code.substring(0, half) + '\n\n... [truncated] ...\n\n' + code.substring(code.length - half);
-            result.hasMore = true;
-            result.truncated = code.length - maxLength;
+        const truncateResult = truncateCode(value.properties.sourceCode, 1000);
+        result.sourceCode = truncateResult.text;
+        if (truncateResult.hasMore) {
+            result.hasMore = truncateResult.hasMore;
+            result.truncated = truncateResult.truncated;
         }
     }
     return result;
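The extracted truncateCode helper is a pure function, so its behavior is easy to check in isolation. A small sketch of what it returns (the import path is an assumption; the helper is defined in dist/mcp/utils.js as shown above):

```js
// Illustrative use of truncateCode; adjust the import path to your install.
import { truncateCode } from 'code-graph-context/dist/mcp/utils.js';

const short = truncateCode('const x = 1;', 1000);
// -> { text: 'const x = 1;' } (no hasMore/truncated fields under the limit)

const long = truncateCode('a'.repeat(5000), 1000);
// -> text keeps the first 500 and last 500 characters around a
//    '... [truncated] ...' marker; hasMore: true; truncated: 4000
console.log(long.hasMore, long.truncated); // true 4000
```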
package/dist/mcp/workers/parse-worker.js
ADDED
@@ -0,0 +1,198 @@
+/**
+ * Parse Worker
+ * Runs TypeScript parsing in a separate thread to avoid blocking the MCP server
+ */
+import { dirname, join } from 'path';
+import { fileURLToPath } from 'url';
+import { parentPort, workerData } from 'worker_threads';
+// Load environment variables in worker thread
+import dotenv from 'dotenv';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+dotenv.config({ path: join(__dirname, '..', '..', '..', '.env') });
+import { EmbeddingsService } from '../../core/embeddings/embeddings.service.js';
+import { ParserFactory } from '../../core/parsers/parser-factory.js';
+import { WorkspaceParser } from '../../core/parsers/workspace-parser.js';
+import { getProjectName, UPSERT_PROJECT_QUERY, UPDATE_PROJECT_STATUS_QUERY } from '../../core/utils/project-id.js';
+import { WorkspaceDetector } from '../../core/workspace/index.js';
+import { Neo4jService, QUERIES } from '../../storage/neo4j/neo4j.service.js';
+import { debugLog } from '../../core/utils/file-utils.js';
+import { GraphGeneratorHandler } from '../handlers/graph-generator.handler.js';
+import { StreamingImportHandler } from '../handlers/streaming-import.handler.js';
+const sendMessage = (msg) => {
+    parentPort?.postMessage(msg);
+};
+const runParser = async () => {
+    const config = workerData;
+    const startTime = Date.now();
+    // Declare outside try block so it's available in catch/finally
+    let resolvedProjectId = config.projectId;
+    let neo4jService = null;
+    try {
+        sendMessage({
+            type: 'progress',
+            data: {
+                phase: 'discovery',
+                filesProcessed: 0,
+                filesTotal: 0,
+                nodesImported: 0,
+                edgesImported: 0,
+                currentChunk: 0,
+                totalChunks: 0,
+            },
+        });
+        neo4jService = new Neo4jService();
+        const embeddingsService = new EmbeddingsService();
+        const graphGeneratorHandler = new GraphGeneratorHandler(neo4jService, embeddingsService);
+        // Use lazy loading to avoid OOM on large projects
+        const lazyLoad = true;
+        // Auto-detect workspace (Turborepo, pnpm, yarn, npm workspaces)
+        const workspaceDetector = new WorkspaceDetector();
+        await debugLog('Detecting workspace', { projectPath: config.projectPath });
+        const workspaceConfig = await workspaceDetector.detect(config.projectPath);
+        await debugLog('Workspace detection result', {
+            type: workspaceConfig.type,
+            rootPath: workspaceConfig.rootPath,
+            packageCount: workspaceConfig.packages.length,
+            packages: workspaceConfig.packages.map((p) => p.name),
+        });
+        // Use WorkspaceParser for monorepos, TypeScriptParser for single projects
+        let parser;
+        if (workspaceConfig.type !== 'single' && workspaceConfig.packages.length > 1) {
+            await debugLog('Using WorkspaceParser', {
+                type: workspaceConfig.type,
+                packageCount: workspaceConfig.packages.length,
+            });
+            // for workspaces default to auto for now
+            // TODO: allow worker config to specify projectType array to support multi-framework monorepos
+            parser = new WorkspaceParser(workspaceConfig, config.projectId, lazyLoad, 'auto');
+            resolvedProjectId = parser.getProjectId();
+        }
+        else {
+            await debugLog('Using single project mode', {
+                type: workspaceConfig.type,
+                packageCount: workspaceConfig.packages.length,
+            });
+            parser =
+                config.projectType === 'auto'
+                    ? await ParserFactory.createParserWithAutoDetection(config.projectPath, config.tsconfigPath, config.projectId, lazyLoad)
+                    : ParserFactory.createParser({
+                        workspacePath: config.projectPath,
+                        tsConfigPath: config.tsconfigPath,
+                        projectType: config.projectType,
+                        projectId: config.projectId,
+                        lazyLoad,
+                    });
+            resolvedProjectId = parser.getProjectId();
+        }
+        // Use async file discovery (works in lazy mode)
+        const sourceFiles = await parser.discoverSourceFiles();
+        const totalFiles = sourceFiles.length;
+        sendMessage({
+            type: 'progress',
+            data: {
+                phase: 'parsing',
+                filesProcessed: 0,
+                filesTotal: totalFiles,
+                nodesImported: 0,
+                edgesImported: 0,
+                currentChunk: 0,
+                totalChunks: Math.ceil(totalFiles / config.chunkSize),
+            },
+        });
+        // Clear existing project data first
+        graphGeneratorHandler.setProjectId(resolvedProjectId);
+        await neo4jService.run(QUERIES.CLEAR_PROJECT, { projectId: resolvedProjectId });
+        // Create/update Project node with 'parsing' status
+        const projectName = await getProjectName(config.projectPath);
+        await neo4jService.run(UPSERT_PROJECT_QUERY, {
+            projectId: resolvedProjectId,
+            name: projectName,
+            path: config.projectPath,
+            status: 'parsing',
+        });
+        await debugLog('Project node created', { projectId: resolvedProjectId, name: projectName });
+        const streamingHandler = new StreamingImportHandler(graphGeneratorHandler);
+        const result = await streamingHandler.importProjectStreaming(parser, {
+            chunkSize: config.chunkSize > 0 ? config.chunkSize : 100,
+            projectId: resolvedProjectId,
+            onProgress: async (progress) => {
+                sendMessage({
+                    type: 'progress',
+                    data: {
+                        phase: progress.phase,
+                        filesProcessed: progress.current,
+                        filesTotal: progress.total,
+                        nodesImported: progress.details?.nodesCreated ?? 0,
+                        edgesImported: progress.details?.edgesCreated ?? 0,
+                        currentChunk: progress.details?.chunkIndex ?? 0,
+                        totalChunks: progress.details?.totalChunks ?? 0,
+                    },
+                });
+            },
+        });
+        // Update Project node with 'complete' status and final counts
+        await neo4jService.run(UPDATE_PROJECT_STATUS_QUERY, {
+            projectId: resolvedProjectId,
+            status: 'complete',
+            nodeCount: result.nodesImported,
+            edgeCount: result.edgesImported,
+        });
+        await debugLog('Project node updated', {
+            projectId: resolvedProjectId,
+            status: 'complete',
+            nodeCount: result.nodesImported,
+            edgeCount: result.edgesImported,
+        });
+        sendMessage({
+            type: 'complete',
+            data: {
+                nodesImported: result.nodesImported,
+                edgesImported: result.edgesImported,
+                elapsedMs: Date.now() - startTime,
+            },
+        });
+    }
+    catch (error) {
+        // Try to update Project node with 'failed' status
+        try {
+            // Use existing service if available, otherwise create temporary one
+            const serviceForUpdate = neo4jService ?? new Neo4jService();
+            await serviceForUpdate.run(UPDATE_PROJECT_STATUS_QUERY, {
+                projectId: resolvedProjectId, // Use resolved ID, not config.projectId
+                status: 'failed',
+                nodeCount: 0,
+                edgeCount: 0,
+            });
+            // Close temporary service if we created one
+            if (!neo4jService) {
+                await serviceForUpdate.close();
+            }
+        }
+        catch {
+            // Ignore errors updating project status on failure
+        }
+        sendMessage({
+            type: 'error',
+            error: error.message ?? String(error),
+        });
+    }
+    finally {
+        // Always close the Neo4j connection to prevent resource leaks
+        if (neo4jService) {
+            try {
+                await neo4jService.close();
+            }
+            catch {
+                // Ignore cleanup errors
+            }
+        }
+    }
+};
+// Run the parser
+runParser().catch((err) => {
+    sendMessage({
+        type: 'error',
+        error: err.message ?? String(err),
+    });
+});
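The worker communicates with its parent only through workerData and postMessage. Below is a minimal sketch of how a parent thread (in this package, presumably the job manager added in dist/mcp/services/job-manager.js) could drive it. The workerData fields mirror what parse-worker.js reads from its config, and the message types match the ones it sends, but the actual orchestration code is not shown in this diff, so treat the wiring as an assumption.

```js
// Spawn the parse worker and relay its progress/complete/error messages.
import { Worker } from 'worker_threads';

const worker = new Worker(
  new URL('./dist/mcp/workers/parse-worker.js', import.meta.url),
  {
    workerData: {
      projectPath: '/path/to/my-project',
      tsconfigPath: '/path/to/my-project/tsconfig.json',
      // projectId omitted: the parser derives one (see parser.getProjectId())
      projectType: 'auto', // or a specific framework type supported by ParserFactory
      chunkSize: 100,
    },
  },
);

worker.on('message', (msg) => {
  if (msg.type === 'progress') {
    console.log(`[${msg.data.phase}] ${msg.data.filesProcessed}/${msg.data.filesTotal} files`);
  }
  else if (msg.type === 'complete') {
    console.log(`Imported ${msg.data.nodesImported} nodes and ${msg.data.edgesImported} edges in ${msg.data.elapsedMs}ms`);
  }
  else if (msg.type === 'error') {
    console.error('Parse worker failed:', msg.error);
  }
});
worker.on('error', (err) => console.error('Worker crashed:', err));
```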