code-graph-context 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +14 -0
- package/LICENSE +21 -0
- package/README.md +870 -0
- package/dist/constants.js +1 -0
- package/dist/core/config/fairsquare-framework-schema.js +832 -0
- package/dist/core/config/graph-v2.js +1595 -0
- package/dist/core/config/nestjs-framework-schema.js +894 -0
- package/dist/core/config/schema.js +799 -0
- package/dist/core/embeddings/embeddings.service.js +26 -0
- package/dist/core/embeddings/natural-language-to-cypher.service.js +148 -0
- package/dist/core/parsers/parser-factory.js +102 -0
- package/dist/core/parsers/typescript-parser-v2.js +590 -0
- package/dist/core/parsers/typescript-parser.js +717 -0
- package/dist/mcp/constants.js +141 -0
- package/dist/mcp/handlers/graph-generator.handler.js +143 -0
- package/dist/mcp/handlers/traversal.handler.js +304 -0
- package/dist/mcp/mcp.server.js +47 -0
- package/dist/mcp/services.js +158 -0
- package/dist/mcp/tools/hello.tool.js +13 -0
- package/dist/mcp/tools/index.js +24 -0
- package/dist/mcp/tools/natural-language-to-cypher.tool.js +59 -0
- package/dist/mcp/tools/parse-typescript-project.tool.js +101 -0
- package/dist/mcp/tools/search-codebase.tool.js +97 -0
- package/dist/mcp/tools/test-neo4j-connection.tool.js +39 -0
- package/dist/mcp/tools/traverse-from-node.tool.js +97 -0
- package/dist/mcp/utils.js +152 -0
- package/dist/parsers/cypher-result.parser.js +44 -0
- package/dist/storage/neo4j/neo4j.service.js +277 -0
- package/dist/utils/test.js +19 -0
- package/package.json +81 -0
package/dist/mcp/utils.js ADDED
@@ -0,0 +1,152 @@
/**
 * MCP Server Utility Functions
 * Common utility functions used across the MCP server
 */
import fs from 'fs/promises';
import path from 'path';
import { FILE_PATHS, LOG_CONFIG, MESSAGES } from './constants.js';
/**
 * Debug logging utility
 */
export const debugLog = async (message, data) => {
    const timestamp = new Date().toISOString();
    const logEntry = `[${timestamp}] ${message}\n${data ? JSON.stringify(data, null, LOG_CONFIG.jsonIndentation) : ''}\n${LOG_CONFIG.logSeparator}\n`;
    try {
        await fs.appendFile(path.join(process.cwd(), FILE_PATHS.debugLog), logEntry);
    }
    catch (error) {
        console.error('Failed to write debug log:', error);
    }
};
/**
 * Standard error response format for MCP tools
 */
export const createErrorResponse = (error) => {
    const errorMessage = error instanceof Error ? error.message : error;
    return {
        content: [
            {
                type: 'text',
                text: `${MESSAGES.errors.genericError} ${errorMessage}`,
            },
        ],
    };
};
/**
 * Standard success response format for MCP tools
 */
export const createSuccessResponse = (text) => {
    return {
        content: [
            {
                type: 'text',
                text,
            },
        ],
    };
};
/**
 * Format node information as structured data
 */
export const formatNodeInfo = (value, key) => {
    if (value && typeof value === 'object' && value.labels && value.properties) {
        // Return structured node data
        const result = {
            id: value.properties.id,
            type: value.labels[0] ?? 'Unknown',
            filePath: value.properties.filePath,
        };
        if (value.properties.name) {
            result.name = value.properties.name;
        }
        // Include source code if available and not a SourceFile
        if (value.properties.sourceCode && value.properties.coreType !== 'SourceFile') {
            const code = value.properties.sourceCode;
            const maxLength = 1000; // Show max 1000 chars total
            if (code.length <= maxLength) {
                result.sourceCode = code;
            }
            else {
                // Show first 500 and last 500 characters
                const half = Math.floor(maxLength / 2);
                result.sourceCode = code.substring(0, half) + '\n\n... [truncated] ...\n\n' + code.substring(code.length - half);
                result.hasMore = true;
                result.truncated = code.length - maxLength;
            }
        }
        return result;
    }
    else if (value && typeof value === 'object' && value.type) {
        // Return structured relationship data
        return {
            relationshipType: value.type,
            properties: value.properties,
        };
    }
    else if (value && typeof value === 'object' && !Array.isArray(value)) {
        // Handle record objects (e.g., {rd.filePath: "...", rd.name: "..."})
        const formatted = {};
        Object.keys(value).forEach((k) => {
            formatted[k] = formatNodeInfo(value[k], k);
        });
        return formatted;
    }
    else {
        // Return primitive as-is
        return value;
    }
};
/**
 * Format results for the natural language to cypher tool
 */
export const formatQueryResults = (results, query, cypherResult) => {
    const formattedResults = results.map((record) => formatNodeInfo(record, 'result'));
    return {
        query,
        cypher: cypherResult.cypher,
        parameters: cypherResult.parameters ?? {},
        explanation: cypherResult.explanation,
        totalResults: results.length,
        results: formattedResults,
    };
};
/**
 * Validate and sanitize numeric inputs
 */
export const sanitizeNumericInput = (value, defaultValue, max) => {
    const parsed = typeof value === 'string' ? parseInt(value, 10) : value;
    if (isNaN(parsed) || parsed < 0) {
        return defaultValue;
    }
    if (max !== undefined && parsed > max) {
        return max;
    }
    return parsed;
};
/**
 * Safe JSON parse with fallback
 */
export const safeJsonParse = (json, fallback = null) => {
    try {
        return JSON.parse(json);
    }
    catch {
        return fallback;
    }
};
/**
 * Format success message for parsing results
 */
export const formatParseSuccess = (nodeCount, edgeCount, result) => {
    let message = `${MESSAGES.success.parseSuccess} Parsed ${nodeCount} nodes and ${edgeCount} edges. Graph imported to Neo4j.`;
    if (result) {
        message += ` Result: ${JSON.stringify(result)}`;
    }
    return message;
};
/**
 * Format partial success message for parsing results
 */
export const formatParsePartialSuccess = (nodeCount, edgeCount, outputPath, errorMessage) => {
    return `${MESSAGES.success.partialSuccess} Parsed ${nodeCount} nodes and ${edgeCount} edges. JSON saved to ${outputPath}. Neo4j import failed: ${errorMessage}`;
};
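For illustration only (not part of the package): a minimal sketch of how an MCP tool handler might compose the helpers above. The handler name, argument shape, and Cypher query are hypothetical; Neo4jService.run is the method defined in dist/storage/neo4j/neo4j.service.js further down.

import { createErrorResponse, createSuccessResponse, debugLog, sanitizeNumericInput } from './utils.js';

// Hypothetical handler: look up a node by id, with an optional depth argument
// sanitized to a default of 1 and capped at 5.
export const handleGetNode = async (args, neo4jService) => {
    try {
        const depth = sanitizeNumericInput(args.depth, 1, 5);
        const records = await neo4jService.run('MATCH (n) WHERE n.id = $id RETURN n', { id: args.nodeId });
        await debugLog('getNode', { nodeId: args.nodeId, depth, found: records.length });
        return createSuccessResponse(JSON.stringify(records, null, 2));
    }
    catch (error) {
        return createErrorResponse(error);
    }
};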
package/dist/parsers/cypher-result.parser.js ADDED
@@ -0,0 +1,44 @@
export class CypherResultParser {
    static parseCypherResult(result) {
        if (!result || result.length === 0) {
            return 'No results found.';
        }
        const parsedResults = {};
        // Parse nodes
        if (result[0].nodes) {
            parsedResults.nodes = result[0].nodes.map((node) => ({
                identity: node.identity.toString(),
                labels: node.labels,
                properties: node.properties,
            }));
        }
        // Parse relationships
        if (result[0].relationships) {
            parsedResults.relationships = result[0].relationships.map((rel) => ({
                identity: rel.identity.toString(),
                type: rel.type,
                properties: rel.properties,
                start: rel.start.toString(),
                end: rel.end.toString(),
            }));
        }
        // Parse paths
        if (result[0].paths) {
            parsedResults.paths = result[0].paths.map((path) => ({
                start: path.start,
                end: path.end,
                segments: path.segments,
                length: path.length,
            }));
        }
        // Parse data
        if (result[0].data) {
            parsedResults.data = result[0].data;
        }
        // Parse summary
        if (result[0].summary) {
            parsedResults.summary = result[0].summary;
        }
        return JSON.stringify(parsedResults, null, 2);
    }
}
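For illustration only: parseCypherResult works on the array of record objects returned by Neo4jService.run, and it only reads the first record's nodes, relationships, paths, data, and summary keys, so the Cypher query has to alias its return values accordingly. A hypothetical call (import paths assume the dist layout listed above):

import { Neo4jService } from '../storage/neo4j/neo4j.service.js';
import { CypherResultParser } from './cypher-result.parser.js';

const neo4jService = new Neo4jService();
// The aliases `nodes` and `relationships` match the keys the parser looks for.
const records = await neo4jService.run('MATCH (a)-[r]->(b) RETURN collect(DISTINCT a) AS nodes, collect(r) AS relationships');
console.log(CypherResultParser.parseCypherResult(records));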
package/dist/storage/neo4j/neo4j.service.js ADDED
@@ -0,0 +1,277 @@
import neo4j from 'neo4j-driver';
import { MAX_TRAVERSAL_DEPTH } from '../../constants.js';
export class Neo4jService {
    driver;
    constructor() {
        this.driver = this.createDriver();
    }
    createDriver() {
        const uri = process.env.NEO4J_URI ?? 'bolt://localhost:7687';
        const user = process.env.NEO4J_USER ?? 'neo4j';
        const password = process.env.NEO4J_PASSWORD ?? 'PASSWORD';
        return neo4j.driver(uri, neo4j.auth.basic(user, password));
    }
    async run(query, params = {}) {
        const session = this.driver.session();
        try {
            const result = await session.run(query, params);
            return result.records.map((record) => record.toObject());
        }
        catch (error) {
            console.error('Error running query:', error);
            throw error;
        }
        finally {
            await session.close();
        }
    }
    getDriver() {
        return this.driver;
    }
    async getSchema() {
        const session = this.driver.session();
        try {
            return await session.run(QUERIES.APOC_SCHEMA);
        }
        catch (error) {
            console.error('Error fetching schema:', error);
            throw error;
        }
        finally {
            await session.close();
        }
    }
}
export const QUERIES = {
    APOC_SCHEMA: `
        CALL apoc.meta.schema() YIELD value
        RETURN value as schema
    `,
    CLEAR_DATABASE: 'MATCH (n) DETACH DELETE n',
    CREATE_NODE: `
        UNWIND $nodes AS nodeData
        CALL apoc.create.node(nodeData.labels, nodeData.properties) YIELD node
        RETURN count(*) as created
    `,
    CREATE_RELATIONSHIP: `
        UNWIND $edges AS edgeData
        MATCH (start) WHERE start.id = edgeData.startNodeId
        MATCH (end) WHERE end.id = edgeData.endNodeId
        WITH start, end, edgeData
        CALL apoc.create.relationship(start, edgeData.type, edgeData.properties, end) YIELD rel
        RETURN count(*) as created
    `,
    CREATE_INDEX: (label, property) => `CREATE INDEX IF NOT EXISTS FOR (n:${label}) ON (n.${property})`,
    GET_STATS: `
        MATCH (n)
        RETURN labels(n)[0] as nodeType, count(*) as count
        ORDER BY count DESC
    `,
    CREATE_EMBEDDED_VECTOR_INDEX: `
        CREATE VECTOR INDEX embedded_nodes_idx IF NOT EXISTS
        FOR (n:Embedded) ON (n.embedding)
        OPTIONS {indexConfig: {
            \`vector.dimensions\`: 3072,
            \`vector.similarity_function\`: 'cosine'
        }}
    `,
    VECTOR_SEARCH: `
        CALL db.index.vector.queryNodes('embedded_nodes_idx', $limit, $embedding)
        YIELD node, score
        RETURN {
            id: node.id,
            labels: labels(node),
            properties: apoc.map.removeKeys(properties(node), ['embedding'])
        } as node, score
        ORDER BY score DESC
    `,
    // Check if index exists
    CHECK_VECTOR_INDEX: `
        SHOW INDEXES YIELD name, type
        WHERE name = 'node_embedding_idx' AND type = 'VECTOR'
        RETURN count(*) > 0 as exists
    `,
    EXPLORE_ALL_CONNECTIONS: (maxDepth = MAX_TRAVERSAL_DEPTH, direction = 'BOTH', relationshipTypes) => {
        const safeMaxDepth = Math.min(Math.max(maxDepth, 1), MAX_TRAVERSAL_DEPTH);
        // Build relationship pattern based on direction
        let relPattern = '';
        if (direction === 'OUTGOING') {
            relPattern = `-[*1..${safeMaxDepth}]->`;
        }
        else if (direction === 'INCOMING') {
            relPattern = `<-[*1..${safeMaxDepth}]-`;
        }
        else {
            relPattern = `-[*1..${safeMaxDepth}]-`;
        }
        // Build relationship type filter if specified
        let relTypeFilter = '';
        if (relationshipTypes && relationshipTypes.length > 0) {
            const types = relationshipTypes.map((t) => `'${t}'`).join(', ');
            relTypeFilter = `AND all(rel in relationships(path) WHERE type(rel) IN [${types}])`;
        }
        return `
            MATCH (start) WHERE start.id = $nodeId

            CALL {
                WITH start
                MATCH path = (start)${relPattern}(connected)
                WHERE connected <> start
                ${relTypeFilter}
                WITH path, connected, length(path) as depth

                RETURN {
                    id: connected.id,
                    labels: labels(connected),
                    properties: apoc.map.removeKeys(properties(connected), ['embedding'])
                } as node,
                depth,
                [rel in relationships(path) | {
                    type: type(rel),
                    start: startNode(rel).id,
                    end: endNode(rel).id,
                    properties: properties(rel)
                }] as relationshipChain
            }

            WITH start, collect({
                node: node,
                depth: depth,
                relationshipChain: relationshipChain
            }) as allConnections

            WITH start, allConnections,
                allConnections[$skip..] as connections

            RETURN {
                startNode: {
                    id: start.id,
                    labels: labels(start),
                    properties: apoc.map.removeKeys(properties(start), ['embedding'])
                },
                connections: connections,
                totalConnections: size(allConnections),
                graph: {
                    nodes: [conn in connections | conn.node] + [{
                        id: start.id,
                        labels: labels(start),
                        properties: apoc.map.removeKeys(properties(start), ['embedding'])
                    }],
                    relationships: reduce(rels = [], conn in connections | rels + conn.relationshipChain)
                }
            } as result
        `;
    },
    /**
     * DEPTH-BY-DEPTH WEIGHTED TRAVERSAL
     *
     * This query is called once per depth level, allowing you to score and prune
     * at each level before deciding which nodes to explore further.
     *
     * Parameters:
     *   $sourceNodeIds: string[] - Node IDs to explore FROM (starts with just start node)
     *   $visitedNodeIds: string[] - Node IDs already visited (to avoid cycles)
     *   $queryEmbedding: number[] - The original query embedding for similarity scoring
     *   $currentDepth: number - Which depth level we're at (1-indexed)
     *   $depthDecay: number - Decay factor per depth (e.g., 0.85 means 15% penalty per level)
     *   $maxNodesPerDepth: number - Maximum nodes to return at this depth
     *   $direction: 'OUTGOING' | 'INCOMING' | 'BOTH'
     *
     * How it works:
     *
     * 1. UNWIND $sourceNodeIds - For each node we're exploring FROM
     * 2. MATCH neighbors - Find all immediate neighbors (1 hop only)
     * 3. Filter out visited nodes - Avoid cycles
     * 4. Score each neighbor using:
     *    - edgeWeight: The relationshipWeight we added to edges (how important is this relationship type?)
     *    - nodeSimilarity: Cosine similarity between neighbor's embedding and query embedding
     *    - depthPenalty: Exponential decay based on current depth
     * 5. Combine: score = edgeWeight * nodeSimilarity * depthPenalty
     * 6. ORDER BY score DESC, LIMIT to top N
     * 7. Return scored neighbors - caller decides which to explore at next depth
     *
     * Example flow:
     *   Depth 1: sourceNodeIds=[startNode], returns top 5 neighbors with scores
     *   Depth 2: sourceNodeIds=[top 3 from depth 1], returns top 5 neighbors of those
     *   Depth 3: sourceNodeIds=[top 3 from depth 2], returns top 5 neighbors of those
     *   ...until maxDepth reached or no more neighbors
     */
    EXPLORE_DEPTH_LEVEL: (direction = 'BOTH', maxNodesPerDepth = 5) => {
        // Build relationship pattern based on direction
        let relPattern = '';
        if (direction === 'OUTGOING') {
            relPattern = '-[rel]->';
        }
        else if (direction === 'INCOMING') {
            relPattern = '<-[rel]-';
        }
        else {
            relPattern = '-[rel]-';
        }
        return `
            // Unwind the source nodes we're exploring from
            UNWIND $sourceNodeIds AS sourceId
            MATCH (source) WHERE source.id = sourceId

            // Find immediate neighbors (exactly 1 hop)
            MATCH (source)${relPattern}(neighbor)

            // Filter: skip already visited nodes to avoid cycles
            WHERE NOT neighbor.id IN $visitedNodeIds

            // Calculate the three scoring components
            WITH source, neighbor, rel,

                // 1. Edge weight: how important is this relationship type?
                //    Falls back to 0.5 if not set
                COALESCE(rel.relationshipWeight, 0.5) AS edgeWeight,

                // 2. Node similarity: how relevant is this node to the query?
                //    Uses cosine similarity if neighbor has an embedding
                //    Falls back to 0.5 if no embedding (structural nodes like decorators)
                CASE
                    WHEN neighbor.embedding IS NOT NULL AND $queryEmbedding IS NOT NULL
                    THEN vector.similarity.cosine(neighbor.embedding, $queryEmbedding)
                    ELSE 0.5
                END AS nodeSimilarity,

                // 3. Depth penalty: exponential decay
                //    depth 1: decay^0 = 1.0 (no penalty)
                //    depth 2: decay^1 = 0.85 (if decay=0.85)
                //    depth 3: decay^2 = 0.72
                //    This ensures closer nodes are preferred
                ($depthDecay ^ ($currentDepth - 1)) AS depthPenalty

            // Combine into final score
            WITH source, neighbor, rel, edgeWeight, nodeSimilarity, depthPenalty,
                (edgeWeight * nodeSimilarity * depthPenalty) AS combinedScore

            // Return all neighbor data with scores
            RETURN {
                node: {
                    id: neighbor.id,
                    labels: labels(neighbor),
                    properties: apoc.map.removeKeys(properties(neighbor), ['embedding'])
                },
                relationship: {
                    type: type(rel),
                    startNodeId: startNode(rel).id,
                    endNodeId: endNode(rel).id,
                    properties: properties(rel)
                },
                sourceNodeId: source.id,
                scoring: {
                    edgeWeight: edgeWeight,
                    nodeSimilarity: nodeSimilarity,
                    depthPenalty: depthPenalty,
                    combinedScore: combinedScore
                }
            } AS result

            // Sort by score and limit to top N per depth
            ORDER BY combinedScore DESC
            LIMIT ${maxNodesPerDepth}
        `;
    },
};
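For illustration only: a sketch of the depth-by-depth caller loop that the DEPTH-BY-DEPTH WEIGHTED TRAVERSAL comment describes. The package's actual traversal logic lives in dist/mcp/handlers/traversal.handler.js and may differ; the function name, the pruning width of 3, and the decay of 0.85 below simply mirror the example flow in that comment.

import { Neo4jService, QUERIES } from './neo4j.service.js';

export const exploreDepthByDepth = async (startNodeId, queryEmbedding, maxDepth = 3) => {
    const neo4jService = new Neo4jService();
    const visitedNodeIds = [startNodeId];
    let sourceNodeIds = [startNodeId];
    const collected = [];
    for (let currentDepth = 1; currentDepth <= maxDepth && sourceNodeIds.length > 0; currentDepth++) {
        // One round trip per depth level: returns at most 5 scored neighbors.
        const rows = await neo4jService.run(QUERIES.EXPLORE_DEPTH_LEVEL('BOTH', 5), {
            sourceNodeIds,
            visitedNodeIds,
            queryEmbedding,
            currentDepth,
            depthDecay: 0.85,
        });
        const scored = rows.map((row) => row.result);
        collected.push(...scored);
        // Mark everything returned at this depth as visited, then let only the
        // top 3 scoring nodes become the sources for the next depth.
        scored.forEach((entry) => visitedNodeIds.push(entry.node.id));
        sourceNodeIds = scored.slice(0, 3).map((entry) => entry.node.id);
    }
    return collected;
};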
package/dist/utils/test.js ADDED
@@ -0,0 +1,19 @@
import 'reflect-metadata';
import { writeFileSync } from 'node:fs';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { NESTJS_FRAMEWORK_SCHEMA } from '../core/config/graph-v2';
import { TypeScriptParser } from '../core/parsers/typescript-parser-v2';
const workspace = path.join(os.homedir(), 'nestjs/iluvcoffee');
const tsconfig = path.join(workspace, 'tsconfig.json'); // or tsconfig.build.json etc.
(async () => {
    console.log({ workspace, tsconfig, exists: fs.existsSync(tsconfig) });
    const parser = new TypeScriptParser(workspace, tsconfig, undefined, [NESTJS_FRAMEWORK_SCHEMA]);
    // 👇 pull in every *.ts file under the repo
    parser['project'].addSourceFilesAtPaths(path.join(workspace, 'src/**/*.ts'));
    const { nodes, edges } = await parser.parseWorkspace(); // runs fine now
    const { nodes: cleanNodes, edges: cleanEdges } = parser.exportToJson();
    console.log(`Parsed ${cleanNodes.length} nodes / ${cleanEdges.length} edges`);
    writeFileSync('em-backend-graph.json', JSON.stringify({ nodes: cleanNodes, edges: cleanEdges }, null, 2));
})();
package/package.json ADDED
@@ -0,0 +1,81 @@
{
  "name": "code-graph-context",
  "version": "0.1.0",
  "description": "MCP server that builds code graphs to provide rich context to LLMs",
  "type": "module",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/drewdrewH/code-graph-context.git"
  },
  "keywords": [
    "mcp",
    "mcp-server",
    "model-context-protocol",
    "code-graph",
    "code-analysis",
    "neo4j",
    "graph-database",
    "typescript",
    "nestjs",
    "ast-parser",
    "semantic-search",
    "openai",
    "embeddings",
    "graph-rag",
    "llm",
    "claude"
  ],
  "author": "Andrew Hernandez",
  "license": "MIT",
  "main": "dist/mcp/mcp.server.js",
  "bin": {
    "code-graph-context": "dist/mcp/mcp.server.js"
  },
  "files": [
    "dist/**/*",
    "README.md",
    "LICENSE",
    ".env.example"
  ],
  "scripts": {
    "build": "tsc",
    "prepare": "npm run build",
    "mcp": "node dist/mcp/mcp.server.js",
    "dev": "tsc --watch",
    "graph:print": "npm run build && node dist/utils/test.js",
    "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
    "format": "prettier --write \"src/**/*.ts\""
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "^1.15.1",
    "commander": "^14.0.0",
    "dotenv": "^17.2.3",
    "glob": "^11.0.3",
    "neo4j": "^2.0.0-RC2",
    "neo4j-driver": "^5.28.1",
    "openai": "^5.10.1",
    "zod": "^3.25.76"
  },
  "devDependencies": {
    "@eslint/js": "^9.29.0",
    "@types/neo4j": "^2.0.6",
    "@types/node": "^20.19.1",
    "@typescript-eslint/eslint-plugin": "^8.34.1",
    "@typescript-eslint/parser": "^8.34.1",
    "eslint": "^9.29.0",
    "eslint-config-prettier": "^10.1.5",
    "eslint-import-resolver-typescript": "^4.4.4",
    "eslint-plugin-import": "^2.32.0",
    "eslint-plugin-prefer-arrow": "^1.2.3",
    "eslint-plugin-prettier": "^5.5.0",
    "eslint-plugin-unused-imports": "^4.1.4",
    "globals": "^16.2.0",
    "prettier": "^3.5.3",
    "reflect-metadata": "^0.2.2",
    "ts-morph": "^26.0.0",
    "ts-node": "^10.9.2",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.34.1",
    "uuid": "^11.1.0"
  }
}
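For illustration only: the "bin" entry above means an MCP client can launch the server via npx. A typical Claude Desktop-style registration might look like the sketch below; the exact setup is presumably documented in the package's README, the server key and credential values here are placeholders, and NEO4J_URI, NEO4J_USER, and NEO4J_PASSWORD are the variables read by dist/storage/neo4j/neo4j.service.js.

{
  "mcpServers": {
    "code-graph-context": {
      "command": "npx",
      "args": ["-y", "code-graph-context"],
      "env": {
        "NEO4J_URI": "bolt://localhost:7687",
        "NEO4J_USER": "neo4j",
        "NEO4J_PASSWORD": "your-password"
      }
    }
  }
}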