code-graph-context 0.1.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,5 +1,6 @@
  # Code Graph Context MCP Server

+ [![npm version](https://badge.fury.io/js/code-graph-context.svg)](https://www.npmjs.com/package/code-graph-context)
  [![MIT License](https://img.shields.io/badge/License-MIT-green.svg)](LICENSE)
  [![TypeScript](https://img.shields.io/badge/TypeScript-5.8-007ACC?logo=typescript&logoColor=white)](https://typescriptlang.org/)
  [![Neo4j](https://img.shields.io/badge/Neo4j-5.0+-018bff?logo=neo4j&logoColor=white)](https://neo4j.com/)
@@ -147,6 +148,84 @@ Then configure in your MCP config file (`~/.config/claude/config.json`):

  **Note:** The env vars can be configured for any Neo4j instance - local, Docker, cloud (Aura), or enterprise.

+ ### Verify Installation
+
+ After installation, verify everything is working:
+
+ 1. **Check Neo4j is running:**
+ ```bash
+ # Open Neo4j Browser
+ open http://localhost:7474
+ # Login: neo4j / PASSWORD
+ ```
+
+ 2. **Test APOC plugin:**
+ ```cypher
+ CALL apoc.help("apoc")
+ ```
+ Should return a list of APOC functions.
+
+ 3. **Test MCP server connection:**
+ ```bash
+ claude mcp list
+ ```
+ Should show: `code-graph-context: ✓ Connected`
+
+ ### Troubleshooting
+
+ **"APOC plugin not found"**
+ ```bash
+ # Check Neo4j logs
+ docker logs code-graph-neo4j
+
+ # Verify APOC loaded
+ docker exec code-graph-neo4j cypher-shell -u neo4j -p PASSWORD "CALL apoc.help('apoc')"
+
+ # Restart if needed
+ docker restart code-graph-neo4j
+ ```
+
+ **"OPENAI_API_KEY environment variable is required"**
+ - Get your API key from: https://platform.openai.com/api-keys
+ - Add to Claude Code MCP config `env` section
+ - Verify with: `echo $OPENAI_API_KEY` (if using shell env)
+
+ **"Connection refused bolt://localhost:7687"**
+ ```bash
+ # Check Neo4j is running
+ docker ps | grep neo4j
+
+ # Check ports are not in use
+ lsof -i :7687
+
+ # Start Neo4j if stopped
+ docker start code-graph-neo4j
+
+ # Check Neo4j logs
+ docker logs code-graph-neo4j
+ ```
+
+ **"Neo4j memory errors"**
+ ```bash
+ # Increase memory in docker-compose.yml or docker run:
+ -e NEO4J_server_memory_heap_max__size=8G
+ -e NEO4J_dbms_memory_transaction_total_max=8G
+
+ docker restart code-graph-neo4j
+ ```
+
+ **"MCP server not responding"**
+ ```bash
+ # Check Claude Code logs
+ cat ~/Library/Logs/Claude/mcp*.log
+
+ # Test server directly
+ node /path/to/code-graph-context/dist/mcp/mcp.server.js
+
+ # Rebuild if needed
+ npm run build
+ ```
+
  ## Tool Usage Guide & Sequential Workflows

  ### Sequential Tool Usage Patterns
package/dist/constants.js CHANGED
@@ -1 +1,22 @@
  export const MAX_TRAVERSAL_DEPTH = 5;
+ // Shared exclude patterns for file parsing and change detection
+ // Regex patterns (escaped dots, anchored to end)
+ export const EXCLUDE_PATTERNS_REGEX = [
+ 'node_modules/',
+ 'dist/',
+ 'build/',
+ 'coverage/',
+ '\\.d\\.ts$',
+ '\\.spec\\.ts$',
+ '\\.test\\.ts$',
+ ];
+ // Glob patterns for use with glob library
+ export const EXCLUDE_PATTERNS_GLOB = [
+ 'node_modules/**',
+ 'dist/**',
+ 'build/**',
+ 'coverage/**',
+ '**/*.d.ts',
+ '**/*.spec.ts',
+ '**/*.test.ts',
+ ];
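For orientation, here is a minimal TypeScript sketch (not part of the package) of how the two pattern flavors above are consumed elsewhere in this diff: the regex list is matched against file paths with `RegExp` in the parser's `shouldSkipFile`, while the glob list is passed to the `glob` library's `ignore` option during change detection. The import path and the sample file name below are illustrative assumptions.

```typescript
// Illustrative sketch: consuming the two exclude-pattern flavors.
import { glob } from 'glob';
import { EXCLUDE_PATTERNS_REGEX, EXCLUDE_PATTERNS_GLOB } from './constants.js'; // path is illustrative

// Regex flavor, as used by the parser's shouldSkipFile check.
const isExcluded = (filePath: string): boolean =>
  EXCLUDE_PATTERNS_REGEX.some((pattern) => new RegExp(pattern).test(filePath));

// Glob flavor, as used by incremental change detection to list candidate files.
const listCandidateFiles = (projectPath: string): Promise<string[]> =>
  glob('**/*.ts', { cwd: projectPath, ignore: EXCLUDE_PATTERNS_GLOB });

console.log(isExcluded('src/user.service.spec.ts')); // true — matches '\.spec\.ts$'
```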
@@ -1,5 +1,6 @@
  /* eslint-disable prefer-arrow/prefer-arrow-functions */
  /* eslint-disable @typescript-eslint/no-explicit-any */
+ import { EXCLUDE_PATTERNS_REGEX } from '../../constants.js';
  import { CoreNodeType, SemanticNodeType, SemanticEdgeType, } from './schema.js';
  // ============================================================================
  // NESTJS HELPER FUNCTIONS
@@ -888,7 +889,7 @@ export const NESTJS_FRAMEWORK_SCHEMA = {
  // ============================================================================
  export const NESTJS_PARSE_OPTIONS = {
  includePatterns: ['**/*.ts', '**/*.tsx'],
- excludePatterns: ['node_modules/', 'dist/', 'coverage/', '.d.ts', '.spec.ts', '.test.ts'],
+ excludePatterns: EXCLUDE_PATTERNS_REGEX,
  maxFiles: 1000,
  frameworkSchemas: [NESTJS_FRAMEWORK_SCHEMA],
  };
@@ -1,5 +1,6 @@
  /* eslint-disable @typescript-eslint/no-explicit-any */
  // graph.ts - Optimized for Neo4j performance with context-based framework properties
+ import { EXCLUDE_PATTERNS_REGEX } from '../../constants.js';
  // ============================================================================
  // CORE ENUMS
  // ============================================================================
@@ -792,7 +793,7 @@ export const CORE_TYPESCRIPT_SCHEMA = {
  };
  export const DEFAULT_PARSE_OPTIONS = {
  includePatterns: ['**/*.ts', '**/*.tsx'],
- excludePatterns: ['node_modules/', 'dist/', 'coverage/', '.d.ts', '.spec.ts', '.test.ts'],
+ excludePatterns: EXCLUDE_PATTERNS_REGEX,
  maxFiles: 1000,
  coreSchema: CORE_TYPESCRIPT_SCHEMA,
  frameworkSchemas: [],
@@ -2,6 +2,7 @@
  * Parser Factory
  * Creates TypeScript parsers with appropriate framework schemas
  */
+ import { EXCLUDE_PATTERNS_REGEX } from '../../constants.js';
  import { FAIRSQUARE_FRAMEWORK_SCHEMA } from '../config/fairsquare-framework-schema.js';
  import { NESTJS_FRAMEWORK_SCHEMA } from '../config/nestjs-framework-schema.js';
  import { CORE_TYPESCRIPT_SCHEMA, CoreNodeType } from '../config/schema.js';
@@ -19,7 +20,7 @@ export class ParserFactory {
  */
  static createParser(options) {
  const { workspacePath, tsConfigPath = 'tsconfig.json', projectType = ProjectType.NESTJS, // Default to NestJS (use auto-detect for best results)
- customFrameworkSchemas = [], excludePatterns = ['node_modules', 'dist', 'build', '.spec.', '.test.'], excludedNodeTypes = [CoreNodeType.PARAMETER_DECLARATION], } = options;
+ customFrameworkSchemas = [], excludePatterns = EXCLUDE_PATTERNS_REGEX, excludedNodeTypes = [CoreNodeType.PARAMETER_DECLARATION], } = options;
  // Select framework schemas based on project type
  const frameworkSchemas = this.selectFrameworkSchemas(projectType, customFrameworkSchemas);
  console.log(`📦 Creating parser for ${projectType} project`);
@@ -68,7 +69,9 @@ export class ParserFactory {
  ...packageJson.devDependencies,
  };
  const hasNestJS = '@nestjs/common' in deps || '@nestjs/core' in deps;
- const hasFairSquare = '@fairsquare/core' in deps || '@fairsquare/server' in deps;
+ const hasFairSquare = '@fairsquare/core' in deps ||
+ '@fairsquare/server' in deps ||
+ packageJson.name === '@fairsquare/source';
  if (hasFairSquare && hasNestJS) {
  return ProjectType.BOTH;
  }
@@ -1,8 +1,24 @@
  /* eslint-disable @typescript-eslint/no-explicit-any */
+ import crypto from 'crypto';
+ import fs from 'fs/promises';
  import path from 'node:path';
  import { minimatch } from 'minimatch';
  import { Project, Node } from 'ts-morph';
- import { v4 as uuidv4 } from 'uuid';
+ /**
+ * Generate a deterministic node ID based on stable properties.
+ * This ensures the same node gets the same ID across reparses.
+ *
+ * Identity is based on: coreType + filePath + name (+ parentId for nested nodes)
+ * This is stable because when it matters (one side of edge not reparsed),
+ * names are guaranteed unchanged (or imports would break, triggering reparse).
+ */
+ const generateDeterministicId = (coreType, filePath, name, parentId) => {
+ const parts = parentId ? [coreType, filePath, parentId, name] : [coreType, filePath, name];
+ const identity = parts.join('::');
+ const hash = crypto.createHash('sha256').update(identity).digest('hex').substring(0, 16);
+ return `${coreType}:${hash}`;
+ };
+ import { hashFile } from '../../utils/file-utils.js';
  import { NESTJS_FRAMEWORK_SCHEMA } from '../config/nestjs-framework-schema.js';
  import { CoreNodeType, CORE_TYPESCRIPT_SCHEMA, DEFAULT_PARSE_OPTIONS, CoreEdgeType, } from '../config/schema.js';
  export class TypeScriptParser {
@@ -14,6 +30,7 @@ export class TypeScriptParser {
  frameworkSchemas;
  parsedNodes = new Map();
  parsedEdges = new Map();
+ existingNodes = new Map(); // Nodes from Neo4j for edge target matching
  deferredEdges = [];
  sharedContext = new Map(); // Shared context for custom data
  constructor(workspacePath, tsConfigPath = 'tsconfig.json', coreSchema = CORE_TYPESCRIPT_SCHEMA, frameworkSchemas = [NESTJS_FRAMEWORK_SCHEMA], parseConfig = DEFAULT_PARSE_OPTIONS) {
@@ -22,7 +39,6 @@ export class TypeScriptParser {
  this.coreSchema = coreSchema;
  this.frameworkSchemas = frameworkSchemas;
  this.parseConfig = parseConfig;
- // Initialize with proper compiler options for NestJS
  this.project = new Project({
  tsConfigFilePath: tsConfigPath,
  skipAddingFilesFromTsConfig: false,
@@ -36,25 +52,53 @@ export class TypeScriptParser {
  });
  this.project.addSourceFilesAtPaths(path.join(workspacePath, '**/*.ts'));
  }
- async parseWorkspace() {
- const sourceFiles = this.project.getSourceFiles();
- // Phase 1: Core parsing for ALL files
+ /**
+ * Set existing nodes from Neo4j for edge target matching during incremental parsing.
+ * These nodes will be available as targets for edge detection but won't be exported.
+ */
+ setExistingNodes(nodes) {
+ this.existingNodes.clear();
+ for (const node of nodes) {
+ // Convert to ParsedNode format (without AST)
+ const parsedNode = {
+ id: node.id,
+ coreType: node.coreType,
+ semanticType: node.semanticType,
+ labels: node.labels,
+ properties: {
+ id: node.id,
+ name: node.name,
+ coreType: node.coreType,
+ filePath: node.filePath,
+ semanticType: node.semanticType,
+ },
+ // No sourceNode - these are from Neo4j, not parsed
+ };
+ this.existingNodes.set(node.id, parsedNode);
+ }
+ console.log(`📦 Loaded ${nodes.length} existing nodes for edge detection`);
+ }
+ async parseWorkspace(filesToParse) {
+ let sourceFiles;
+ if (filesToParse && filesToParse.length > 0) {
+ sourceFiles = filesToParse
+ .map((filePath) => this.project.getSourceFile(filePath))
+ .filter((sf) => sf !== undefined);
+ }
+ else {
+ sourceFiles = this.project.getSourceFiles();
+ }
  for (const sourceFile of sourceFiles) {
  if (this.shouldSkipFile(sourceFile))
  continue;
  await this.parseCoreTypeScriptV2(sourceFile);
  }
- // Phase 1.5: Resolve deferred relationship edges (EXTENDS, IMPLEMENTS)
  this.resolveDeferredEdges();
- // Phase 2: Apply context extractors
  await this.applyContextExtractors();
- // Phase 3: Framework enhancements
  if (this.frameworkSchemas.length > 0) {
  await this.applyFrameworkEnhancements();
  }
- // Phase 4: Edge enhancements
  await this.applyEdgeEnhancements();
- // Convert to Neo4j format
  const neo4jNodes = Array.from(this.parsedNodes.values()).map(this.toNeo4jNode);
  const neo4jEdges = Array.from(this.parsedEdges.values()).map(this.toNeo4jEdge);
  return { nodes: neo4jNodes, edges: neo4jEdges };
@@ -77,17 +121,22 @@ export class TypeScriptParser {
  return false;
  }
  async parseCoreTypeScriptV2(sourceFile) {
- const sourceFileNode = this.createCoreNode(sourceFile, CoreNodeType.SOURCE_FILE);
+ const filePath = sourceFile.getFilePath();
+ const stats = await fs.stat(filePath);
+ const fileTrackingProperties = {
+ size: stats.size,
+ mtime: stats.mtimeMs,
+ contentHash: await hashFile(filePath),
+ };
+ const sourceFileNode = this.createCoreNode(sourceFile, CoreNodeType.SOURCE_FILE, fileTrackingProperties);
  this.addNode(sourceFileNode);
- // Parse configured children
- this.parseChildNodes(this.coreSchema.nodeTypes[CoreNodeType.SOURCE_FILE], sourceFileNode, sourceFile);
- // Special handling: Parse variable declarations if framework schema specifies patterns
+ await this.parseChildNodes(this.coreSchema.nodeTypes[CoreNodeType.SOURCE_FILE], sourceFileNode, sourceFile);
  if (this.shouldParseVariables(sourceFile.getFilePath())) {
  for (const varStatement of sourceFile.getVariableStatements()) {
  for (const varDecl of varStatement.getDeclarations()) {
  if (this.shouldSkipChildNode(varDecl))
  continue;
- const variableNode = this.createCoreNode(varDecl, CoreNodeType.VARIABLE_DECLARATION);
+ const variableNode = this.createCoreNode(varDecl, CoreNodeType.VARIABLE_DECLARATION, {}, sourceFileNode.id);
  this.addNode(variableNode);
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, variableNode.id);
  this.addEdge(containsEdge);
@@ -118,7 +167,7 @@ export class TypeScriptParser {
  for (const child of children) {
  if (this.shouldSkipChildNode(child))
  continue;
- const coreNode = this.createCoreNode(child, type);
+ const coreNode = this.createCoreNode(child, type, {}, parentNode.id);
  this.addNode(coreNode);
  const coreEdge = this.createCoreEdge(edgeType, parentNode.id, coreNode.id);
  this.addEdge(coreEdge);
@@ -205,14 +254,14 @@ export class TypeScriptParser {
  this.addNode(sourceFileNode);
  // Parse classes
  for (const classDecl of sourceFile.getClasses()) {
- const classNode = this.createCoreNode(classDecl, CoreNodeType.CLASS_DECLARATION);
+ const classNode = this.createCoreNode(classDecl, CoreNodeType.CLASS_DECLARATION, {}, sourceFileNode.id);
  this.addNode(classNode);
  // File contains class relationship
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, classNode.id);
  this.addEdge(containsEdge);
  // Parse class decorators
  for (const decorator of classDecl.getDecorators()) {
- const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR);
+ const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR, {}, classNode.id);
  this.addNode(decoratorNode);
  // Class decorated with decorator relationship
  const decoratedEdge = this.createCoreEdge(CoreEdgeType.DECORATED_WITH, classNode.id, decoratorNode.id);
@@ -220,14 +269,14 @@ export class TypeScriptParser {
  }
  // Parse methods
  for (const method of classDecl.getMethods()) {
- const methodNode = this.createCoreNode(method, CoreNodeType.METHOD_DECLARATION);
+ const methodNode = this.createCoreNode(method, CoreNodeType.METHOD_DECLARATION, {}, classNode.id);
  this.addNode(methodNode);
  // Class has method relationship
  const hasMethodEdge = this.createCoreEdge(CoreEdgeType.HAS_MEMBER, classNode.id, methodNode.id);
  this.addEdge(hasMethodEdge);
  // Parse method decorators
  for (const decorator of method.getDecorators()) {
- const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR);
+ const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR, {}, methodNode.id);
  this.addNode(decoratorNode);
  // Method decorated with decorator relationship
  const decoratedEdge = this.createCoreEdge(CoreEdgeType.DECORATED_WITH, methodNode.id, decoratorNode.id);
@@ -235,14 +284,14 @@ export class TypeScriptParser {
  }
  // Parse method parameters
  for (const param of method.getParameters()) {
- const paramNode = this.createCoreNode(param, CoreNodeType.PARAMETER_DECLARATION);
+ const paramNode = this.createCoreNode(param, CoreNodeType.PARAMETER_DECLARATION, {}, methodNode.id);
  this.addNode(paramNode);
  // Method has parameter relationship
  const hasParamEdge = this.createCoreEdge(CoreEdgeType.HAS_PARAMETER, methodNode.id, paramNode.id);
  this.addEdge(hasParamEdge);
  // Parse parameter decorators
  for (const decorator of param.getDecorators()) {
- const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR);
+ const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR, {}, paramNode.id);
  this.addNode(decoratorNode);
  // Parameter decorated with decorator relationship
  const decoratedEdge = this.createCoreEdge(CoreEdgeType.DECORATED_WITH, paramNode.id, decoratorNode.id);
@@ -252,14 +301,14 @@ export class TypeScriptParser {
  }
  // Parse properties
  for (const property of classDecl.getProperties()) {
- const propertyNode = this.createCoreNode(property, CoreNodeType.PROPERTY_DECLARATION);
+ const propertyNode = this.createCoreNode(property, CoreNodeType.PROPERTY_DECLARATION, {}, classNode.id);
  this.addNode(propertyNode);
  // Class has property relationship
  const hasPropertyEdge = this.createCoreEdge(CoreEdgeType.HAS_MEMBER, classNode.id, propertyNode.id);
  this.addEdge(hasPropertyEdge);
  // Parse property decorators
  for (const decorator of property.getDecorators()) {
- const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR);
+ const decoratorNode = this.createCoreNode(decorator, CoreNodeType.DECORATOR, {}, propertyNode.id);
  this.addNode(decoratorNode);
  // Property decorated with decorator relationship
  const decoratedEdge = this.createCoreEdge(CoreEdgeType.DECORATED_WITH, propertyNode.id, decoratorNode.id);
@@ -269,7 +318,7 @@ export class TypeScriptParser {
  }
  // Parse interfaces
  for (const interfaceDecl of sourceFile.getInterfaces()) {
- const interfaceNode = this.createCoreNode(interfaceDecl, CoreNodeType.INTERFACE_DECLARATION);
+ const interfaceNode = this.createCoreNode(interfaceDecl, CoreNodeType.INTERFACE_DECLARATION, {}, sourceFileNode.id);
  this.addNode(interfaceNode);
  // File contains interface relationship
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, interfaceNode.id);
@@ -277,14 +326,14 @@ export class TypeScriptParser {
  }
  // Parse functions
  for (const funcDecl of sourceFile.getFunctions()) {
- const functionNode = this.createCoreNode(funcDecl, CoreNodeType.FUNCTION_DECLARATION);
+ const functionNode = this.createCoreNode(funcDecl, CoreNodeType.FUNCTION_DECLARATION, {}, sourceFileNode.id);
  this.addNode(functionNode);
  // File contains function relationship
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, functionNode.id);
  this.addEdge(containsEdge);
  // Parse function parameters
  for (const param of funcDecl.getParameters()) {
- const paramNode = this.createCoreNode(param, CoreNodeType.PARAMETER_DECLARATION);
+ const paramNode = this.createCoreNode(param, CoreNodeType.PARAMETER_DECLARATION, {}, functionNode.id);
  this.addNode(paramNode);
  // Function has parameter relationship
  const hasParamEdge = this.createCoreEdge(CoreEdgeType.HAS_PARAMETER, functionNode.id, paramNode.id);
@@ -293,7 +342,7 @@ export class TypeScriptParser {
  }
  // Parse imports
  for (const importDecl of sourceFile.getImportDeclarations()) {
- const importNode = this.createCoreNode(importDecl, CoreNodeType.IMPORT_DECLARATION);
+ const importNode = this.createCoreNode(importDecl, CoreNodeType.IMPORT_DECLARATION, {}, sourceFileNode.id);
  this.addNode(importNode);
  // File contains import relationship
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, importNode.id);
@@ -303,7 +352,7 @@ export class TypeScriptParser {
  if (this.shouldParseVariables(sourceFile.getFilePath())) {
  for (const varStatement of sourceFile.getVariableStatements()) {
  for (const varDecl of varStatement.getDeclarations()) {
- const variableNode = this.createCoreNode(varDecl, CoreNodeType.VARIABLE_DECLARATION);
+ const variableNode = this.createCoreNode(varDecl, CoreNodeType.VARIABLE_DECLARATION, {}, sourceFileNode.id);
  this.addNode(variableNode);
  // File contains variable relationship
  const containsEdge = this.createCoreEdge(CoreEdgeType.CONTAINS, sourceFileNode.id, variableNode.id);
@@ -316,18 +365,21 @@ export class TypeScriptParser {
  console.error(`Error parsing file ${sourceFile.getFilePath()}:`, error);
  }
  }
- createCoreNode(astNode, coreType) {
- const nodeId = `${coreType}:${uuidv4()}`;
+ createCoreNode(astNode, coreType, baseProperties = {}, parentId) {
+ const name = this.extractNodeName(astNode, coreType);
+ const filePath = astNode.getSourceFile().getFilePath();
+ const nodeId = generateDeterministicId(coreType, filePath, name, parentId);
  // Extract base properties using schema
  const properties = {
  id: nodeId,
- name: this.extractNodeName(astNode, coreType),
+ name,
  coreType,
- filePath: astNode.getSourceFile().getFilePath(),
+ filePath,
  startLine: astNode.getStartLineNumber(),
  endLine: astNode.getEndLineNumber(),
  sourceCode: astNode.getText(),
  createdAt: new Date().toISOString(),
+ ...baseProperties,
  };
  // Extract schema-defined properties
  const coreNodeDef = this.coreSchema.nodeTypes[coreType];
@@ -386,8 +438,12 @@ export class TypeScriptParser {
  // Get the weight from the core schema
  const coreEdgeSchema = CORE_TYPESCRIPT_SCHEMA.edgeTypes[relationshipType];
  const relationshipWeight = coreEdgeSchema?.relationshipWeight ?? 0.5;
+ // Generate deterministic edge ID based on type + source + target
+ const edgeIdentity = `${relationshipType}::${sourceNodeId}::${targetNodeId}`;
+ const edgeHash = crypto.createHash('sha256').update(edgeIdentity).digest('hex').substring(0, 16);
+ const edgeId = `${relationshipType}:${edgeHash}`;
  return {
- id: `${relationshipType}:${uuidv4()}`,
+ id: edgeId,
  relationshipType,
  sourceNodeId,
  targetNodeId,
@@ -517,8 +573,14 @@ export class TypeScriptParser {
  }
  async applyEdgeEnhancement(edgeEnhancement) {
  try {
+ // Combine parsed nodes and existing nodes for target matching
+ // Sources must be parsed (have AST), targets can be either
+ const allTargetNodes = new Map([...this.parsedNodes, ...this.existingNodes]);
  for (const [sourceId, sourceNode] of this.parsedNodes) {
- for (const [targetId, targetNode] of this.parsedNodes) {
+ // Skip if source doesn't have AST (shouldn't happen for parsedNodes, but be safe)
+ if (!sourceNode.sourceNode)
+ continue;
+ for (const [targetId, targetNode] of allTargetNodes) {
  if (sourceId === targetId)
  continue;
  if (edgeEnhancement.detectionPattern(sourceNode, targetNode, this.parsedNodes, this.sharedContext)) {
@@ -538,7 +600,10 @@ export class TypeScriptParser {
  }
  }
  createFrameworkEdge(semanticType, relationshipType, sourceNodeId, targetNodeId, context = {}, relationshipWeight = 0.5) {
- const edgeId = `${semanticType}:${uuidv4()}`;
+ // Generate deterministic edge ID based on type + source + target
+ const edgeIdentity = `${semanticType}::${sourceNodeId}::${targetNodeId}`;
+ const edgeHash = crypto.createHash('sha256').update(edgeIdentity).digest('hex').substring(0, 16);
+ const edgeId = `${semanticType}:${edgeHash}`;
  const properties = {
  coreType: semanticType, // This might need adjustment based on schema
  semanticType,
@@ -652,7 +717,6 @@ export class TypeScriptParser {
  const excludedPatterns = this.parseConfig.excludePatterns ?? [];
  for (const pattern of excludedPatterns) {
  if (filePath.includes(pattern) || filePath.match(new RegExp(pattern))) {
- console.log(`⏭️ Skipping excluded file: ${filePath}`);
  return true;
  }
  }
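The switch from `uuidv4()` to the deterministic scheme above is what makes incremental re-parsing workable: a node's ID can be recomputed from its type, file path, name, and parent, so cross-file edges saved before a subgraph is deleted still point at valid IDs after the file is re-imported. Below is a minimal TypeScript sketch (not part of the package) reproducing that scheme; the `CLASS_DECLARATION` string and the file path are hypothetical example values.

```typescript
// Sketch: the deterministic ID scheme shown in the diff above.
import crypto from 'crypto';

const deterministicId = (coreType: string, filePath: string, name: string, parentId?: string): string => {
  const parts = parentId ? [coreType, filePath, parentId, name] : [coreType, filePath, name];
  const hash = crypto.createHash('sha256').update(parts.join('::')).digest('hex').substring(0, 16);
  return `${coreType}:${hash}`;
};

// Parsing the same declaration twice yields the same ID, so edges captured
// before deletion can be re-attached after re-import.
const first = deterministicId('CLASS_DECLARATION', '/src/user.service.ts', 'UserService'); // hypothetical values
const second = deterministicId('CLASS_DECLARATION', '/src/user.service.ts', 'UserService');
console.log(first === second); // true
```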
@@ -3,12 +3,16 @@
  * Parses TypeScript/NestJS projects and builds Neo4j graph
  */
  import { writeFileSync } from 'fs';
- import { join } from 'path';
+ import { stat } from 'fs/promises';
+ import { join, resolve } from 'path';
+ import { glob } from 'glob';
  import { z } from 'zod';
+ import { EXCLUDE_PATTERNS_GLOB } from '../../constants.js';
  import { CORE_TYPESCRIPT_SCHEMA } from '../../core/config/schema.js';
  import { EmbeddingsService } from '../../core/embeddings/embeddings.service.js';
  import { ParserFactory } from '../../core/parsers/parser-factory.js';
- import { Neo4jService } from '../../storage/neo4j/neo4j.service.js';
+ import { Neo4jService, QUERIES } from '../../storage/neo4j/neo4j.service.js';
+ import { hashFile } from '../../utils/file-utils.js';
  import { TOOL_NAMES, TOOL_METADATA, DEFAULTS, FILE_PATHS, LOG_CONFIG } from '../constants.js';
  import { GraphGeneratorHandler } from '../handlers/graph-generator.handler.js';
  import { createErrorResponse, createSuccessResponse, formatParseSuccess, formatParsePartialSuccess, debugLog, } from '../utils.js';
@@ -38,58 +42,39 @@ export const createParseTypescriptProjectTool = (server) => {
  clearExisting,
  projectType,
  });
- // Create parser with auto-detection or specified type
- let parser;
- if (projectType === 'auto') {
- parser = await ParserFactory.createParserWithAutoDetection(projectPath, tsconfigPath);
- }
- else {
- parser = ParserFactory.createParser({
- workspacePath: projectPath,
- tsConfigPath: tsconfigPath,
- projectType: projectType,
- });
- }
- // Parse the workspace
- const { nodes, edges } = await parser.parseWorkspace();
- const { nodes: cleanNodes, edges: cleanEdges } = parser.exportToJson();
- console.log(`Parsed ${cleanNodes.length} nodes / ${cleanEdges.length} edges`);
- await debugLog('Parsing completed', {
- nodeCount: cleanNodes.length,
- edgeCount: cleanEdges.length,
+ const neo4jService = new Neo4jService();
+ const embeddingsService = new EmbeddingsService();
+ const graphGeneratorHandler = new GraphGeneratorHandler(neo4jService, embeddingsService);
+ const graphData = await parseProject({
+ neo4jService,
+ tsconfigPath,
+ projectPath,
+ clearExisting,
+ projectType,
  });
- // Create graph JSON output
+ const { nodes, edges, savedCrossFileEdges } = graphData;
+ console.log(`Parsed ${nodes.length} nodes / ${edges.length} edges`);
+ await debugLog('Parsing completed', { nodeCount: nodes.length, edgeCount: edges.length });
  const outputPath = join(projectPath, FILE_PATHS.graphOutput);
- // Get detected framework schemas from parser
- const frameworkSchemas = parser['frameworkSchemas']?.map((s) => s.name) ?? ['Auto-detected'];
- const graphData = {
- nodes: cleanNodes,
- edges: cleanEdges,
- metadata: {
- coreSchema: CORE_TYPESCRIPT_SCHEMA.name,
- frameworkSchemas,
- projectType,
- generated: new Date().toISOString(),
- },
- };
  writeFileSync(outputPath, JSON.stringify(graphData, null, LOG_CONFIG.jsonIndentation));
  console.log(`Graph data written to ${outputPath}`);
- // Attempt to import to Neo4j
  try {
- const neo4jService = new Neo4jService();
- const embeddingsService = new EmbeddingsService();
- const graphGeneratorHandler = new GraphGeneratorHandler(neo4jService, embeddingsService);
  const result = await graphGeneratorHandler.generateGraph(outputPath, DEFAULTS.batchSize, clearExisting);
+ // Recreate cross-file edges after incremental parse
+ if (!clearExisting && savedCrossFileEdges.length > 0) {
+ await debugLog('Recreating cross-file edges', { edgesToRecreate: savedCrossFileEdges.length });
+ const recreateResult = await neo4jService.run(QUERIES.RECREATE_CROSS_FILE_EDGES, { edges: savedCrossFileEdges });
+ const recreatedCount = recreateResult[0]?.recreatedCount ?? 0;
+ await debugLog('Cross-file edges recreated', { recreatedCount, expected: savedCrossFileEdges.length });
+ }
  console.log('Graph generation completed:', result);
  await debugLog('Neo4j import completed', result);
- const successMessage = formatParseSuccess(cleanNodes.length, cleanEdges.length, result);
- return createSuccessResponse(successMessage);
+ return createSuccessResponse(formatParseSuccess(nodes.length, edges.length, result));
  }
  catch (neo4jError) {
  console.error('Neo4j import failed:', neo4jError);
  await debugLog('Neo4j import failed', neo4jError);
- const partialSuccessMessage = formatParsePartialSuccess(cleanNodes.length, cleanEdges.length, outputPath, neo4jError.message);
- return createSuccessResponse(partialSuccessMessage);
+ return createSuccessResponse(formatParsePartialSuccess(nodes.length, edges.length, outputPath, neo4jError.message));
  }
  }
  catch (error) {
@@ -99,3 +84,98 @@ export const createParseTypescriptProjectTool = (server) => {
  }
  });
  };
+ const parseProject = async (options) => {
+ const { neo4jService, tsconfigPath, projectPath, clearExisting = true, projectType = 'auto' } = options;
+ const parser = projectType === 'auto'
+ ? await ParserFactory.createParserWithAutoDetection(projectPath, tsconfigPath)
+ : ParserFactory.createParser({
+ workspacePath: projectPath,
+ tsConfigPath: tsconfigPath,
+ projectType: projectType,
+ });
+ let incrementalStats;
+ let savedCrossFileEdges = [];
+ if (clearExisting) {
+ // Full rebuild: parse all files
+ await parser.parseWorkspace();
+ }
+ else {
+ // Incremental: detect changes and parse only affected files
+ const { filesToReparse, filesToDelete } = await detectChangedFiles(projectPath, neo4jService);
+ incrementalStats = { filesReparsed: filesToReparse.length, filesDeleted: filesToDelete.length };
+ await debugLog('Incremental change detection', { filesToReparse, filesToDelete });
+ const filesToRemoveFromGraph = [...filesToDelete, ...filesToReparse];
+ if (filesToRemoveFromGraph.length > 0) {
+ // Save cross-file edges before deletion (they'll be recreated after import)
+ savedCrossFileEdges = await getCrossFileEdges(neo4jService, filesToRemoveFromGraph);
+ await debugLog('Saved cross-file edges', { count: savedCrossFileEdges.length, edges: savedCrossFileEdges });
+ await deleteSourceFileSubgraphs(neo4jService, filesToRemoveFromGraph);
+ }
+ if (filesToReparse.length > 0) {
+ await debugLog('Incremental parse starting', { filesChanged: filesToReparse.length, filesDeleted: filesToDelete.length });
+ // Load existing nodes from Neo4j for edge target matching
+ const existingNodes = await loadExistingNodesForEdgeDetection(neo4jService, filesToRemoveFromGraph);
+ await debugLog('Loaded existing nodes for edge detection', { count: existingNodes.length });
+ parser.setExistingNodes(existingNodes);
+ await parser.parseWorkspace(filesToReparse);
+ }
+ else {
+ await debugLog('Incremental parse: no changes detected');
+ }
+ }
+ const { nodes, edges } = parser.exportToJson();
+ const frameworkSchemas = parser['frameworkSchemas']?.map((s) => s.name) ?? ['Auto-detected'];
+ return {
+ nodes,
+ edges,
+ savedCrossFileEdges,
+ metadata: {
+ coreSchema: CORE_TYPESCRIPT_SCHEMA.name,
+ frameworkSchemas,
+ projectType,
+ generated: new Date().toISOString(),
+ ...(incrementalStats && { incremental: incrementalStats }),
+ },
+ };
+ };
+ const deleteSourceFileSubgraphs = async (neo4jService, filePaths) => {
+ await neo4jService.run(QUERIES.DELETE_SOURCE_FILE_SUBGRAPHS, { filePaths });
+ };
+ const loadExistingNodesForEdgeDetection = async (neo4jService, excludeFilePaths) => {
+ const queryResult = await neo4jService.run(QUERIES.GET_EXISTING_NODES_FOR_EDGE_DETECTION, { excludeFilePaths });
+ return queryResult;
+ };
+ const getCrossFileEdges = async (neo4jService, filePaths) => {
+ const queryResult = await neo4jService.run(QUERIES.GET_CROSS_FILE_EDGES, { filePaths });
+ return queryResult;
+ };
+ const detectChangedFiles = async (projectPath, neo4jService) => {
+ const relativeFiles = await glob('**/*.ts', { cwd: projectPath, ignore: EXCLUDE_PATTERNS_GLOB });
+ const currentFiles = new Set(relativeFiles.map((f) => resolve(projectPath, f)));
+ const queryResult = await neo4jService.run(QUERIES.GET_SOURCE_FILE_TRACKING_INFO);
+ const indexedFiles = queryResult;
+ const indexedMap = new Map(indexedFiles.map((f) => [f.filePath, f]));
+ const filesToReparse = [];
+ const filesToDelete = [];
+ for (const absolutePath of currentFiles) {
+ const indexed = indexedMap.get(absolutePath);
+ if (!indexed) {
+ filesToReparse.push(absolutePath);
+ continue;
+ }
+ const fileStats = await stat(absolutePath);
+ if (fileStats.mtimeMs === indexed.mtime && fileStats.size === indexed.size) {
+ continue;
+ }
+ const currentHash = await hashFile(absolutePath);
+ if (currentHash !== indexed.contentHash) {
+ filesToReparse.push(absolutePath);
+ }
+ }
+ for (const indexedPath of indexedMap.keys()) {
+ if (!currentFiles.has(indexedPath)) {
+ filesToDelete.push(indexedPath);
+ }
+ }
+ return { filesToReparse, filesToDelete };
+ };
package/dist/mcp/utils.js CHANGED
@@ -2,22 +2,8 @@
  * MCP Server Utility Functions
  * Common utility functions used across the MCP server
  */
- import fs from 'fs/promises';
- import path from 'path';
- import { FILE_PATHS, LOG_CONFIG, MESSAGES } from './constants.js';
- /**
- * Debug logging utility
- */
- export const debugLog = async (message, data) => {
- const timestamp = new Date().toISOString();
- const logEntry = `[${timestamp}] ${message}\n${data ? JSON.stringify(data, null, LOG_CONFIG.jsonIndentation) : ''}\n${LOG_CONFIG.logSeparator}\n`;
- try {
- await fs.appendFile(path.join(process.cwd(), FILE_PATHS.debugLog), logEntry);
- }
- catch (error) {
- console.error('Failed to write debug log:', error);
- }
- };
+ import { MESSAGES } from './constants.js';
+ export { debugLog } from '../utils/file-utils.js';
  /**
  * Standard error response format for MCP tools
  */
@@ -81,15 +81,70 @@ export const QUERIES = {
  RETURN {
  id: node.id,
  labels: labels(node),
- properties: apoc.map.removeKeys(properties(node), ['embedding'])
+ properties: apoc.map.removeKeys(properties(node), ['embedding', 'contentHash', 'mtime', 'size'])
  } as node, score
  ORDER BY score DESC
  `,
  // Check if index exists
  CHECK_VECTOR_INDEX: `
- SHOW INDEXES YIELD name, type
+ SHOW INDEXES YIELD name, type
  WHERE name = 'node_embedding_idx' AND type = 'VECTOR'
  RETURN count(*) > 0 as exists
+ `,
+ GET_SOURCE_FILE_TRACKING_INFO: `
+ MATCH (sf:SourceFile)
+ RETURN sf.filePath AS filePath, sf.mtime AS mtime, sf.size AS size, sf.contentHash AS contentHash
+ `,
+ // Get cross-file edges before deletion (edges where one endpoint is outside the subgraph)
+ // These will be recreated after import using deterministic IDs
+ GET_CROSS_FILE_EDGES: `
+ MATCH (sf:SourceFile)
+ WHERE sf.filePath IN $filePaths
+ OPTIONAL MATCH (sf)-[*]->(child)
+ WITH collect(DISTINCT sf) + collect(DISTINCT child) AS nodesToDelete
+ UNWIND nodesToDelete AS n
+ MATCH (n)-[r]-(other)
+ WHERE NOT other IN nodesToDelete
+ RETURN DISTINCT
+ startNode(r).id AS startNodeId,
+ endNode(r).id AS endNodeId,
+ type(r) AS edgeType,
+ properties(r) AS edgeProperties
+ `,
+ // Delete source file subgraphs (nodes and all their edges)
+ DELETE_SOURCE_FILE_SUBGRAPHS: `
+ MATCH (sf:SourceFile)
+ WHERE sf.filePath IN $filePaths
+ OPTIONAL MATCH (sf)-[*]->(child)
+ DETACH DELETE sf, child
+ `,
+ // Recreate cross-file edges after import (uses deterministic IDs)
+ RECREATE_CROSS_FILE_EDGES: `
+ UNWIND $edges AS edge
+ MATCH (startNode {id: edge.startNodeId})
+ MATCH (endNode {id: edge.endNodeId})
+ CALL apoc.create.relationship(startNode, edge.edgeType, edge.edgeProperties, endNode) YIELD rel
+ RETURN count(rel) AS recreatedCount
+ `,
+ // Clean up dangling edges (edges pointing to non-existent nodes)
+ // Run after incremental parse to remove edges to renamed/deleted nodes
+ CLEANUP_DANGLING_EDGES: `
+ MATCH ()-[r]->()
+ WHERE startNode(r) IS NULL OR endNode(r) IS NULL
+ DELETE r
+ RETURN count(r) AS deletedCount
+ `,
+ // Get existing nodes (excluding files being reparsed) for edge target matching
+ // Returns minimal info needed for edge detection: id, name, coreType, semanticType
+ GET_EXISTING_NODES_FOR_EDGE_DETECTION: `
+ MATCH (sf:SourceFile)-[*]->(n)
+ WHERE NOT sf.filePath IN $excludeFilePaths
+ RETURN n.id AS id,
+ n.name AS name,
+ n.coreType AS coreType,
+ n.semanticType AS semanticType,
+ labels(n) AS labels,
+ sf.filePath AS filePath
  `,
  EXPLORE_ALL_CONNECTIONS: (maxDepth = MAX_TRAVERSAL_DEPTH, direction = 'BOTH', relationshipTypes) => {
  const safeMaxDepth = Math.min(Math.max(maxDepth, 1), MAX_TRAVERSAL_DEPTH);
@@ -123,7 +178,7 @@ export const QUERIES = {
  RETURN {
  id: connected.id,
  labels: labels(connected),
- properties: apoc.map.removeKeys(properties(connected), ['embedding'])
+ properties: apoc.map.removeKeys(properties(connected), ['embedding', 'contentHash', 'mtime', 'size'])
  } as node,
  depth,
  [rel in relationships(path) | {
@@ -148,7 +203,7 @@ export const QUERIES = {
  startNode: {
  id: start.id,
  labels: labels(start),
- properties: apoc.map.removeKeys(properties(start), ['embedding'])
+ properties: apoc.map.removeKeys(properties(start), ['embedding', 'contentHash', 'mtime', 'size'])
  },
  connections: connections,
  totalConnections: size(allConnections),
@@ -156,7 +211,7 @@ export const QUERIES = {
  nodes: [conn in connections | conn.node] + [{
  id: start.id,
  labels: labels(start),
- properties: apoc.map.removeKeys(properties(start), ['embedding'])
+ properties: apoc.map.removeKeys(properties(start), ['embedding', 'contentHash', 'mtime', 'size'])
  }],
  relationships: reduce(rels = [], conn in connections | rels + conn.relationshipChain)
  }
@@ -252,7 +307,7 @@ export const QUERIES = {
  node: {
  id: neighbor.id,
  labels: labels(neighbor),
- properties: apoc.map.removeKeys(properties(neighbor), ['embedding'])
+ properties: apoc.map.removeKeys(properties(neighbor), ['embedding', 'contentHash', 'mtime', 'size'])
  },
  relationship: {
  type: type(rel),
@@ -0,0 +1,20 @@
+ import * as crypto from 'crypto';
+ import * as fs from 'fs/promises';
+ import * as path from 'path';
+ const DEBUG_LOG_FILE = 'debug-search.log';
+ const LOG_SEPARATOR = '---';
+ const JSON_INDENT = 2;
+ export const hashFile = async (filePath) => {
+ const content = await fs.readFile(filePath);
+ return crypto.createHash('sha256').update(content).digest('hex');
+ };
+ export const debugLog = async (message, data) => {
+ const timestamp = new Date().toISOString();
+ const logEntry = `[${timestamp}] ${message}\n${data ? JSON.stringify(data, null, JSON_INDENT) : ''}\n${LOG_SEPARATOR}\n`;
+ try {
+ await fs.appendFile(path.join(process.cwd(), DEBUG_LOG_FILE), logEntry);
+ }
+ catch (error) {
+ console.error('Failed to write debug log:', error);
+ }
+ };
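A brief usage sketch (not part of the package) for the two helpers this new module exports; the import specifier and the file path are illustrative assumptions.

```typescript
// Sketch: using the shared file utilities added in this release.
import { hashFile, debugLog } from './file-utils.js'; // path is illustrative

const main = async (): Promise<void> => {
  // sha256 hex digest of the file's contents, as used for change detection
  const digest = await hashFile('./src/index.ts');
  // appends a timestamped entry to debug-search.log in the working directory
  await debugLog('Hashed file', { digest });
};

main().catch(console.error);
```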
package/package.json CHANGED
@@ -1,8 +1,9 @@
  {
  "name": "code-graph-context",
- "version": "0.1.0",
+ "version": "1.0.0",
  "description": "MCP server that builds code graphs to provide rich context to LLMs",
  "type": "module",
+ "homepage": "https://github.com/drewdrewH/code-graph-context#readme",
  "repository": {
  "type": "git",
  "url": "git+https://github.com/drewdrewH/code-graph-context.git"