code-graph-context 2.0.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +156 -2
- package/dist/constants.js +167 -0
- package/dist/core/config/fairsquare-framework-schema.js +9 -7
- package/dist/core/config/nestjs-framework-schema.js +60 -43
- package/dist/core/config/schema.js +41 -2
- package/dist/core/embeddings/natural-language-to-cypher.service.js +166 -110
- package/dist/core/parsers/typescript-parser.js +1043 -747
- package/dist/core/parsers/workspace-parser.js +177 -194
- package/dist/core/utils/code-normalizer.js +299 -0
- package/dist/core/utils/file-change-detection.js +17 -2
- package/dist/core/utils/file-utils.js +40 -5
- package/dist/core/utils/graph-factory.js +161 -0
- package/dist/core/utils/shared-utils.js +79 -0
- package/dist/core/workspace/workspace-detector.js +59 -5
- package/dist/mcp/constants.js +141 -8
- package/dist/mcp/handlers/graph-generator.handler.js +1 -0
- package/dist/mcp/handlers/incremental-parse.handler.js +3 -6
- package/dist/mcp/handlers/parallel-import.handler.js +136 -0
- package/dist/mcp/handlers/streaming-import.handler.js +14 -59
- package/dist/mcp/mcp.server.js +1 -1
- package/dist/mcp/services/job-manager.js +5 -8
- package/dist/mcp/services/watch-manager.js +7 -18
- package/dist/mcp/tools/detect-dead-code.tool.js +413 -0
- package/dist/mcp/tools/detect-duplicate-code.tool.js +450 -0
- package/dist/mcp/tools/impact-analysis.tool.js +20 -4
- package/dist/mcp/tools/index.js +4 -0
- package/dist/mcp/tools/parse-typescript-project.tool.js +15 -14
- package/dist/mcp/workers/chunk-worker-pool.js +196 -0
- package/dist/mcp/workers/chunk-worker.types.js +4 -0
- package/dist/mcp/workers/chunk.worker.js +89 -0
- package/dist/mcp/workers/parse-coordinator.js +183 -0
- package/dist/mcp/workers/worker.pool.js +54 -0
- package/dist/storage/neo4j/neo4j.service.js +190 -10
- package/package.json +1 -1

package/dist/core/parsers/workspace-parser.js

@@ -2,14 +2,16 @@
  * Workspace Parser
  * Orchestrates parsing of multi-package monorepos
  */
-import crypto from 'crypto';
 import path from 'path';
 import { glob } from 'glob';
+import { EXCLUDE_PATTERNS_GLOB } from '../../constants.js';
+import { FAIRSQUARE_FRAMEWORK_SCHEMA } from '../config/fairsquare-framework-schema.js';
+import { NESTJS_FRAMEWORK_SCHEMA } from '../config/nestjs-framework-schema.js';
 import { debugLog } from '../utils/file-utils.js';
-import {
-import { createFrameworkEdgeData } from '../utils/edge-factory.js';
+import { createFrameworkEdgeData } from '../utils/graph-factory.js';
 import { resolveProjectId } from '../utils/project-id.js';
 import { ParserFactory } from './parser-factory.js';
+import { TypeScriptParser } from './typescript-parser.js';
 export class WorkspaceParser {
     config;
     projectId;
@@ -25,6 +27,11 @@ export class WorkspaceParser {
     accumulatedParsedNodes = new Map();
     // Framework schemas detected from packages (for edge enhancements)
     frameworkSchemas = [];
+    // Track already exported items to avoid returning duplicates in streaming mode
+    exportedNodeIds = new Set();
+    exportedEdgeIds = new Set();
+    // Resolver parser for delegating edge resolution to TypeScriptParser
+    resolverParser = null;
     constructor(config, projectId, lazyLoad = true, projectType = 'auto') {
         this.config = config;
         this.projectId = resolveProjectId(config.rootPath, projectId);
@@ -72,9 +79,10 @@
      */
     async discoverPackageFiles(pkg) {
         // Include both .ts and .tsx files
+        // Use EXCLUDE_PATTERNS_GLOB for consistency with detectChangedFiles and TypeScriptParser
         const pattern = path.join(pkg.path, '**/*.{ts,tsx}');
         const files = await glob(pattern, {
-            ignore:
+            ignore: EXCLUDE_PATTERNS_GLOB,
             absolute: true,
         });
         return files;
@@ -93,7 +101,7 @@
      * Injects the shared context so context is shared across all packages.
      */
     async createParserForPackage(pkg) {
-        const tsConfigPath = pkg.tsConfigPath
+        const tsConfigPath = pkg.tsConfigPath ?? path.join(pkg.path, 'tsconfig.json');
         let parser;
         if (this.projectType === 'auto') {
             // Auto-detect framework for this specific package
@@ -121,22 +129,22 @@
      * Parse a single package and return its results
      */
     async parsePackage(pkg) {
-
+        await debugLog(`Parsing package: ${pkg.name}`);
         const parser = await this.createParserForPackage(pkg);
         // Discover files for this package
         const files = await this.discoverPackageFiles(pkg);
         if (files.length === 0) {
-
+            await debugLog(`No TypeScript files found in ${pkg.name}`);
             return { nodes: [], edges: [] };
         }
-
+        await debugLog(`${pkg.name}: ${files.length} files to parse`);
         // Parse all files in this package
         const result = await parser.parseChunk(files, true); // Skip edge resolution for now
         // Add package name to all nodes
         for (const node of result.nodes) {
             node.properties.packageName = pkg.name;
         }
-
+        await debugLog(`${pkg.name}: ${result.nodes.length} nodes, ${result.edges.length} edges`);
         return result;
     }
     /**
@@ -174,10 +182,11 @@
         for (const [nodeId, parsedNode] of innerParsedNodes) {
             this.accumulatedParsedNodes.set(nodeId, {
                 id: parsedNode.id,
+                coreType: parsedNode.coreType, // Needed for detection patterns
                 semanticType: parsedNode.semanticType,
                 properties: {
                     name: parsedNode.properties.name,
-                    context: parsedNode.properties.context, // Contains propertyTypes
+                    context: parsedNode.properties.context, // Contains propertyTypes, dependencies
                 },
             });
         }
@@ -195,7 +204,22 @@
                 // Continue with other packages
             }
         }
-        return
+        // Only return nodes/edges that haven't been exported yet (prevents duplicate imports in streaming mode)
+        const newNodes = allNodes.filter((node) => {
+            if (!this.exportedNodeIds.has(node.id)) {
+                this.exportedNodeIds.add(node.id);
+                return true;
+            }
+            return false;
+        });
+        const newEdges = allEdges.filter((edge) => {
+            if (!this.exportedEdgeIds.has(edge.id)) {
+                this.exportedEdgeIds.add(edge.id);
+                return true;
+            }
+            return false;
+        });
+        return { nodes: newNodes, edges: newEdges };
     }
     /**
      * Find which package a file belongs to
@@ -224,8 +248,7 @@
                 edges: result.edges.length,
             });
         }
-
-        console.log(`  Total: ${allNodes.length} nodes, ${allEdges.length} edges`);
+        await debugLog(`Workspace parsing complete! Total: ${allNodes.length} nodes, ${allEdges.length} edges`);
         return {
             nodes: allNodes,
             edges: allEdges,
@@ -239,6 +262,8 @@
     clearParsedData() {
         this.parsedNodes.clear();
         this.parsedEdges.clear();
+        this.exportedNodeIds.clear();
+        this.exportedEdgeIds.clear();
     }
     /**
      * Add existing nodes for cross-package edge resolution
@@ -248,6 +273,25 @@
             this.parsedNodes.set(node.id, node);
         }
     }
+    /**
+     * Add nodes to accumulatedParsedNodes for edge enhancement.
+     * Converts Neo4jNode to LightweightParsedNode format.
+     */
+    addParsedNodesFromChunk(nodes) {
+        for (const node of nodes) {
+            this.parsedNodes.set(node.id, node);
+            // Also add to accumulatedParsedNodes for edge enhancement detection
+            this.accumulatedParsedNodes.set(node.id, {
+                id: node.id,
+                coreType: node.properties.coreType,
+                semanticType: node.properties.semanticType,
+                properties: {
+                    name: node.properties.name,
+                    context: node.properties.context,
+                },
+            });
+        }
+    }
     /**
      * Get current counts for progress reporting
      */
@@ -259,91 +303,126 @@
         };
     }
     /**
-     *
-     *
+     * Set whether to defer edge enhancements.
+     * WorkspaceParser always defers edge enhancements to applyEdgeEnhancementsManually(),
+     * so this is a no-op for interface compliance.
      */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            const relationshipWeight = coreEdgeSchema?.relationshipWeight ?? 0.5;
-            // Generate a unique edge ID
-            const edgeHash = crypto
-                .createHash('md5')
-                .update(`${deferred.sourceNodeId}-${deferred.edgeType}-${targetNode.id}`)
-                .digest('hex')
-                .substring(0, 12);
-            const edge = {
-                id: `${this.projectId}:${deferred.edgeType}:${edgeHash}`,
-                type: deferred.edgeType,
-                startNodeId: deferred.sourceNodeId,
-                endNodeId: targetNode.id,
-                properties: {
-                    coreType: coreEdgeType,
-                    projectId: this.projectId,
-                    source: 'ast',
-                    confidence: 1.0,
-                    relationshipWeight,
-                    filePath,
-                    createdAt: new Date().toISOString(),
-                },
-            };
-            resolvedEdges.push(edge);
-        }
-        else {
-            // Track unresolved by type
-            if (deferred.edgeType === 'IMPORTS') {
-                unresolvedImports.push(deferred.targetName);
+    setDeferEdgeEnhancements(_defer) {
+        // No-op: WorkspaceParser always handles edge enhancements at the end
+    }
+    /**
+     * Load framework schemas for a specific project type.
+     * Used by parallel parsing coordinator to load schemas before edge enhancement.
+     * In sequential parsing, schemas are accumulated from inner parsers instead.
+     */
+    loadFrameworkSchemasForType(projectType) {
+        // Load schemas based on project type (same logic as ParserFactory.selectFrameworkSchemas)
+        switch (projectType) {
+            case 'nestjs':
+                if (!this.frameworkSchemas.some((s) => s.name === NESTJS_FRAMEWORK_SCHEMA.name)) {
+                    this.frameworkSchemas.push(NESTJS_FRAMEWORK_SCHEMA);
+                }
+                break;
+            case 'fairsquare':
+                if (!this.frameworkSchemas.some((s) => s.name === FAIRSQUARE_FRAMEWORK_SCHEMA.name)) {
+                    this.frameworkSchemas.push(FAIRSQUARE_FRAMEWORK_SCHEMA);
                 }
-
-
+                break;
+            case 'both':
+                if (!this.frameworkSchemas.some((s) => s.name === FAIRSQUARE_FRAMEWORK_SCHEMA.name)) {
+                    this.frameworkSchemas.push(FAIRSQUARE_FRAMEWORK_SCHEMA);
                 }
-
-
+                if (!this.frameworkSchemas.some((s) => s.name === NESTJS_FRAMEWORK_SCHEMA.name)) {
+                    this.frameworkSchemas.push(NESTJS_FRAMEWORK_SCHEMA);
                 }
+                break;
+            // 'vanilla' and 'auto' - no framework schemas
+        }
+        debugLog('WorkspaceParser loaded framework schemas', { count: this.frameworkSchemas.length, projectType });
+    }
+    /**
+     * Get serialized shared context for parallel parsing.
+     * Converts Maps to arrays for structured clone compatibility.
+     */
+    getSerializedSharedContext() {
+        const serialized = [];
+        for (const [key, value] of this.sharedContext) {
+            if (value instanceof Map) {
+                serialized.push([key, Array.from(value.entries())]);
+            }
+            else {
+                serialized.push([key, value]);
             }
         }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return serialized;
+    }
+    /**
+     * Merge serialized shared context from workers.
+     * Handles Map merging by combining entries.
+     */
+    mergeSerializedSharedContext(serialized) {
+        for (const [key, value] of serialized) {
+            if (Array.isArray(value) && value.length > 0 && Array.isArray(value[0])) {
+                // It's a serialized Map - merge with existing
+                const existingMap = this.sharedContext.get(key);
+                const newMap = existingMap ?? new Map();
+                for (const [k, v] of value) {
+                    newMap.set(k, v);
+                }
+                this.sharedContext.set(key, newMap);
+            }
+            else {
+                // Simple value - just set it
+                this.sharedContext.set(key, value);
+            }
+        }
+    }
+    /**
+     * Get deferred edges for cross-chunk resolution.
+     * Returns serializable format for worker thread transfer.
+     */
+    getDeferredEdges() {
+        return this.accumulatedDeferredEdges.map((e) => ({
+            edgeType: e.edgeType,
+            sourceNodeId: e.sourceNodeId,
+            targetName: e.targetName,
+            targetType: e.targetType,
+            targetFilePath: e.targetFilePath,
+        }));
+    }
+    /**
+     * Merge deferred edges from workers for resolution.
+     */
+    mergeDeferredEdges(edges) {
+        for (const e of edges) {
+            this.accumulatedDeferredEdges.push({
+                edgeType: e.edgeType,
+                sourceNodeId: e.sourceNodeId,
+                targetName: e.targetName,
+                targetType: e.targetType,
+                targetFilePath: e.targetFilePath,
+            });
+        }
+    }
+    /**
+     * Resolve accumulated deferred edges against all parsed nodes
+     * Call this after all chunks have been parsed
+     */
+    async resolveDeferredEdges() {
+        if (this.accumulatedDeferredEdges.length === 0) {
+            return [];
+        }
+        // Create or reuse resolver parser - delegates all resolution logic to TypeScriptParser
+        // Uses createResolver() which doesn't require ts-morph initialization
+        if (!this.resolverParser) {
+            this.resolverParser = TypeScriptParser.createResolver(this.projectId);
+        }
+        // Populate resolver with accumulated nodes (builds CALLS indexes automatically)
+        this.resolverParser.addParsedNodesFromChunk(Array.from(this.parsedNodes.values()));
+        // Transfer deferred edges to resolver
+        this.resolverParser.mergeDeferredEdges(this.accumulatedDeferredEdges);
+        // Delegate resolution to TypeScriptParser
+        const resolvedEdges = await this.resolverParser.resolveDeferredEdges();
         // Clear accumulated deferred edges after resolution
         this.accumulatedDeferredEdges = [];
         return resolvedEdges;
@@ -358,18 +437,18 @@
      */
     async applyEdgeEnhancementsManually() {
         if (this.accumulatedParsedNodes.size === 0) {
-
+            await debugLog('WorkspaceParser: No accumulated nodes for edge enhancements');
             return [];
         }
         if (this.frameworkSchemas.length === 0) {
-
+            await debugLog('WorkspaceParser: No framework schemas for edge enhancements');
             return [];
         }
-
+        await debugLog(`WorkspaceParser: Applying edge enhancements on ${this.accumulatedParsedNodes.size} accumulated nodes across all packages...`);
         // Pre-index nodes by semantic type for O(1) lookups
         const nodesBySemanticType = new Map();
         for (const [nodeId, node] of this.accumulatedParsedNodes) {
-            const semanticType = node.semanticType
+            const semanticType = node.semanticType ?? 'unknown';
             if (!nodesBySemanticType.has(semanticType)) {
                 nodesBySemanticType.set(semanticType, new Map());
             }
@@ -379,7 +458,7 @@
         for (const [type, nodes] of nodesBySemanticType) {
             typeCounts[type] = nodes.size;
         }
-
+        await debugLog(`Node distribution by semantic type: ${JSON.stringify(typeCounts)}`);
         const newEdges = [];
         const edgeCountBefore = this.parsedEdges.size;
         // Apply edge enhancements from all framework schemas
@@ -390,7 +469,7 @@
             }
         }
         const newEdgeCount = this.parsedEdges.size - edgeCountBefore;
-
+        await debugLog(`Created ${newEdgeCount} cross-package edges from edge enhancements`);
         return newEdges;
     }
     /**
@@ -454,100 +533,4 @@
             properties,
         };
     }
-    /**
-     * Find a node by name and type from accumulated nodes
-     * For SourceFiles, implements smart import resolution:
-     * - Direct file path match
-     * - Relative import resolution (./foo, ../bar)
-     * - Scoped package imports (@workspace/ui, @ui/core)
-     *
-     * For ClassDeclaration/InterfaceDeclaration with filePath, uses precise matching.
-     */
-    findNodeByNameAndType(name, type, filePath) {
-        const allNodes = [...this.parsedNodes.values()];
-        // If we have a file path and it's not a SourceFile, use precise matching first
-        if (filePath && type !== 'SourceFile') {
-            for (const node of allNodes) {
-                if (node.properties.coreType === type &&
-                    node.properties.name === name &&
-                    node.properties.filePath === filePath) {
-                    return node;
-                }
-            }
-            // If precise match fails, fall through to name-only matching below
-        }
-        // For SOURCE_FILE with import specifier, try multiple matching strategies
-        if (type === 'SourceFile') {
-            // Strategy 1: Direct file path match
-            for (const node of allNodes) {
-                if (node.labels.includes(type) && node.properties.filePath === name) {
-                    return node;
-                }
-            }
-            // Strategy 2: Resolve relative imports (./foo, ../bar)
-            if (name.startsWith('.')) {
-                // Normalize: remove leading ./ or ../
-                const normalizedPath = name.replace(/^\.\.\//, '').replace(/^\.\//, '');
-                // Try matching with common extensions
-                const extensions = ['', '.ts', '.tsx', '/index.ts', '/index.tsx'];
-                for (const ext of extensions) {
-                    const searchPath = normalizedPath + ext;
-                    for (const node of allNodes) {
-                        if (node.labels.includes(type)) {
-                            // Match if filePath ends with the normalized path
-                            if (node.properties.filePath.endsWith(searchPath) ||
-                                node.properties.filePath.endsWith('/' + searchPath)) {
-                                return node;
-                            }
-                        }
-                    }
-                }
-            }
-            // Strategy 3: Workspace package imports (@workspace/ui, @ui/core)
-            if (name.startsWith('@')) {
-                const parts = name.split('/');
-                const packageName = parts.slice(0, 2).join('/'); // @scope/package
-                const subPath = parts.slice(2).join('/'); // rest of path after package name
-                // First, try to find an exact match with subpath
-                if (subPath) {
-                    const extensions = ['', '.ts', '.tsx', '/index.ts', '/index.tsx'];
-                    for (const ext of extensions) {
-                        const searchPath = subPath + ext;
-                        for (const node of allNodes) {
-                            if (node.labels.includes(type) && node.properties.packageName === packageName) {
-                                if (node.properties.filePath.endsWith(searchPath) ||
-                                    node.properties.filePath.endsWith('/' + searchPath)) {
-                                    return node;
-                                }
-                            }
-                        }
-                    }
-                }
-                // For bare package imports (@workspace/ui), look for index files
-                if (!subPath) {
-                    for (const node of allNodes) {
-                        if (node.labels.includes(type) && node.properties.packageName === packageName) {
-                            const fileName = node.properties.name;
-                            if (fileName === 'index.ts' || fileName === 'index.tsx') {
-                                return node;
-                            }
-                        }
-                    }
-                    // If no index file, return any file from the package as a fallback
-                    for (const node of allNodes) {
-                        if (node.labels.includes(type) && node.properties.packageName === packageName) {
-                            return node;
-                        }
-                    }
-                }
-            }
-        }
-        // Default: exact name match (for non-SourceFile types like classes, interfaces)
-        for (const node of allNodes) {
-            if (node.properties.coreType === type && node.properties.name === name) {
-                return node;
-            }
-        }
-        return undefined;
-    }
 }