gitnexus 1.4.1 → 1.4.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +215 -194
- package/dist/cli/ai-context.d.ts +2 -1
- package/dist/cli/ai-context.js +117 -90
- package/dist/cli/analyze.d.ts +2 -0
- package/dist/cli/analyze.js +57 -30
- package/dist/cli/augment.js +1 -1
- package/dist/cli/eval-server.d.ts +1 -1
- package/dist/cli/eval-server.js +14 -6
- package/dist/cli/index.js +18 -25
- package/dist/cli/lazy-action.d.ts +6 -0
- package/dist/cli/lazy-action.js +18 -0
- package/dist/cli/mcp.js +1 -1
- package/dist/cli/setup.js +42 -32
- package/dist/cli/skill-gen.d.ts +26 -0
- package/dist/cli/skill-gen.js +549 -0
- package/dist/cli/status.js +13 -4
- package/dist/cli/tool.d.ts +3 -2
- package/dist/cli/tool.js +48 -13
- package/dist/cli/wiki.js +2 -2
- package/dist/config/ignore-service.d.ts +25 -0
- package/dist/config/ignore-service.js +76 -0
- package/dist/config/supported-languages.d.ts +1 -0
- package/dist/config/supported-languages.js +1 -1
- package/dist/core/augmentation/engine.js +99 -72
- package/dist/core/embeddings/embedder.d.ts +1 -1
- package/dist/core/embeddings/embedder.js +1 -1
- package/dist/core/embeddings/embedding-pipeline.d.ts +3 -3
- package/dist/core/embeddings/embedding-pipeline.js +74 -47
- package/dist/core/embeddings/types.d.ts +1 -1
- package/dist/core/graph/types.d.ts +5 -2
- package/dist/core/ingestion/ast-cache.js +3 -2
- package/dist/core/ingestion/call-processor.d.ts +5 -7
- package/dist/core/ingestion/call-processor.js +430 -283
- package/dist/core/ingestion/call-routing.d.ts +53 -0
- package/dist/core/ingestion/call-routing.js +108 -0
- package/dist/core/ingestion/cluster-enricher.js +16 -16
- package/dist/core/ingestion/constants.d.ts +16 -0
- package/dist/core/ingestion/constants.js +16 -0
- package/dist/core/ingestion/entry-point-scoring.d.ts +2 -1
- package/dist/core/ingestion/entry-point-scoring.js +94 -24
- package/dist/core/ingestion/export-detection.d.ts +18 -0
- package/dist/core/ingestion/export-detection.js +231 -0
- package/dist/core/ingestion/filesystem-walker.js +4 -3
- package/dist/core/ingestion/framework-detection.d.ts +5 -1
- package/dist/core/ingestion/framework-detection.js +48 -8
- package/dist/core/ingestion/heritage-processor.d.ts +13 -5
- package/dist/core/ingestion/heritage-processor.js +109 -55
- package/dist/core/ingestion/import-processor.d.ts +16 -20
- package/dist/core/ingestion/import-processor.js +202 -696
- package/dist/core/ingestion/language-config.d.ts +46 -0
- package/dist/core/ingestion/language-config.js +167 -0
- package/dist/core/ingestion/mro-processor.d.ts +45 -0
- package/dist/core/ingestion/mro-processor.js +369 -0
- package/dist/core/ingestion/named-binding-extraction.d.ts +61 -0
- package/dist/core/ingestion/named-binding-extraction.js +363 -0
- package/dist/core/ingestion/parsing-processor.d.ts +3 -11
- package/dist/core/ingestion/parsing-processor.js +85 -181
- package/dist/core/ingestion/pipeline.d.ts +5 -1
- package/dist/core/ingestion/pipeline.js +192 -116
- package/dist/core/ingestion/process-processor.js +2 -1
- package/dist/core/ingestion/resolution-context.d.ts +53 -0
- package/dist/core/ingestion/resolution-context.js +132 -0
- package/dist/core/ingestion/resolvers/csharp.d.ts +22 -0
- package/dist/core/ingestion/resolvers/csharp.js +109 -0
- package/dist/core/ingestion/resolvers/go.d.ts +19 -0
- package/dist/core/ingestion/resolvers/go.js +42 -0
- package/dist/core/ingestion/resolvers/index.d.ts +18 -0
- package/dist/core/ingestion/resolvers/index.js +13 -0
- package/dist/core/ingestion/resolvers/jvm.d.ts +23 -0
- package/dist/core/ingestion/resolvers/jvm.js +87 -0
- package/dist/core/ingestion/resolvers/php.d.ts +15 -0
- package/dist/core/ingestion/resolvers/php.js +35 -0
- package/dist/core/ingestion/resolvers/python.d.ts +19 -0
- package/dist/core/ingestion/resolvers/python.js +52 -0
- package/dist/core/ingestion/resolvers/ruby.d.ts +12 -0
- package/dist/core/ingestion/resolvers/ruby.js +15 -0
- package/dist/core/ingestion/resolvers/rust.d.ts +15 -0
- package/dist/core/ingestion/resolvers/rust.js +73 -0
- package/dist/core/ingestion/resolvers/standard.d.ts +28 -0
- package/dist/core/ingestion/resolvers/standard.js +123 -0
- package/dist/core/ingestion/resolvers/utils.d.ts +33 -0
- package/dist/core/ingestion/resolvers/utils.js +122 -0
- package/dist/core/ingestion/symbol-table.d.ts +21 -1
- package/dist/core/ingestion/symbol-table.js +40 -12
- package/dist/core/ingestion/tree-sitter-queries.d.ts +12 -11
- package/dist/core/ingestion/tree-sitter-queries.js +642 -485
- package/dist/core/ingestion/type-env.d.ts +49 -0
- package/dist/core/ingestion/type-env.js +611 -0
- package/dist/core/ingestion/type-extractors/c-cpp.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/c-cpp.js +385 -0
- package/dist/core/ingestion/type-extractors/csharp.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/csharp.js +383 -0
- package/dist/core/ingestion/type-extractors/go.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/go.js +467 -0
- package/dist/core/ingestion/type-extractors/index.d.ts +22 -0
- package/dist/core/ingestion/type-extractors/index.js +31 -0
- package/dist/core/ingestion/type-extractors/jvm.d.ts +3 -0
- package/dist/core/ingestion/type-extractors/jvm.js +681 -0
- package/dist/core/ingestion/type-extractors/php.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/php.js +549 -0
- package/dist/core/ingestion/type-extractors/python.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/python.js +406 -0
- package/dist/core/ingestion/type-extractors/ruby.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/ruby.js +389 -0
- package/dist/core/ingestion/type-extractors/rust.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/rust.js +449 -0
- package/dist/core/ingestion/type-extractors/shared.d.ts +133 -0
- package/dist/core/ingestion/type-extractors/shared.js +703 -0
- package/dist/core/ingestion/type-extractors/swift.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/swift.js +137 -0
- package/dist/core/ingestion/type-extractors/types.d.ts +127 -0
- package/dist/core/ingestion/type-extractors/types.js +1 -0
- package/dist/core/ingestion/type-extractors/typescript.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/typescript.js +494 -0
- package/dist/core/ingestion/utils.d.ts +98 -0
- package/dist/core/ingestion/utils.js +1064 -9
- package/dist/core/ingestion/workers/parse-worker.d.ts +38 -4
- package/dist/core/ingestion/workers/parse-worker.js +251 -359
- package/dist/core/ingestion/workers/worker-pool.js +8 -0
- package/dist/core/{kuzu → lbug}/csv-generator.d.ts +1 -1
- package/dist/core/{kuzu → lbug}/csv-generator.js +20 -4
- package/dist/core/{kuzu/kuzu-adapter.d.ts → lbug/lbug-adapter.d.ts} +19 -19
- package/dist/core/{kuzu/kuzu-adapter.js → lbug/lbug-adapter.js} +82 -82
- package/dist/core/{kuzu → lbug}/schema.d.ts +4 -4
- package/dist/core/{kuzu → lbug}/schema.js +304 -289
- package/dist/core/search/bm25-index.d.ts +4 -4
- package/dist/core/search/bm25-index.js +17 -16
- package/dist/core/search/hybrid-search.d.ts +2 -2
- package/dist/core/search/hybrid-search.js +9 -9
- package/dist/core/tree-sitter/parser-loader.js +9 -2
- package/dist/core/wiki/generator.d.ts +4 -52
- package/dist/core/wiki/generator.js +53 -552
- package/dist/core/wiki/graph-queries.d.ts +4 -46
- package/dist/core/wiki/graph-queries.js +103 -282
- package/dist/core/wiki/html-viewer.js +192 -192
- package/dist/core/wiki/llm-client.js +11 -73
- package/dist/core/wiki/prompts.d.ts +8 -52
- package/dist/core/wiki/prompts.js +86 -200
- package/dist/mcp/compatible-stdio-transport.d.ts +25 -0
- package/dist/mcp/compatible-stdio-transport.js +200 -0
- package/dist/mcp/core/{kuzu-adapter.d.ts → lbug-adapter.d.ts} +7 -9
- package/dist/mcp/core/{kuzu-adapter.js → lbug-adapter.js} +77 -79
- package/dist/mcp/local/local-backend.d.ts +7 -6
- package/dist/mcp/local/local-backend.js +176 -147
- package/dist/mcp/resources.js +42 -42
- package/dist/mcp/server.js +18 -19
- package/dist/mcp/tools.js +103 -104
- package/dist/server/api.js +12 -12
- package/dist/server/mcp-http.d.ts +1 -1
- package/dist/server/mcp-http.js +1 -1
- package/dist/storage/repo-manager.d.ts +20 -2
- package/dist/storage/repo-manager.js +55 -1
- package/dist/types/pipeline.d.ts +1 -1
- package/hooks/claude/gitnexus-hook.cjs +238 -155
- package/hooks/claude/pre-tool-use.sh +79 -79
- package/hooks/claude/session-start.sh +42 -42
- package/package.json +99 -96
- package/scripts/patch-tree-sitter-swift.cjs +74 -74
- package/skills/gitnexus-cli.md +82 -82
- package/skills/gitnexus-debugging.md +89 -89
- package/skills/gitnexus-exploring.md +78 -78
- package/skills/gitnexus-guide.md +64 -64
- package/skills/gitnexus-impact-analysis.md +97 -97
- package/skills/gitnexus-pr-review.md +163 -163
- package/skills/gitnexus-refactoring.md +121 -121
- package/vendor/leiden/index.cjs +355 -355
- package/vendor/leiden/utils.cjs +392 -392
- package/dist/core/wiki/diagrams.d.ts +0 -27
- package/dist/core/wiki/diagrams.js +0 -163
|
@@ -1,17 +1,21 @@
|
|
|
1
1
|
import { createKnowledgeGraph } from '../graph/graph.js';
|
|
2
2
|
import { processStructure } from './structure-processor.js';
|
|
3
3
|
import { processParsing } from './parsing-processor.js';
|
|
4
|
-
import { processImports, processImportsFromExtracted,
|
|
4
|
+
import { processImports, processImportsFromExtracted, buildImportResolutionContext } from './import-processor.js';
|
|
5
5
|
import { processCalls, processCallsFromExtracted, processRoutesFromExtracted } from './call-processor.js';
|
|
6
6
|
import { processHeritage, processHeritageFromExtracted } from './heritage-processor.js';
|
|
7
|
+
import { computeMRO } from './mro-processor.js';
|
|
7
8
|
import { processCommunities } from './community-processor.js';
|
|
8
9
|
import { processProcesses } from './process-processor.js';
|
|
9
|
-
import {
|
|
10
|
+
import { createResolutionContext } from './resolution-context.js';
|
|
10
11
|
import { createASTCache } from './ast-cache.js';
|
|
11
12
|
import { walkRepositoryPaths, readFileContents } from './filesystem-walker.js';
|
|
12
13
|
import { getLanguageFromFilename } from './utils.js';
|
|
13
14
|
import { isLanguageAvailable } from '../tree-sitter/parser-loader.js';
|
|
14
15
|
import { createWorkerPool } from './workers/worker-pool.js';
|
|
16
|
+
import fs from 'node:fs';
|
|
17
|
+
import path from 'node:path';
|
|
18
|
+
import { fileURLToPath, pathToFileURL } from 'node:url';
|
|
15
19
|
const isDev = process.env.NODE_ENV === 'development';
|
|
16
20
|
/** Max bytes of source content to load per parse chunk. Each chunk's source +
|
|
17
21
|
* parsed ASTs + extracted records + worker serialization overhead all live in
|
|
@@ -20,14 +24,14 @@ const isDev = process.env.NODE_ENV === 'development';
|
|
|
20
24
|
const CHUNK_BYTE_BUDGET = 20 * 1024 * 1024; // 20MB
|
|
21
25
|
/** Max AST trees to keep in LRU cache */
|
|
22
26
|
const AST_CACHE_CAP = 50;
|
|
23
|
-
export const runPipelineFromRepo = async (repoPath, onProgress) => {
|
|
27
|
+
export const runPipelineFromRepo = async (repoPath, onProgress, options) => {
|
|
24
28
|
const graph = createKnowledgeGraph();
|
|
25
|
-
const
|
|
29
|
+
const ctx = createResolutionContext();
|
|
30
|
+
const symbolTable = ctx.symbols;
|
|
26
31
|
let astCache = createASTCache(AST_CACHE_CAP);
|
|
27
|
-
const importMap = createImportMap();
|
|
28
32
|
const cleanup = () => {
|
|
29
33
|
astCache.clear();
|
|
30
|
-
|
|
34
|
+
ctx.clear();
|
|
31
35
|
};
|
|
32
36
|
try {
|
|
33
37
|
// ── Phase 1: Scan paths only (no content read) ─────────────────────
|
|
@@ -87,6 +91,14 @@ export const runPipelineFromRepo = async (repoPath, onProgress) => {
|
|
|
87
91
|
console.warn(`Skipping ${count} ${lang} file(s) — ${lang} parser not available (native binding may not have built). Try: npm rebuild tree-sitter-${lang}`);
|
|
88
92
|
}
|
|
89
93
|
const totalParseable = parseableScanned.length;
|
|
94
|
+
if (totalParseable === 0) {
|
|
95
|
+
onProgress({
|
|
96
|
+
phase: 'parsing',
|
|
97
|
+
percent: 82,
|
|
98
|
+
message: 'No parseable files found — skipping parsing phase',
|
|
99
|
+
stats: { filesProcessed: 0, totalFiles: 0, nodesCreated: graph.nodeCount },
|
|
100
|
+
});
|
|
101
|
+
}
|
|
90
102
|
// Build byte-budget chunks
|
|
91
103
|
const chunks = [];
|
|
92
104
|
let currentChunk = [];
|
|
@@ -113,14 +125,30 @@ export const runPipelineFromRepo = async (repoPath, onProgress) => {
|
|
|
113
125
|
message: `Parsing ${totalParseable} files in ${numChunks} chunk${numChunks !== 1 ? 's' : ''}...`,
|
|
114
126
|
stats: { filesProcessed: 0, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
115
127
|
});
|
|
128
|
+
// Don't spawn workers for tiny repos — overhead exceeds benefit
|
|
129
|
+
const MIN_FILES_FOR_WORKERS = 15;
|
|
130
|
+
const MIN_BYTES_FOR_WORKERS = 512 * 1024;
|
|
131
|
+
const totalBytes = parseableScanned.reduce((s, f) => s + f.size, 0);
|
|
116
132
|
// Create worker pool once, reuse across chunks
|
|
117
133
|
let workerPool;
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
134
|
+
if (totalParseable >= MIN_FILES_FOR_WORKERS || totalBytes >= MIN_BYTES_FOR_WORKERS) {
|
|
135
|
+
try {
|
|
136
|
+
let workerUrl = new URL('./workers/parse-worker.js', import.meta.url);
|
|
137
|
+
// When running under vitest, import.meta.url points to src/ where no .js exists.
|
|
138
|
+
// Fall back to the compiled dist/ worker so the pool can spawn real worker threads.
|
|
139
|
+
const thisDir = fileURLToPath(new URL('.', import.meta.url));
|
|
140
|
+
if (!fs.existsSync(fileURLToPath(workerUrl))) {
|
|
141
|
+
const distWorker = path.resolve(thisDir, '..', '..', '..', 'dist', 'core', 'ingestion', 'workers', 'parse-worker.js');
|
|
142
|
+
if (fs.existsSync(distWorker)) {
|
|
143
|
+
workerUrl = pathToFileURL(distWorker);
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
workerPool = createWorkerPool(workerUrl);
|
|
147
|
+
}
|
|
148
|
+
catch (err) {
|
|
149
|
+
if (isDev)
|
|
150
|
+
console.warn('Worker pool creation failed, using sequential fallback:', err.message);
|
|
151
|
+
}
|
|
124
152
|
}
|
|
125
153
|
let filesParsedSoFar = 0;
|
|
126
154
|
// AST cache sized for one chunk (sequential fallback uses it for import/call/heritage)
|
|
@@ -155,24 +183,53 @@ export const runPipelineFromRepo = async (repoPath, onProgress) => {
|
|
|
155
183
|
stats: { filesProcessed: globalCurrent, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
156
184
|
});
|
|
157
185
|
}, workerPool);
|
|
186
|
+
const chunkBasePercent = 20 + ((filesParsedSoFar / totalParseable) * 62);
|
|
158
187
|
if (chunkWorkerData) {
|
|
159
188
|
// Imports
|
|
160
|
-
await processImportsFromExtracted(graph, allPathObjects, chunkWorkerData.imports,
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
}
|
|
169
|
-
// Routes — resolve
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
189
|
+
await processImportsFromExtracted(graph, allPathObjects, chunkWorkerData.imports, ctx, (current, total) => {
|
|
190
|
+
onProgress({
|
|
191
|
+
phase: 'parsing',
|
|
192
|
+
percent: Math.round(chunkBasePercent),
|
|
193
|
+
message: `Resolving imports (chunk ${chunkIdx + 1}/${numChunks})...`,
|
|
194
|
+
detail: `${current}/${total} files`,
|
|
195
|
+
stats: { filesProcessed: filesParsedSoFar, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
196
|
+
});
|
|
197
|
+
}, repoPath, importCtx);
|
|
198
|
+
// Calls + Heritage + Routes — resolve in parallel (no shared mutable state between them)
|
|
199
|
+
// This is safe because each writes disjoint relationship types into idempotent id-keyed Maps,
|
|
200
|
+
// and the single-threaded event loop prevents races between synchronous addRelationship calls.
|
|
201
|
+
await Promise.all([
|
|
202
|
+
processCallsFromExtracted(graph, chunkWorkerData.calls, ctx, (current, total) => {
|
|
203
|
+
onProgress({
|
|
204
|
+
phase: 'parsing',
|
|
205
|
+
percent: Math.round(chunkBasePercent),
|
|
206
|
+
message: `Resolving calls (chunk ${chunkIdx + 1}/${numChunks})...`,
|
|
207
|
+
detail: `${current}/${total} files`,
|
|
208
|
+
stats: { filesProcessed: filesParsedSoFar, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
209
|
+
});
|
|
210
|
+
}, chunkWorkerData.constructorBindings),
|
|
211
|
+
processHeritageFromExtracted(graph, chunkWorkerData.heritage, ctx, (current, total) => {
|
|
212
|
+
onProgress({
|
|
213
|
+
phase: 'parsing',
|
|
214
|
+
percent: Math.round(chunkBasePercent),
|
|
215
|
+
message: `Resolving heritage (chunk ${chunkIdx + 1}/${numChunks})...`,
|
|
216
|
+
detail: `${current}/${total} records`,
|
|
217
|
+
stats: { filesProcessed: filesParsedSoFar, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
218
|
+
});
|
|
219
|
+
}),
|
|
220
|
+
processRoutesFromExtracted(graph, chunkWorkerData.routes ?? [], ctx, (current, total) => {
|
|
221
|
+
onProgress({
|
|
222
|
+
phase: 'parsing',
|
|
223
|
+
percent: Math.round(chunkBasePercent),
|
|
224
|
+
message: `Resolving routes (chunk ${chunkIdx + 1}/${numChunks})...`,
|
|
225
|
+
detail: `${current}/${total} routes`,
|
|
226
|
+
stats: { filesProcessed: filesParsedSoFar, totalFiles: totalParseable, nodesCreated: graph.nodeCount },
|
|
227
|
+
});
|
|
228
|
+
}),
|
|
229
|
+
]);
|
|
173
230
|
}
|
|
174
231
|
else {
|
|
175
|
-
await processImports(graph, chunkFiles, astCache,
|
|
232
|
+
await processImports(graph, chunkFiles, astCache, ctx, undefined, repoPath, allPaths);
|
|
176
233
|
sequentialChunkPaths.push(chunkPaths);
|
|
177
234
|
}
|
|
178
235
|
filesParsedSoFar += chunkFiles.length;
|
|
@@ -191,120 +248,139 @@ export const runPipelineFromRepo = async (repoPath, onProgress) => {
|
|
|
191
248
|
.filter(p => chunkContents.has(p))
|
|
192
249
|
.map(p => ({ path: p, content: chunkContents.get(p) }));
|
|
193
250
|
astCache = createASTCache(chunkFiles.length);
|
|
194
|
-
await processCalls(graph, chunkFiles, astCache,
|
|
195
|
-
await processHeritage(graph, chunkFiles, astCache,
|
|
251
|
+
const rubyHeritage = await processCalls(graph, chunkFiles, astCache, ctx);
|
|
252
|
+
await processHeritage(graph, chunkFiles, astCache, ctx);
|
|
253
|
+
if (rubyHeritage.length > 0) {
|
|
254
|
+
await processHeritageFromExtracted(graph, rubyHeritage, ctx);
|
|
255
|
+
}
|
|
196
256
|
astCache.clear();
|
|
197
257
|
}
|
|
258
|
+
// Log resolution cache stats
|
|
259
|
+
if (isDev) {
|
|
260
|
+
const rcStats = ctx.getStats();
|
|
261
|
+
const total = rcStats.cacheHits + rcStats.cacheMisses;
|
|
262
|
+
const hitRate = total > 0 ? ((rcStats.cacheHits / total) * 100).toFixed(1) : '0';
|
|
263
|
+
console.log(`🔍 Resolution cache: ${rcStats.cacheHits} hits, ${rcStats.cacheMisses} misses (${hitRate}% hit rate)`);
|
|
264
|
+
}
|
|
198
265
|
// Free import resolution context — suffix index + resolve cache no longer needed
|
|
199
266
|
// (allPathObjects and importCtx hold ~94MB+ for large repos)
|
|
200
267
|
allPathObjects.length = 0;
|
|
201
268
|
importCtx.resolveCache.clear();
|
|
202
269
|
importCtx.suffixIndex = null;
|
|
203
270
|
importCtx.normalizedFileList = null;
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
271
|
+
let communityResult;
|
|
272
|
+
let processResult;
|
|
273
|
+
if (!options?.skipGraphPhases) {
|
|
274
|
+
// ── Phase 4.5: Method Resolution Order ──────────────────────────────
|
|
275
|
+
onProgress({
|
|
276
|
+
phase: 'parsing',
|
|
277
|
+
percent: 81,
|
|
278
|
+
message: 'Computing method resolution order...',
|
|
279
|
+
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
280
|
+
});
|
|
281
|
+
const mroResult = computeMRO(graph);
|
|
282
|
+
if (isDev && mroResult.entries.length > 0) {
|
|
283
|
+
console.log(`🔀 MRO: ${mroResult.entries.length} classes analyzed, ${mroResult.ambiguityCount} ambiguities found, ${mroResult.overrideEdges} OVERRIDES edges`);
|
|
209
284
|
}
|
|
210
|
-
|
|
211
|
-
}
|
|
212
|
-
// ── Phase 5: Communities ───────────────────────────────────────────
|
|
213
|
-
onProgress({
|
|
214
|
-
phase: 'communities',
|
|
215
|
-
percent: 82,
|
|
216
|
-
message: 'Detecting code communities...',
|
|
217
|
-
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
218
|
-
});
|
|
219
|
-
const communityResult = await processCommunities(graph, (message, progress) => {
|
|
220
|
-
const communityProgress = 82 + (progress * 0.10);
|
|
285
|
+
// ── Phase 5: Communities ───────────────────────────────────────────
|
|
221
286
|
onProgress({
|
|
222
287
|
phase: 'communities',
|
|
223
|
-
percent:
|
|
224
|
-
message,
|
|
288
|
+
percent: 82,
|
|
289
|
+
message: 'Detecting code communities...',
|
|
225
290
|
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
226
291
|
});
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
properties: {
|
|
236
|
-
name: comm.label,
|
|
237
|
-
filePath: '',
|
|
238
|
-
heuristicLabel: comm.heuristicLabel,
|
|
239
|
-
cohesion: comm.cohesion,
|
|
240
|
-
symbolCount: comm.symbolCount,
|
|
241
|
-
}
|
|
292
|
+
communityResult = await processCommunities(graph, (message, progress) => {
|
|
293
|
+
const communityProgress = 82 + (progress * 0.10);
|
|
294
|
+
onProgress({
|
|
295
|
+
phase: 'communities',
|
|
296
|
+
percent: Math.round(communityProgress),
|
|
297
|
+
message,
|
|
298
|
+
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
299
|
+
});
|
|
242
300
|
});
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
301
|
+
if (isDev) {
|
|
302
|
+
console.log(`🏘️ Community detection: ${communityResult.stats.totalCommunities} communities found (modularity: ${communityResult.stats.modularity.toFixed(3)})`);
|
|
303
|
+
}
|
|
304
|
+
communityResult.communities.forEach(comm => {
|
|
305
|
+
graph.addNode({
|
|
306
|
+
id: comm.id,
|
|
307
|
+
label: 'Community',
|
|
308
|
+
properties: {
|
|
309
|
+
name: comm.label,
|
|
310
|
+
filePath: '',
|
|
311
|
+
heuristicLabel: comm.heuristicLabel,
|
|
312
|
+
cohesion: comm.cohesion,
|
|
313
|
+
symbolCount: comm.symbolCount,
|
|
314
|
+
}
|
|
315
|
+
});
|
|
252
316
|
});
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
const dynamicMaxProcesses = Math.max(20, Math.min(300, Math.round(symbolCount / 10)));
|
|
265
|
-
const processResult = await processProcesses(graph, communityResult.memberships, (message, progress) => {
|
|
266
|
-
const processProgress = 94 + (progress * 0.05);
|
|
317
|
+
communityResult.memberships.forEach(membership => {
|
|
318
|
+
graph.addRelationship({
|
|
319
|
+
id: `${membership.nodeId}_member_of_${membership.communityId}`,
|
|
320
|
+
type: 'MEMBER_OF',
|
|
321
|
+
sourceId: membership.nodeId,
|
|
322
|
+
targetId: membership.communityId,
|
|
323
|
+
confidence: 1.0,
|
|
324
|
+
reason: 'leiden-algorithm',
|
|
325
|
+
});
|
|
326
|
+
});
|
|
327
|
+
// ── Phase 6: Processes ─────────────────────────────────────────────
|
|
267
328
|
onProgress({
|
|
268
329
|
phase: 'processes',
|
|
269
|
-
percent:
|
|
270
|
-
message,
|
|
330
|
+
percent: 94,
|
|
331
|
+
message: 'Detecting execution flows...',
|
|
271
332
|
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
272
333
|
});
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
334
|
+
let symbolCount = 0;
|
|
335
|
+
graph.forEachNode(n => { if (n.label !== 'File')
|
|
336
|
+
symbolCount++; });
|
|
337
|
+
const dynamicMaxProcesses = Math.max(20, Math.min(300, Math.round(symbolCount / 10)));
|
|
338
|
+
processResult = await processProcesses(graph, communityResult.memberships, (message, progress) => {
|
|
339
|
+
const processProgress = 94 + (progress * 0.05);
|
|
340
|
+
onProgress({
|
|
341
|
+
phase: 'processes',
|
|
342
|
+
percent: Math.round(processProgress),
|
|
343
|
+
message,
|
|
344
|
+
stats: { filesProcessed: totalFiles, totalFiles, nodesCreated: graph.nodeCount },
|
|
345
|
+
});
|
|
346
|
+
}, { maxProcesses: dynamicMaxProcesses, minSteps: 3 });
|
|
347
|
+
if (isDev) {
|
|
348
|
+
console.log(`🔄 Process detection: ${processResult.stats.totalProcesses} processes found (${processResult.stats.crossCommunityCount} cross-community)`);
|
|
349
|
+
}
|
|
350
|
+
processResult.processes.forEach(proc => {
|
|
351
|
+
graph.addNode({
|
|
352
|
+
id: proc.id,
|
|
353
|
+
label: 'Process',
|
|
354
|
+
properties: {
|
|
355
|
+
name: proc.label,
|
|
356
|
+
filePath: '',
|
|
357
|
+
heuristicLabel: proc.heuristicLabel,
|
|
358
|
+
processType: proc.processType,
|
|
359
|
+
stepCount: proc.stepCount,
|
|
360
|
+
communities: proc.communities,
|
|
361
|
+
entryPointId: proc.entryPointId,
|
|
362
|
+
terminalId: proc.terminalId,
|
|
363
|
+
}
|
|
364
|
+
});
|
|
291
365
|
});
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
366
|
+
processResult.steps.forEach(step => {
|
|
367
|
+
graph.addRelationship({
|
|
368
|
+
id: `${step.nodeId}_step_${step.step}_${step.processId}`,
|
|
369
|
+
type: 'STEP_IN_PROCESS',
|
|
370
|
+
sourceId: step.nodeId,
|
|
371
|
+
targetId: step.processId,
|
|
372
|
+
confidence: 1.0,
|
|
373
|
+
reason: 'trace-detection',
|
|
374
|
+
step: step.step,
|
|
375
|
+
});
|
|
302
376
|
});
|
|
303
|
-
}
|
|
377
|
+
}
|
|
304
378
|
onProgress({
|
|
305
379
|
phase: 'complete',
|
|
306
380
|
percent: 100,
|
|
307
|
-
message:
|
|
381
|
+
message: communityResult && processResult
|
|
382
|
+
? `Graph complete! ${communityResult.stats.totalCommunities} communities, ${processResult.stats.totalProcesses} processes detected.`
|
|
383
|
+
: 'Graph complete! (graph phases skipped)',
|
|
308
384
|
stats: {
|
|
309
385
|
filesProcessed: totalFiles,
|
|
310
386
|
totalFiles,
|
|
@@ -10,6 +10,7 @@
|
|
|
10
10
|
* Processes help agents understand how features work through the codebase.
|
|
11
11
|
*/
|
|
12
12
|
import { calculateEntryPointScore, isTestFile } from './entry-point-scoring.js';
|
|
13
|
+
import { SupportedLanguages } from '../../config/supported-languages.js';
|
|
13
14
|
const isDev = process.env.NODE_ENV === 'development';
|
|
14
15
|
const DEFAULT_CONFIG = {
|
|
15
16
|
maxTraceDepth: 10,
|
|
@@ -178,7 +179,7 @@ const findEntryPoints = (graph, reverseCallsEdges, callsEdges) => {
|
|
|
178
179
|
if (callees.length === 0)
|
|
179
180
|
continue;
|
|
180
181
|
// Calculate entry point score using new scoring system
|
|
181
|
-
const { score: baseScore, reasons } = calculateEntryPointScore(node.properties.name, node.properties.language
|
|
182
|
+
const { score: baseScore, reasons } = calculateEntryPointScore(node.properties.name, node.properties.language ?? SupportedLanguages.JavaScript, node.properties.isExported ?? false, callers.length, callees.length, filePath // Pass filePath for framework detection
|
|
182
183
|
);
|
|
183
184
|
let score = baseScore;
|
|
184
185
|
const astFrameworkMultiplier = node.properties.astFrameworkMultiplier ?? 1.0;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Resolution Context
|
|
3
|
+
*
|
|
4
|
+
* Single implementation of tiered name resolution. Replaces the duplicated
|
|
5
|
+
* tier-selection logic previously split between symbol-resolver.ts and
|
|
6
|
+
* call-processor.ts.
|
|
7
|
+
*
|
|
8
|
+
* Resolution tiers (highest confidence first):
|
|
9
|
+
* 1. Same file (lookupExactFull — authoritative)
|
|
10
|
+
* 2a-named. Named binding chain (walkBindingChain via NamedImportMap)
|
|
11
|
+
* 2a. Import-scoped (lookupFuzzy filtered by ImportMap)
|
|
12
|
+
* 2b. Package-scoped (lookupFuzzy filtered by PackageMap)
|
|
13
|
+
* 3. Global (all candidates — consumers must check candidate count)
|
|
14
|
+
*/
|
|
15
|
+
import type { SymbolTable, SymbolDefinition } from './symbol-table.js';
|
|
16
|
+
import type { NamedImportBinding } from './import-processor.js';
|
|
17
|
+
/** Resolution tier for tracking, logging, and test assertions. */
|
|
18
|
+
export type ResolutionTier = 'same-file' | 'import-scoped' | 'global';
|
|
19
|
+
/** Tier-selected candidates with metadata. */
|
|
20
|
+
export interface TieredCandidates {
|
|
21
|
+
readonly candidates: readonly SymbolDefinition[];
|
|
22
|
+
readonly tier: ResolutionTier;
|
|
23
|
+
}
|
|
24
|
+
/** Confidence scores per resolution tier. */
|
|
25
|
+
export declare const TIER_CONFIDENCE: Record<ResolutionTier, number>;
|
|
26
|
+
export type ImportMap = Map<string, Set<string>>;
|
|
27
|
+
export type PackageMap = Map<string, Set<string>>;
|
|
28
|
+
export type NamedImportMap = Map<string, Map<string, NamedImportBinding>>;
|
|
29
|
+
export interface ResolutionContext {
|
|
30
|
+
/**
|
|
31
|
+
* The only resolution API. Returns all candidates at the winning tier.
|
|
32
|
+
*
|
|
33
|
+
* Tier 3 ('global') returns ALL candidates regardless of count —
|
|
34
|
+
* consumers must check candidates.length and refuse ambiguous matches.
|
|
35
|
+
*/
|
|
36
|
+
resolve(name: string, fromFile: string): TieredCandidates | null;
|
|
37
|
+
/** Symbol table — used by parsing-processor to populate symbols. */
|
|
38
|
+
readonly symbols: SymbolTable;
|
|
39
|
+
/** Raw maps — used by import-processor to populate import data. */
|
|
40
|
+
readonly importMap: ImportMap;
|
|
41
|
+
readonly packageMap: PackageMap;
|
|
42
|
+
readonly namedImportMap: NamedImportMap;
|
|
43
|
+
enableCache(filePath: string): void;
|
|
44
|
+
clearCache(): void;
|
|
45
|
+
getStats(): {
|
|
46
|
+
fileCount: number;
|
|
47
|
+
globalSymbolCount: number;
|
|
48
|
+
cacheHits: number;
|
|
49
|
+
cacheMisses: number;
|
|
50
|
+
};
|
|
51
|
+
clear(): void;
|
|
52
|
+
}
|
|
53
|
+
export declare const createResolutionContext: () => ResolutionContext;
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Resolution Context
|
|
3
|
+
*
|
|
4
|
+
* Single implementation of tiered name resolution. Replaces the duplicated
|
|
5
|
+
* tier-selection logic previously split between symbol-resolver.ts and
|
|
6
|
+
* call-processor.ts.
|
|
7
|
+
*
|
|
8
|
+
* Resolution tiers (highest confidence first):
|
|
9
|
+
* 1. Same file (lookupExactFull — authoritative)
|
|
10
|
+
* 2a-named. Named binding chain (walkBindingChain via NamedImportMap)
|
|
11
|
+
* 2a. Import-scoped (lookupFuzzy filtered by ImportMap)
|
|
12
|
+
* 2b. Package-scoped (lookupFuzzy filtered by PackageMap)
|
|
13
|
+
* 3. Global (all candidates — consumers must check candidate count)
|
|
14
|
+
*/
|
|
15
|
+
import { createSymbolTable } from './symbol-table.js';
|
|
16
|
+
import { isFileInPackageDir } from './import-processor.js';
|
|
17
|
+
import { walkBindingChain } from './named-binding-extraction.js';
|
|
18
|
+
/** Confidence scores per resolution tier. */
|
|
19
|
+
export const TIER_CONFIDENCE = {
|
|
20
|
+
'same-file': 0.95,
|
|
21
|
+
'import-scoped': 0.9,
|
|
22
|
+
'global': 0.5,
|
|
23
|
+
};
|
|
24
|
+
export const createResolutionContext = () => {
    // Lookup structures shared with the ingestion passes that populate them.
    const symbols = createSymbolTable();
    const importMap = new Map();
    const packageMap = new Map();
    const namedImportMap = new Map();
    // Single-file memoization: at most one file's resolutions are cached at a time.
    let activeCacheFile = null;
    let memo = null;
    let hits = 0;
    let misses = 0;
    /**
     * Tiered name resolution, uncached. Single implementation of the tier
     * logic; returns { candidates, tier } or null when nothing matches.
     */
    const runTiers = (name, fromFile) => {
        // Tier 1: an exact definition in the same file wins outright.
        const sameFileDef = symbols.lookupExactFull(fromFile, name);
        if (sameFileDef)
            return { candidates: [sameFileDef], tier: 'same-file' };
        // Global candidate pool consulted by every remaining tier.
        const fuzzyDefs = symbols.lookupFuzzy(name);
        // Tier 2a-named: aliased imports can resolve even when lookupFuzzy
        // finds nothing under the local name, so walk the binding chain
        // before the empty-pool bail-out below.
        const viaBindings = walkBindingChain(name, fromFile, symbols, namedImportMap, fuzzyDefs);
        if (viaBindings && viaBindings.length > 0)
            return { candidates: viaBindings, tier: 'import-scoped' };
        if (fuzzyDefs.length === 0)
            return null;
        // Tier 2a: keep only definitions living in files that fromFile imports.
        const filesImported = importMap.get(fromFile);
        if (filesImported) {
            const scoped = fuzzyDefs.filter((def) => filesImported.has(def.filePath));
            if (scoped.length > 0)
                return { candidates: scoped, tier: 'import-scoped' };
        }
        // Tier 2b: keep only definitions under a package dir that fromFile imports.
        const pkgSuffixes = packageMap.get(fromFile);
        if (pkgSuffixes) {
            const scoped = fuzzyDefs.filter((def) => {
                for (const suffix of pkgSuffixes) {
                    if (isFileInPackageDir(def.filePath, suffix))
                        return true;
                }
                return false;
            });
            if (scoped.length > 0)
                return { candidates: scoped, tier: 'import-scoped' };
        }
        // Tier 3: pass every global candidate through; consumers must check
        // the candidate count and refuse ambiguous matches.
        return { candidates: fuzzyDefs, tier: 'global' };
    };
    /** Resolve with memoization when the cache is active for fromFile. */
    const resolve = (name, fromFile) => {
        const cacheUsable = memo !== null && activeCacheFile === fromFile;
        if (cacheUsable) {
            // Null results are cached too, so membership — not the value —
            // decides hit vs miss.
            if (memo.has(name)) {
                hits++;
                return memo.get(name);
            }
            misses++;
        }
        const result = runTiers(name, fromFile);
        if (cacheUsable)
            memo.set(name, result);
        return result;
    };
    // --- Cache lifecycle ---
    /** Activate (or re-target) the per-file cache for filePath. */
    const enableCache = (filePath) => {
        activeCacheFile = filePath;
        if (memo === null) {
            memo = new Map();
        }
        else {
            memo.clear();
        }
    };
    /** Deactivate the cache; the Map instance is kept to reduce GC pressure. */
    const clearCache = () => {
        activeCacheFile = null;
        if (memo)
            memo.clear();
    };
    /** Symbol-table stats plus cache hit/miss counters. */
    const getStats = () => Object.assign({}, symbols.getStats(), { cacheHits: hits, cacheMisses: misses });
    /** Reset every structure and counter to the freshly-created state. */
    const clear = () => {
        symbols.clear();
        for (const m of [importMap, packageMap, namedImportMap])
            m.clear();
        clearCache();
        hits = 0;
        misses = 0;
    };
    return {
        resolve,
        symbols,
        importMap,
        packageMap,
        namedImportMap,
        enableCache,
        clearCache,
        getStats,
        clear,
    };
};
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * C# namespace import resolution.
 * Handles using-directive resolution via .csproj root namespace stripping.
 */
import type { SuffixIndex } from './utils.js';
/** C# project config parsed from .csproj files */
export interface CSharpProjectConfig {
    /** Root namespace from <RootNamespace> or assembly name (default: project directory name) */
    rootNamespace: string;
    /** Directory containing the .csproj file */
    projectDir: string;
}
/**
 * Resolve a C# using-directive import path to matching .cs files.
 * Tries single-file match first, then directory match for namespace imports.
 *
 * @param importPath - Namespace path from the using directive (presumably dotted, e.g. "MyApp.Models" — confirm against callers)
 * @param csharpConfigs - Parsed .csproj configs used for root-namespace stripping
 * @param normalizedFileList - NOTE(review): presumably normalized repo file paths; verify against caller
 * @param allFileList - NOTE(review): presumably the unnormalized counterpart of normalizedFileList; verify against caller
 * @param index - Optional suffix index used to accelerate path lookups
 * @returns Matching .cs file paths (presumably empty when nothing matches — confirm in implementation)
 */
export declare function resolveCSharpImport(importPath: string, csharpConfigs: CSharpProjectConfig[], normalizedFileList: string[], allFileList: string[], index?: SuffixIndex): string[];
/**
 * Compute the directory suffix for a C# namespace import (for PackageMap).
 * Returns a suffix like "/ProjectDir/Models/" or null if no config matches.
 *
 * @param importPath - Namespace path from the using directive
 * @param csharpConfigs - Parsed .csproj configs to match the namespace against
 * @returns Directory suffix for PackageMap filtering, or null when no config matches
 */
export declare function resolveCSharpNamespaceDir(importPath: string, csharpConfigs: CSharpProjectConfig[]): string | null;
|