@veewo/gitnexus 1.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +234 -0
- package/dist/benchmark/agent-context/evaluators.d.ts +9 -0
- package/dist/benchmark/agent-context/evaluators.js +196 -0
- package/dist/benchmark/agent-context/evaluators.test.d.ts +1 -0
- package/dist/benchmark/agent-context/evaluators.test.js +39 -0
- package/dist/benchmark/agent-context/io.d.ts +2 -0
- package/dist/benchmark/agent-context/io.js +23 -0
- package/dist/benchmark/agent-context/io.test.d.ts +1 -0
- package/dist/benchmark/agent-context/io.test.js +19 -0
- package/dist/benchmark/agent-context/report.d.ts +2 -0
- package/dist/benchmark/agent-context/report.js +59 -0
- package/dist/benchmark/agent-context/report.test.d.ts +1 -0
- package/dist/benchmark/agent-context/report.test.js +85 -0
- package/dist/benchmark/agent-context/runner.d.ts +46 -0
- package/dist/benchmark/agent-context/runner.js +111 -0
- package/dist/benchmark/agent-context/runner.test.d.ts +1 -0
- package/dist/benchmark/agent-context/runner.test.js +79 -0
- package/dist/benchmark/agent-context/tool-runner.d.ts +7 -0
- package/dist/benchmark/agent-context/tool-runner.js +18 -0
- package/dist/benchmark/agent-context/tool-runner.test.d.ts +1 -0
- package/dist/benchmark/agent-context/tool-runner.test.js +11 -0
- package/dist/benchmark/agent-context/types.d.ts +40 -0
- package/dist/benchmark/agent-context/types.js +1 -0
- package/dist/benchmark/analyze-runner.d.ts +16 -0
- package/dist/benchmark/analyze-runner.js +51 -0
- package/dist/benchmark/analyze-runner.test.d.ts +1 -0
- package/dist/benchmark/analyze-runner.test.js +37 -0
- package/dist/benchmark/evaluators.d.ts +6 -0
- package/dist/benchmark/evaluators.js +10 -0
- package/dist/benchmark/evaluators.test.d.ts +1 -0
- package/dist/benchmark/evaluators.test.js +12 -0
- package/dist/benchmark/io.d.ts +7 -0
- package/dist/benchmark/io.js +25 -0
- package/dist/benchmark/io.test.d.ts +1 -0
- package/dist/benchmark/io.test.js +35 -0
- package/dist/benchmark/neonspark-candidates.d.ts +19 -0
- package/dist/benchmark/neonspark-candidates.js +94 -0
- package/dist/benchmark/neonspark-candidates.test.d.ts +1 -0
- package/dist/benchmark/neonspark-candidates.test.js +43 -0
- package/dist/benchmark/neonspark-materialize.d.ts +19 -0
- package/dist/benchmark/neonspark-materialize.js +111 -0
- package/dist/benchmark/neonspark-materialize.test.d.ts +1 -0
- package/dist/benchmark/neonspark-materialize.test.js +124 -0
- package/dist/benchmark/neonspark-sync.d.ts +3 -0
- package/dist/benchmark/neonspark-sync.js +53 -0
- package/dist/benchmark/neonspark-sync.test.d.ts +1 -0
- package/dist/benchmark/neonspark-sync.test.js +20 -0
- package/dist/benchmark/report.d.ts +1 -0
- package/dist/benchmark/report.js +7 -0
- package/dist/benchmark/runner.d.ts +48 -0
- package/dist/benchmark/runner.js +302 -0
- package/dist/benchmark/runner.test.d.ts +1 -0
- package/dist/benchmark/runner.test.js +50 -0
- package/dist/benchmark/scoring.d.ts +16 -0
- package/dist/benchmark/scoring.js +27 -0
- package/dist/benchmark/scoring.test.d.ts +1 -0
- package/dist/benchmark/scoring.test.js +24 -0
- package/dist/benchmark/tool-runner.d.ts +6 -0
- package/dist/benchmark/tool-runner.js +17 -0
- package/dist/benchmark/types.d.ts +36 -0
- package/dist/benchmark/types.js +1 -0
- package/dist/cli/ai-context.d.ts +22 -0
- package/dist/cli/ai-context.js +184 -0
- package/dist/cli/ai-context.test.d.ts +1 -0
- package/dist/cli/ai-context.test.js +30 -0
- package/dist/cli/analyze-multi-scope-regression.test.d.ts +1 -0
- package/dist/cli/analyze-multi-scope-regression.test.js +22 -0
- package/dist/cli/analyze-options.d.ts +7 -0
- package/dist/cli/analyze-options.js +56 -0
- package/dist/cli/analyze-options.test.d.ts +1 -0
- package/dist/cli/analyze-options.test.js +36 -0
- package/dist/cli/analyze.d.ts +14 -0
- package/dist/cli/analyze.js +384 -0
- package/dist/cli/augment.d.ts +13 -0
- package/dist/cli/augment.js +33 -0
- package/dist/cli/benchmark-agent-context.d.ts +29 -0
- package/dist/cli/benchmark-agent-context.js +61 -0
- package/dist/cli/benchmark-agent-context.test.d.ts +1 -0
- package/dist/cli/benchmark-agent-context.test.js +80 -0
- package/dist/cli/benchmark-unity.d.ts +15 -0
- package/dist/cli/benchmark-unity.js +31 -0
- package/dist/cli/benchmark-unity.test.d.ts +1 -0
- package/dist/cli/benchmark-unity.test.js +18 -0
- package/dist/cli/claude-hooks.d.ts +22 -0
- package/dist/cli/claude-hooks.js +97 -0
- package/dist/cli/clean.d.ts +10 -0
- package/dist/cli/clean.js +60 -0
- package/dist/cli/eval-server.d.ts +30 -0
- package/dist/cli/eval-server.js +372 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +182 -0
- package/dist/cli/list.d.ts +6 -0
- package/dist/cli/list.js +33 -0
- package/dist/cli/mcp.d.ts +8 -0
- package/dist/cli/mcp.js +34 -0
- package/dist/cli/repo-manager-alias.test.d.ts +1 -0
- package/dist/cli/repo-manager-alias.test.js +40 -0
- package/dist/cli/scope-filter.test.d.ts +1 -0
- package/dist/cli/scope-filter.test.js +49 -0
- package/dist/cli/serve.d.ts +4 -0
- package/dist/cli/serve.js +6 -0
- package/dist/cli/setup.d.ts +8 -0
- package/dist/cli/setup.js +311 -0
- package/dist/cli/setup.test.d.ts +1 -0
- package/dist/cli/setup.test.js +31 -0
- package/dist/cli/status.d.ts +6 -0
- package/dist/cli/status.js +27 -0
- package/dist/cli/tool.d.ts +40 -0
- package/dist/cli/tool.js +94 -0
- package/dist/cli/version.test.d.ts +1 -0
- package/dist/cli/version.test.js +19 -0
- package/dist/cli/wiki.d.ts +15 -0
- package/dist/cli/wiki.js +361 -0
- package/dist/config/ignore-service.d.ts +1 -0
- package/dist/config/ignore-service.js +210 -0
- package/dist/config/supported-languages.d.ts +12 -0
- package/dist/config/supported-languages.js +15 -0
- package/dist/core/augmentation/engine.d.ts +26 -0
- package/dist/core/augmentation/engine.js +213 -0
- package/dist/core/embeddings/embedder.d.ts +60 -0
- package/dist/core/embeddings/embedder.js +251 -0
- package/dist/core/embeddings/embedding-pipeline.d.ts +51 -0
- package/dist/core/embeddings/embedding-pipeline.js +329 -0
- package/dist/core/embeddings/index.d.ts +9 -0
- package/dist/core/embeddings/index.js +9 -0
- package/dist/core/embeddings/text-generator.d.ts +24 -0
- package/dist/core/embeddings/text-generator.js +182 -0
- package/dist/core/embeddings/types.d.ts +87 -0
- package/dist/core/embeddings/types.js +32 -0
- package/dist/core/graph/graph.d.ts +2 -0
- package/dist/core/graph/graph.js +66 -0
- package/dist/core/graph/types.d.ts +61 -0
- package/dist/core/graph/types.js +1 -0
- package/dist/core/ingestion/ast-cache.d.ts +11 -0
- package/dist/core/ingestion/ast-cache.js +34 -0
- package/dist/core/ingestion/call-processor.d.ts +15 -0
- package/dist/core/ingestion/call-processor.js +327 -0
- package/dist/core/ingestion/cluster-enricher.d.ts +38 -0
- package/dist/core/ingestion/cluster-enricher.js +170 -0
- package/dist/core/ingestion/community-processor.d.ts +39 -0
- package/dist/core/ingestion/community-processor.js +312 -0
- package/dist/core/ingestion/entry-point-scoring.d.ts +39 -0
- package/dist/core/ingestion/entry-point-scoring.js +260 -0
- package/dist/core/ingestion/filesystem-walker.d.ts +28 -0
- package/dist/core/ingestion/filesystem-walker.js +80 -0
- package/dist/core/ingestion/framework-detection.d.ts +39 -0
- package/dist/core/ingestion/framework-detection.js +235 -0
- package/dist/core/ingestion/heritage-processor.d.ts +20 -0
- package/dist/core/ingestion/heritage-processor.js +197 -0
- package/dist/core/ingestion/import-processor.d.ts +38 -0
- package/dist/core/ingestion/import-processor.js +778 -0
- package/dist/core/ingestion/parsing-processor.d.ts +15 -0
- package/dist/core/ingestion/parsing-processor.js +291 -0
- package/dist/core/ingestion/pipeline.d.ts +5 -0
- package/dist/core/ingestion/pipeline.js +323 -0
- package/dist/core/ingestion/process-processor.d.ts +51 -0
- package/dist/core/ingestion/process-processor.js +309 -0
- package/dist/core/ingestion/scope-filter.d.ts +25 -0
- package/dist/core/ingestion/scope-filter.js +100 -0
- package/dist/core/ingestion/structure-processor.d.ts +2 -0
- package/dist/core/ingestion/structure-processor.js +36 -0
- package/dist/core/ingestion/symbol-table.d.ts +33 -0
- package/dist/core/ingestion/symbol-table.js +38 -0
- package/dist/core/ingestion/tree-sitter-queries.d.ts +12 -0
- package/dist/core/ingestion/tree-sitter-queries.js +398 -0
- package/dist/core/ingestion/utils.d.ts +10 -0
- package/dist/core/ingestion/utils.js +50 -0
- package/dist/core/ingestion/workers/parse-worker.d.ts +59 -0
- package/dist/core/ingestion/workers/parse-worker.js +672 -0
- package/dist/core/ingestion/workers/worker-pool.d.ts +16 -0
- package/dist/core/ingestion/workers/worker-pool.js +120 -0
- package/dist/core/kuzu/csv-generator.d.ts +29 -0
- package/dist/core/kuzu/csv-generator.js +336 -0
- package/dist/core/kuzu/kuzu-adapter.d.ts +101 -0
- package/dist/core/kuzu/kuzu-adapter.js +753 -0
- package/dist/core/kuzu/schema.d.ts +53 -0
- package/dist/core/kuzu/schema.js +407 -0
- package/dist/core/search/bm25-index.d.ts +23 -0
- package/dist/core/search/bm25-index.js +95 -0
- package/dist/core/search/hybrid-search.d.ts +49 -0
- package/dist/core/search/hybrid-search.js +118 -0
- package/dist/core/tree-sitter/parser-loader.d.ts +4 -0
- package/dist/core/tree-sitter/parser-loader.js +44 -0
- package/dist/core/wiki/generator.d.ts +110 -0
- package/dist/core/wiki/generator.js +786 -0
- package/dist/core/wiki/graph-queries.d.ts +80 -0
- package/dist/core/wiki/graph-queries.js +238 -0
- package/dist/core/wiki/html-viewer.d.ts +10 -0
- package/dist/core/wiki/html-viewer.js +297 -0
- package/dist/core/wiki/llm-client.d.ts +40 -0
- package/dist/core/wiki/llm-client.js +162 -0
- package/dist/core/wiki/prompts.d.ts +53 -0
- package/dist/core/wiki/prompts.js +174 -0
- package/dist/lib/utils.d.ts +1 -0
- package/dist/lib/utils.js +3 -0
- package/dist/mcp/core/embedder.d.ts +27 -0
- package/dist/mcp/core/embedder.js +108 -0
- package/dist/mcp/core/kuzu-adapter.d.ts +34 -0
- package/dist/mcp/core/kuzu-adapter.js +231 -0
- package/dist/mcp/local/local-backend.d.ts +160 -0
- package/dist/mcp/local/local-backend.js +1646 -0
- package/dist/mcp/resources.d.ts +31 -0
- package/dist/mcp/resources.js +407 -0
- package/dist/mcp/server.d.ts +23 -0
- package/dist/mcp/server.js +251 -0
- package/dist/mcp/staleness.d.ts +15 -0
- package/dist/mcp/staleness.js +29 -0
- package/dist/mcp/tools.d.ts +24 -0
- package/dist/mcp/tools.js +195 -0
- package/dist/server/api.d.ts +10 -0
- package/dist/server/api.js +344 -0
- package/dist/server/mcp-http.d.ts +13 -0
- package/dist/server/mcp-http.js +100 -0
- package/dist/storage/git.d.ts +6 -0
- package/dist/storage/git.js +32 -0
- package/dist/storage/repo-manager.d.ts +125 -0
- package/dist/storage/repo-manager.js +257 -0
- package/dist/types/pipeline.d.ts +34 -0
- package/dist/types/pipeline.js +18 -0
- package/hooks/claude/gitnexus-hook.cjs +135 -0
- package/hooks/claude/pre-tool-use.sh +78 -0
- package/hooks/claude/session-start.sh +42 -0
- package/package.json +92 -0
- package/skills/gitnexus-cli.md +82 -0
- package/skills/gitnexus-debugging.md +89 -0
- package/skills/gitnexus-exploring.md +78 -0
- package/skills/gitnexus-guide.md +64 -0
- package/skills/gitnexus-impact-analysis.md +97 -0
- package/skills/gitnexus-refactoring.md +121 -0
- package/vendor/leiden/index.cjs +355 -0
- package/vendor/leiden/utils.cjs +392 -0
|
@@ -0,0 +1,1646 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Local Backend (Multi-Repo)
|
|
3
|
+
*
|
|
4
|
+
* Provides tool implementations using local .gitnexus/ indexes.
|
|
5
|
+
* Supports multiple indexed repositories via a global registry.
|
|
6
|
+
* KuzuDB connections are opened lazily per repo on first query.
|
|
7
|
+
*/
|
|
8
|
+
import fs from 'fs/promises';
|
|
9
|
+
import path from 'path';
|
|
10
|
+
import { initKuzu, executeQuery, closeKuzu, isKuzuReady } from '../core/kuzu-adapter.js';
|
|
11
|
+
// Embedding imports are lazy (dynamic import) to avoid loading onnxruntime-node
|
|
12
|
+
// at MCP server startup — crashes on unsupported Node ABI versions (#89)
|
|
13
|
+
// git utilities available if needed
|
|
14
|
+
// import { isGitRepo, getCurrentCommit, getGitRoot } from '../../storage/git.js';
|
|
15
|
+
import { listRegisteredRepos, } from '../../storage/repo-manager.js';
|
|
16
|
+
// AI context generation is CLI-only (gitnexus analyze)
|
|
17
|
+
// import { generateAIContextFiles } from '../../cli/ai-context.js';
|
|
18
|
+
/**
 * Quick test-file detection for filtering impact results.
 * Matches common test file/directory patterns across all supported languages.
 *
 * Fix: directory patterns such as '/test/' previously never matched
 * repo-relative paths that START with the segment (e.g. 'test/helpers.js'
 * or a top-level 'conftest.py'), because the bare includes() checks require
 * a leading slash. The path is now also matched with a '/' prefix, so the
 * new behavior is a strict superset of the old one.
 *
 * @param {string} filePath - file path using either '/' or '\\' separators
 * @returns {boolean} true when the path looks like a test/fixture file
 */
function isTestFilePath(filePath) {
    // Normalize for matching: case-insensitive, Windows separators to '/'.
    const p = filePath.toLowerCase().replace(/\\/g, '/');
    // Anchored form lets '/dir/' patterns also match paths that begin with 'dir/'.
    const anchored = `/${p}`;
    return (p.includes('.test.') || p.includes('.spec.') ||
        p.includes('__tests__/') || p.includes('__mocks__/') ||
        anchored.includes('/test/') || anchored.includes('/tests/') ||
        anchored.includes('/testing/') || anchored.includes('/fixtures/') ||
        p.endsWith('_test.go') || p.endsWith('_test.py') ||
        anchored.includes('/test_') || anchored.includes('/conftest.'));
}
|
|
31
|
+
/**
 * Valid KuzuDB node labels for safe Cypher query construction.
 * Intended as a whitelist so caller-supplied label names can be checked
 * before being interpolated into a Cypher string (label injection guard).
 * NOTE(review): no consumer is visible in this chunk — presumably used by
 * tool implementations further down the file; confirm before removing.
 */
const VALID_NODE_LABELS = new Set([
    'File', 'Folder', 'Function', 'Class', 'Interface', 'Method', 'CodeElement',
    'Community', 'Process', 'Struct', 'Enum', 'Macro', 'Typedef', 'Union',
    'Namespace', 'Trait', 'Impl', 'TypeAlias', 'Const', 'Static', 'Property',
    'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module',
]);
|
|
38
|
+
/**
 * Derive the node-type prefix from a graph node id of the form "Type:rest".
 * Returns undefined for non-strings, empty strings, ids without a colon,
 * and ids whose colon is the first character (empty prefix).
 */
function inferTypeFromId(id) {
    const isNonEmptyString = typeof id === 'string' && id.length > 0;
    if (!isNonEmptyString)
        return undefined;
    const sep = id.indexOf(':');
    return sep > 0 ? id.slice(0, sep) : undefined;
}
|
|
46
|
+
/**
 * Read a column from a KuzuDB result row that may expose values either by
 * alias name or by positional index. Prefers the named property and falls
 * back to the index when the name is absent. Non-object rows (null,
 * primitives) yield undefined.
 */
function getRowValue(row, key, index) {
    const isRowObject = row !== null && row !== undefined && typeof row === 'object';
    if (!isRowObject)
        return undefined;
    const byName = row[key];
    return byName === undefined ? row[index] : byName;
}
|
|
54
|
+
export class LocalBackend {
    // repo id -> handle (name, paths, registry metadata); kept in sync with the global registry.
    repos = new Map();
    // repo id -> lightweight context summary derived from registry stats (no DB access needed).
    contextCache = new Map();
    // repo ids whose KuzuDB connection was opened; rechecked against the live pool before use.
    initializedRepos = new Set();
|
|
58
|
+
// ─── Initialization ──────────────────────────────────────────────
|
|
59
|
+
/**
|
|
60
|
+
* Initialize from the global registry.
|
|
61
|
+
* Returns true if at least one repo is available.
|
|
62
|
+
*/
|
|
63
|
+
async init() {
|
|
64
|
+
await this.refreshRepos();
|
|
65
|
+
return this.repos.size > 0;
|
|
66
|
+
}
|
|
67
|
+
/**
|
|
68
|
+
* Re-read the global registry and update the in-memory repo map.
|
|
69
|
+
* New repos are added, existing repos are updated, removed repos are pruned.
|
|
70
|
+
* KuzuDB connections for removed repos are NOT closed (they idle-timeout naturally).
|
|
71
|
+
*/
|
|
72
|
+
async refreshRepos() {
|
|
73
|
+
const entries = await listRegisteredRepos({ validate: true });
|
|
74
|
+
const freshIds = new Set();
|
|
75
|
+
for (const entry of entries) {
|
|
76
|
+
const id = this.repoId(entry.name, entry.path);
|
|
77
|
+
freshIds.add(id);
|
|
78
|
+
const storagePath = entry.storagePath;
|
|
79
|
+
const kuzuPath = path.join(storagePath, 'kuzu');
|
|
80
|
+
const handle = {
|
|
81
|
+
id,
|
|
82
|
+
name: entry.name,
|
|
83
|
+
repoPath: entry.path,
|
|
84
|
+
storagePath,
|
|
85
|
+
kuzuPath,
|
|
86
|
+
indexedAt: entry.indexedAt,
|
|
87
|
+
lastCommit: entry.lastCommit,
|
|
88
|
+
stats: entry.stats,
|
|
89
|
+
};
|
|
90
|
+
this.repos.set(id, handle);
|
|
91
|
+
// Build lightweight context (no KuzuDB needed)
|
|
92
|
+
const s = entry.stats || {};
|
|
93
|
+
this.contextCache.set(id, {
|
|
94
|
+
projectName: entry.name,
|
|
95
|
+
stats: {
|
|
96
|
+
fileCount: s.files || 0,
|
|
97
|
+
functionCount: s.nodes || 0,
|
|
98
|
+
communityCount: s.communities || 0,
|
|
99
|
+
processCount: s.processes || 0,
|
|
100
|
+
},
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
// Prune repos that no longer exist in the registry
|
|
104
|
+
for (const id of this.repos.keys()) {
|
|
105
|
+
if (!freshIds.has(id)) {
|
|
106
|
+
this.repos.delete(id);
|
|
107
|
+
this.contextCache.delete(id);
|
|
108
|
+
this.initializedRepos.delete(id);
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
/**
|
|
113
|
+
* Generate a stable repo ID from name + path.
|
|
114
|
+
* If names collide, append a hash of the path.
|
|
115
|
+
*/
|
|
116
|
+
repoId(name, repoPath) {
|
|
117
|
+
const base = name.toLowerCase();
|
|
118
|
+
// Check for name collision with a different path
|
|
119
|
+
for (const [id, handle] of this.repos) {
|
|
120
|
+
if (id === base && handle.repoPath !== path.resolve(repoPath)) {
|
|
121
|
+
// Collision — use path hash
|
|
122
|
+
const hash = Buffer.from(repoPath).toString('base64url').slice(0, 6);
|
|
123
|
+
return `${base}-${hash}`;
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
return base;
|
|
127
|
+
}
|
|
128
|
+
// ─── Repo Resolution ─────────────────────────────────────────────
|
|
129
|
+
/**
|
|
130
|
+
* Resolve which repo to use.
|
|
131
|
+
* - If repoParam is given, match by name or path
|
|
132
|
+
* - If only 1 repo, use it
|
|
133
|
+
* - If 0 or multiple without param, throw with helpful message
|
|
134
|
+
*
|
|
135
|
+
* On a miss, re-reads the registry once in case a new repo was indexed
|
|
136
|
+
* while the MCP server was running.
|
|
137
|
+
*/
|
|
138
|
+
async resolveRepo(repoParam) {
|
|
139
|
+
const result = this.resolveRepoFromCache(repoParam);
|
|
140
|
+
if (result)
|
|
141
|
+
return result;
|
|
142
|
+
// Miss — refresh registry and try once more
|
|
143
|
+
await this.refreshRepos();
|
|
144
|
+
const retried = this.resolveRepoFromCache(repoParam);
|
|
145
|
+
if (retried)
|
|
146
|
+
return retried;
|
|
147
|
+
// Still no match — throw with helpful message
|
|
148
|
+
if (this.repos.size === 0) {
|
|
149
|
+
throw new Error('No indexed repositories. Run: gitnexus analyze');
|
|
150
|
+
}
|
|
151
|
+
if (repoParam) {
|
|
152
|
+
const names = [...this.repos.values()].map(h => h.name);
|
|
153
|
+
throw new Error(`Repository "${repoParam}" not found. Available: ${names.join(', ')}`);
|
|
154
|
+
}
|
|
155
|
+
const names = [...this.repos.values()].map(h => h.name);
|
|
156
|
+
throw new Error(`Multiple repositories indexed. Specify which one with the "repo" parameter. Available: ${names.join(', ')}`);
|
|
157
|
+
}
|
|
158
|
+
/**
|
|
159
|
+
* Try to resolve a repo from the in-memory cache. Returns null on miss.
|
|
160
|
+
*/
|
|
161
|
+
resolveRepoFromCache(repoParam) {
|
|
162
|
+
if (this.repos.size === 0)
|
|
163
|
+
return null;
|
|
164
|
+
if (repoParam) {
|
|
165
|
+
const paramLower = repoParam.toLowerCase();
|
|
166
|
+
// Match by id
|
|
167
|
+
if (this.repos.has(paramLower))
|
|
168
|
+
return this.repos.get(paramLower);
|
|
169
|
+
// Match by name (case-insensitive)
|
|
170
|
+
for (const handle of this.repos.values()) {
|
|
171
|
+
if (handle.name.toLowerCase() === paramLower)
|
|
172
|
+
return handle;
|
|
173
|
+
}
|
|
174
|
+
// Match by path (substring)
|
|
175
|
+
const resolved = path.resolve(repoParam);
|
|
176
|
+
for (const handle of this.repos.values()) {
|
|
177
|
+
if (handle.repoPath === resolved)
|
|
178
|
+
return handle;
|
|
179
|
+
}
|
|
180
|
+
// Match by partial name
|
|
181
|
+
for (const handle of this.repos.values()) {
|
|
182
|
+
if (handle.name.toLowerCase().includes(paramLower))
|
|
183
|
+
return handle;
|
|
184
|
+
}
|
|
185
|
+
return null;
|
|
186
|
+
}
|
|
187
|
+
if (this.repos.size === 1) {
|
|
188
|
+
return this.repos.values().next().value;
|
|
189
|
+
}
|
|
190
|
+
return null; // Multiple repos, no param — ambiguous
|
|
191
|
+
}
|
|
192
|
+
// ─── Lazy KuzuDB Init ────────────────────────────────────────────
|
|
193
|
+
async ensureInitialized(repoId) {
|
|
194
|
+
// Always check the actual pool — the idle timer may have evicted the connection
|
|
195
|
+
if (this.initializedRepos.has(repoId) && isKuzuReady(repoId))
|
|
196
|
+
return;
|
|
197
|
+
const handle = this.repos.get(repoId);
|
|
198
|
+
if (!handle)
|
|
199
|
+
throw new Error(`Unknown repo: ${repoId}`);
|
|
200
|
+
try {
|
|
201
|
+
await initKuzu(repoId, handle.kuzuPath);
|
|
202
|
+
this.initializedRepos.add(repoId);
|
|
203
|
+
}
|
|
204
|
+
catch (err) {
|
|
205
|
+
// If lock error, mark as not initialized so next call retries
|
|
206
|
+
this.initializedRepos.delete(repoId);
|
|
207
|
+
throw err;
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
// ─── Public Getters ──────────────────────────────────────────────
|
|
211
|
+
/**
|
|
212
|
+
* Get context for a specific repo (or the single repo if only one).
|
|
213
|
+
*/
|
|
214
|
+
getContext(repoId) {
|
|
215
|
+
if (repoId && this.contextCache.has(repoId)) {
|
|
216
|
+
return this.contextCache.get(repoId);
|
|
217
|
+
}
|
|
218
|
+
if (this.repos.size === 1) {
|
|
219
|
+
return this.contextCache.values().next().value ?? null;
|
|
220
|
+
}
|
|
221
|
+
return null;
|
|
222
|
+
}
|
|
223
|
+
/**
|
|
224
|
+
* List all registered repos with their metadata.
|
|
225
|
+
* Re-reads the global registry so newly indexed repos are discovered
|
|
226
|
+
* without restarting the MCP server.
|
|
227
|
+
*/
|
|
228
|
+
async listRepos() {
|
|
229
|
+
await this.refreshRepos();
|
|
230
|
+
return [...this.repos.values()].map(h => ({
|
|
231
|
+
name: h.name,
|
|
232
|
+
path: h.repoPath,
|
|
233
|
+
indexedAt: h.indexedAt,
|
|
234
|
+
lastCommit: h.lastCommit,
|
|
235
|
+
stats: h.stats,
|
|
236
|
+
}));
|
|
237
|
+
}
|
|
238
|
+
// ─── Tool Dispatch ───────────────────────────────────────────────
|
|
239
|
+
async callTool(method, params) {
|
|
240
|
+
if (method === 'list_repos') {
|
|
241
|
+
return this.listRepos();
|
|
242
|
+
}
|
|
243
|
+
// Resolve repo from optional param (re-reads registry on miss)
|
|
244
|
+
const repo = await this.resolveRepo(params?.repo);
|
|
245
|
+
switch (method) {
|
|
246
|
+
case 'query':
|
|
247
|
+
return this.query(repo, params);
|
|
248
|
+
case 'cypher': {
|
|
249
|
+
const raw = await this.cypher(repo, params);
|
|
250
|
+
return this.formatCypherAsMarkdown(raw);
|
|
251
|
+
}
|
|
252
|
+
case 'context':
|
|
253
|
+
return this.context(repo, params);
|
|
254
|
+
case 'impact':
|
|
255
|
+
return this.impact(repo, params);
|
|
256
|
+
case 'detect_changes':
|
|
257
|
+
return this.detectChanges(repo, params);
|
|
258
|
+
case 'rename':
|
|
259
|
+
return this.rename(repo, params);
|
|
260
|
+
// Legacy aliases for backwards compatibility
|
|
261
|
+
case 'search':
|
|
262
|
+
return this.query(repo, params);
|
|
263
|
+
case 'explore':
|
|
264
|
+
return this.context(repo, { name: params?.name, ...params });
|
|
265
|
+
case 'overview':
|
|
266
|
+
return this.overview(repo, params);
|
|
267
|
+
default:
|
|
268
|
+
throw new Error(`Unknown tool: ${method}`);
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
// ─── Tool Implementations ────────────────────────────────────────
|
|
272
|
+
    /**
     * Query tool — process-grouped search.
     *
     * 1. Hybrid search (BM25 + semantic) to find matching symbols
     * 2. Trace each match to its process(es) via STEP_IN_PROCESS
     * 3. Group by process, rank by aggregate relevance + internal cluster cohesion
     * 4. Return: { processes, process_symbols, definitions }
     *
     * @param {object} repo   resolved repo handle (uses repo.id for KuzuDB queries)
     * @param {object} params tool params: query (required), limit, max_symbols,
     *                        include_content
     * @returns {Promise<object>} `{ processes, process_symbols, definitions }`,
     *          or `{ error }` when the query param is missing/empty
     */
    async query(repo, params) {
        if (!params.query?.trim()) {
            return { error: 'query parameter is required and cannot be empty.' };
        }
        await this.ensureInitialized(repo.id);
        // `||` (not `??`) means 0 also falls back to the defaults — presumably
        // intentional since 0 would be a useless limit.
        const processLimit = params.limit || 5;
        const maxSymbolsPerProcess = params.max_symbols || 10;
        const includeContent = params.include_content ?? false;
        const searchQuery = params.query.trim();
        // Step 1: Run hybrid search to get matching symbols
        const searchLimit = processLimit * maxSymbolsPerProcess; // fetch enough raw results
        const [bm25Results, semanticResults] = await Promise.all([
            this.bm25Search(repo, searchQuery, searchLimit),
            this.semanticSearch(repo, searchQuery, searchLimit),
        ]);
        // Merge via reciprocal rank fusion: each hit contributes 1/(60+rank);
        // hits found by both search modes accumulate both contributions.
        const scoreMap = new Map();
        for (let i = 0; i < bm25Results.length; i++) {
            const result = bm25Results[i];
            // Keyed by nodeId when present, else filePath (file-level hits).
            const key = result.nodeId || result.filePath;
            const rrfScore = 1 / (60 + i);
            const existing = scoreMap.get(key);
            if (existing) {
                existing.score += rrfScore;
            }
            else {
                scoreMap.set(key, { score: rrfScore, data: result });
            }
        }
        for (let i = 0; i < semanticResults.length; i++) {
            const result = semanticResults[i];
            const key = result.nodeId || result.filePath;
            const rrfScore = 1 / (60 + i);
            const existing = scoreMap.get(key);
            if (existing) {
                existing.score += rrfScore;
            }
            else {
                scoreMap.set(key, { score: rrfScore, data: result });
            }
        }
        const merged = Array.from(scoreMap.entries())
            .sort((a, b) => b[1].score - a[1].score)
            .slice(0, searchLimit);
        // Step 2: For each match with a nodeId, trace to process(es)
        const processMap = new Map();
        const definitions = []; // standalone symbols not in any process
        for (const [_, item] of merged) {
            const sym = item.data;
            if (!sym.nodeId) {
                // File-level results go to definitions
                definitions.push({
                    name: sym.name,
                    type: sym.type || 'File',
                    filePath: sym.filePath,
                });
                continue;
            }
            // Escape single quotes for safe interpolation into Cypher literals.
            const escaped = sym.nodeId.replace(/'/g, "''");
            // Find processes this symbol participates in
            let processRows = [];
            try {
                processRows = await executeQuery(repo.id, `
        MATCH (n {id: '${escaped}'})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
        RETURN p.id AS pid, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount, r.step AS step
      `);
            }
            catch { /* symbol might not be in any process */ }
            // Get cluster membership + cohesion (cohesion used as internal ranking signal)
            let cohesion = 0;
            let module;
            try {
                const cohesionRows = await executeQuery(repo.id, `
        MATCH (n {id: '${escaped}'})-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
        RETURN c.cohesion AS cohesion, c.heuristicLabel AS module
        LIMIT 1
      `);
                if (cohesionRows.length > 0) {
                    // Rows may be keyed by alias or by position depending on the driver.
                    cohesion = (cohesionRows[0].cohesion ?? cohesionRows[0][0]) || 0;
                    module = cohesionRows[0].module ?? cohesionRows[0][1];
                }
            }
            catch { /* no cluster info */ }
            // Optionally fetch content
            let content;
            if (includeContent) {
                try {
                    const contentRows = await executeQuery(repo.id, `
          MATCH (n {id: '${escaped}'})
          RETURN n.content AS content
        `);
                    if (contentRows.length > 0) {
                        content = contentRows[0].content ?? contentRows[0][0];
                    }
                }
                catch { /* skip */ }
            }
            const symbolEntry = {
                id: sym.nodeId,
                name: sym.name,
                type: sym.type,
                filePath: sym.filePath,
                startLine: sym.startLine,
                endLine: sym.endLine,
                ...(module ? { module } : {}),
                ...(includeContent && content ? { content } : {}),
            };
            if (processRows.length === 0) {
                // Symbol not in any process — goes to definitions
                definitions.push(symbolEntry);
            }
            else {
                // Add to each process it belongs to
                for (const row of processRows) {
                    const pid = row.pid ?? row[0];
                    const label = row.label ?? row[1];
                    const hLabel = row.heuristicLabel ?? row[2];
                    const pType = row.processType ?? row[3];
                    const stepCount = row.stepCount ?? row[4];
                    const step = row.step ?? row[5];
                    if (!processMap.has(pid)) {
                        processMap.set(pid, {
                            id: pid,
                            label,
                            heuristicLabel: hLabel,
                            processType: pType,
                            stepCount,
                            totalScore: 0,
                            cohesionBoost: 0,
                            symbols: [],
                        });
                    }
                    const proc = processMap.get(pid);
                    // Aggregate RRF score across all matched symbols of this process;
                    // cohesion keeps only the best cluster cohesion seen so far.
                    proc.totalScore += item.score;
                    proc.cohesionBoost = Math.max(proc.cohesionBoost, cohesion);
                    proc.symbols.push({
                        ...symbolEntry,
                        process_id: pid,
                        step_index: step,
                    });
                }
            }
        }
        // Step 3: Rank processes by aggregate score + internal cohesion boost
        const rankedProcesses = Array.from(processMap.values())
            .map(p => ({
            ...p,
            priority: p.totalScore + (p.cohesionBoost * 0.1), // cohesion as subtle ranking signal
        }))
            .sort((a, b) => b.priority - a.priority)
            .slice(0, processLimit);
        // Step 4: Build response
        const processes = rankedProcesses.map(p => ({
            id: p.id,
            summary: p.heuristicLabel || p.label,
            priority: Math.round(p.priority * 1000) / 1000, // 3-decimal rounding for output
            symbol_count: p.symbols.length,
            process_type: p.processType,
            step_count: p.stepCount,
        }));
        const processSymbols = rankedProcesses.flatMap(p => p.symbols.slice(0, maxSymbolsPerProcess).map(s => ({
            ...s,
            // remove internal fields
        })));
        // Deduplicate process_symbols by id. NOTE(review): a symbol appearing in
        // several processes keeps only its first (highest-ranked) occurrence —
        // presumably intentional to keep the payload small; confirm.
        const seen = new Set();
        const dedupedSymbols = processSymbols.filter(s => {
            if (seen.has(s.id))
                return false;
            seen.add(s.id);
            return true;
        });
        return {
            processes,
            process_symbols: dedupedSymbols,
            definitions: definitions.slice(0, 20), // cap standalone definitions
        };
    }
|
|
458
|
+
/**
 * BM25 keyword search helper - uses KuzuDB FTS for always-fresh results.
 *
 * Strategy: probe the per-table symbol FTS indexes, augment with exact-name
 * matches for identifier-like query tokens, and only fall back to file-level
 * FTS when no symbol hit was found at all.
 *
 * @param {object} repo - Resolved repo record; only repo.id is used here.
 * @param {string} query - Free-text search query.
 * @param {number} limit - Maximum number of hits to return.
 * @returns {Promise<Array>} Hits sorted by bm25Score descending.
 */
async bm25Search(repo, query, limit) {
    // Double single quotes for safe embedding in Cypher string literals.
    const escapedQuery = query.replace(/'/g, "''");
    // One FTS index per symbol node table.
    const symbolTables = [
        { table: 'Function', index: 'function_fts', type: 'Function' },
        { table: 'Class', index: 'class_fts', type: 'Class' },
        { table: 'Method', index: 'method_fts', type: 'Method' },
        { table: 'Interface', index: 'interface_fts', type: 'Interface' },
    ];
    const symbolHits = [];
    for (const spec of symbolTables) {
        try {
            const rows = await executeQuery(repo.id, `
        CALL QUERY_FTS_INDEX('${spec.table}', '${spec.index}', '${escapedQuery}', conjunctive := false)
        RETURN node.id AS id, node.name AS name, node.filePath AS filePath, node.startLine AS startLine, node.endLine AS endLine, score AS score
        ORDER BY score DESC
        LIMIT ${limit}
      `);
            for (const row of rows) {
                const nodeId = getRowValue(row, 'id', 0);
                if (!nodeId)
                    continue;
                symbolHits.push({
                    nodeId,
                    name: getRowValue(row, 'name', 1) || '',
                    type: spec.type,
                    filePath: getRowValue(row, 'filePath', 2) || '',
                    startLine: getRowValue(row, 'startLine', 3),
                    endLine: getRowValue(row, 'endLine', 4),
                    bm25Score: Number(getRowValue(row, 'score', 5) ?? 0),
                });
            }
        }
        catch {
            // Missing FTS index for this table is expected in some repos/languages.
        }
    }
    // Break the query into lowercase identifier-like tokens.
    const rawTokens = query
        .toLowerCase()
        .split(/[^a-z0-9_]+/)
        .filter(Boolean);
    // Generic words that should not trigger exact-name lookups.
    const stopTokens = new Set(['class', 'method', 'function', 'interface', 'file', 'symbol']);
    const meaningfulTokens = rawTokens.filter(t => t.length >= 4 && !stopTokens.has(t));
    // Add exact-name hits so symbol lookup queries rank correctly.
    for (const token of meaningfulTokens) {
        try {
            const exactRows = await executeQuery(repo.id, `
        MATCH (n)
        WHERE toLower(n.name) = '${token.replace(/'/g, "''")}'
        RETURN n.id AS id, n.name AS name, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine
        LIMIT ${limit}
      `);
            for (const row of exactRows) {
                const nodeId = getRowValue(row, 'id', 0);
                if (!nodeId)
                    continue;
                symbolHits.push({
                    nodeId,
                    name: getRowValue(row, 'name', 1) || '',
                    type: inferTypeFromId(nodeId) || 'CodeElement',
                    filePath: getRowValue(row, 'filePath', 2) || '',
                    startLine: getRowValue(row, 'startLine', 3),
                    endLine: getRowValue(row, 'endLine', 4),
                    // Sentinel score: exact-name matches always outrank FTS hits.
                    bm25Score: 10_000,
                });
            }
        }
        catch {
            // ignore exact-hit fallback failures
        }
    }
    if (symbolHits.length > 0) {
        // Prefer hits whose name actually contains one of the query tokens;
        // if that filter empties the list, fall back to the unfiltered hits.
        const filteredByName = meaningfulTokens.length > 0
            ? symbolHits.filter(hit => meaningfulTokens.some(t => (hit.name || '').toLowerCase().includes(t)))
            : symbolHits;
        const candidateHits = filteredByName.length > 0 ? filteredByName : symbolHits;
        // Deduplicate by node id, keeping the highest-scoring occurrence.
        const deduped = new Map();
        for (const hit of candidateHits) {
            const existing = deduped.get(hit.nodeId);
            if (!existing || hit.bm25Score > existing.bm25Score) {
                deduped.set(hit.nodeId, hit);
            }
        }
        return Array.from(deduped.values())
            .sort((a, b) => b.bm25Score - a.bm25Score)
            .slice(0, limit);
    }
    // Fallback to file-level FTS when symbol indexes are missing/empty.
    try {
        const fileRows = await executeQuery(repo.id, `
      CALL QUERY_FTS_INDEX('File', 'file_fts', '${escapedQuery}', conjunctive := false)
      RETURN node.filePath AS filePath, score AS score
      ORDER BY score DESC
      LIMIT ${limit}
    `);
        return fileRows.map((row) => {
            const filePath = getRowValue(row, 'filePath', 0) || '';
            // Display name is the basename of the matched file.
            const fileName = filePath.split('/').pop() || filePath;
            return {
                name: fileName,
                type: 'File',
                filePath,
                bm25Score: Number(getRowValue(row, 'score', 1) ?? 0),
            };
        });
    }
    catch (err) {
        console.error('GitNexus: BM25/FTS search failed (FTS indexes may not exist) -', err.message);
        return [];
    }
}
|
|
571
|
+
/**
|
|
572
|
+
* Semantic vector search helper
|
|
573
|
+
*/
|
|
574
|
+
async semanticSearch(repo, query, limit) {
|
|
575
|
+
try {
|
|
576
|
+
// Check if embedding table exists before loading the model (avoids heavy model init when embeddings are off)
|
|
577
|
+
const tableCheck = await executeQuery(repo.id, `MATCH (e:CodeEmbedding) RETURN COUNT(*) AS cnt LIMIT 1`);
|
|
578
|
+
if (!tableCheck.length || (tableCheck[0].cnt ?? tableCheck[0][0]) === 0)
|
|
579
|
+
return [];
|
|
580
|
+
const { embedQuery, getEmbeddingDims } = await import('../core/embedder.js');
|
|
581
|
+
const queryVec = await embedQuery(query);
|
|
582
|
+
const dims = getEmbeddingDims();
|
|
583
|
+
const queryVecStr = `[${queryVec.join(',')}]`;
|
|
584
|
+
const vectorQuery = `
|
|
585
|
+
CALL QUERY_VECTOR_INDEX('CodeEmbedding', 'code_embedding_idx',
|
|
586
|
+
CAST(${queryVecStr} AS FLOAT[${dims}]), ${limit})
|
|
587
|
+
YIELD node AS emb, distance
|
|
588
|
+
WITH emb, distance
|
|
589
|
+
WHERE distance < 0.6
|
|
590
|
+
RETURN emb.nodeId AS nodeId, distance
|
|
591
|
+
ORDER BY distance
|
|
592
|
+
`;
|
|
593
|
+
const embResults = await executeQuery(repo.id, vectorQuery);
|
|
594
|
+
if (embResults.length === 0)
|
|
595
|
+
return [];
|
|
596
|
+
const results = [];
|
|
597
|
+
for (const embRow of embResults) {
|
|
598
|
+
const nodeId = embRow.nodeId ?? embRow[0];
|
|
599
|
+
const distance = embRow.distance ?? embRow[1];
|
|
600
|
+
const labelEndIdx = nodeId.indexOf(':');
|
|
601
|
+
const label = labelEndIdx > 0 ? nodeId.substring(0, labelEndIdx) : 'Unknown';
|
|
602
|
+
// Validate label against known node types to prevent Cypher injection
|
|
603
|
+
if (!VALID_NODE_LABELS.has(label))
|
|
604
|
+
continue;
|
|
605
|
+
try {
|
|
606
|
+
const escapedId = nodeId.replace(/'/g, "''");
|
|
607
|
+
const nodeQuery = label === 'File'
|
|
608
|
+
? `MATCH (n:File {id: '${escapedId}'}) RETURN n.name AS name, n.filePath AS filePath`
|
|
609
|
+
: `MATCH (n:\`${label}\` {id: '${escapedId}'}) RETURN n.name AS name, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine`;
|
|
610
|
+
const nodeRows = await executeQuery(repo.id, nodeQuery);
|
|
611
|
+
if (nodeRows.length > 0) {
|
|
612
|
+
const nodeRow = nodeRows[0];
|
|
613
|
+
results.push({
|
|
614
|
+
nodeId,
|
|
615
|
+
name: nodeRow.name ?? nodeRow[0] ?? '',
|
|
616
|
+
type: label,
|
|
617
|
+
filePath: nodeRow.filePath ?? nodeRow[1] ?? '',
|
|
618
|
+
distance,
|
|
619
|
+
startLine: label !== 'File' ? (nodeRow.startLine ?? nodeRow[2]) : undefined,
|
|
620
|
+
endLine: label !== 'File' ? (nodeRow.endLine ?? nodeRow[3]) : undefined,
|
|
621
|
+
});
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
catch { }
|
|
625
|
+
}
|
|
626
|
+
return results;
|
|
627
|
+
}
|
|
628
|
+
catch {
|
|
629
|
+
// Expected when embeddings are disabled — silently fall back to BM25-only
|
|
630
|
+
return [];
|
|
631
|
+
}
|
|
632
|
+
}
|
|
633
|
+
async executeCypher(repoName, query) {
|
|
634
|
+
const repo = await this.resolveRepo(repoName);
|
|
635
|
+
return this.cypher(repo, { query });
|
|
636
|
+
}
|
|
637
|
+
async cypher(repo, params) {
|
|
638
|
+
await this.ensureInitialized(repo.id);
|
|
639
|
+
if (!isKuzuReady(repo.id)) {
|
|
640
|
+
return { error: 'KuzuDB not ready. Index may be corrupted.' };
|
|
641
|
+
}
|
|
642
|
+
try {
|
|
643
|
+
const result = await executeQuery(repo.id, params.query);
|
|
644
|
+
return result;
|
|
645
|
+
}
|
|
646
|
+
catch (err) {
|
|
647
|
+
return { error: err.message || 'Query failed' };
|
|
648
|
+
}
|
|
649
|
+
}
|
|
650
|
+
/**
|
|
651
|
+
* Format raw Cypher result rows as a markdown table for LLM readability.
|
|
652
|
+
* Falls back to raw result if rows aren't tabular objects.
|
|
653
|
+
*/
|
|
654
|
+
formatCypherAsMarkdown(result) {
|
|
655
|
+
if (!Array.isArray(result) || result.length === 0)
|
|
656
|
+
return result;
|
|
657
|
+
const normalizedRows = result.map((row) => {
|
|
658
|
+
if (!row || typeof row !== 'object')
|
|
659
|
+
return row;
|
|
660
|
+
const next = { ...row };
|
|
661
|
+
if (Object.prototype.hasOwnProperty.call(next, 'type')) {
|
|
662
|
+
const typeValue = next.type;
|
|
663
|
+
if (typeValue === null || typeValue === undefined || (typeof typeValue === 'string' && typeValue.trim() === '')) {
|
|
664
|
+
const inferred = inferTypeFromId(next.id ?? next.uid ?? next.sourceId ?? next.targetId);
|
|
665
|
+
if (inferred)
|
|
666
|
+
next.type = inferred;
|
|
667
|
+
}
|
|
668
|
+
}
|
|
669
|
+
if (Object.prototype.hasOwnProperty.call(next, 'kind')) {
|
|
670
|
+
const kindValue = next.kind;
|
|
671
|
+
if (kindValue === null || kindValue === undefined || (typeof kindValue === 'string' && kindValue.trim() === '')) {
|
|
672
|
+
const inferred = inferTypeFromId(next.id ?? next.uid ?? next.sourceId ?? next.targetId);
|
|
673
|
+
if (inferred)
|
|
674
|
+
next.kind = inferred;
|
|
675
|
+
}
|
|
676
|
+
}
|
|
677
|
+
return next;
|
|
678
|
+
});
|
|
679
|
+
const firstRow = normalizedRows[0];
|
|
680
|
+
if (typeof firstRow !== 'object' || firstRow === null)
|
|
681
|
+
return result;
|
|
682
|
+
const keys = Object.keys(firstRow);
|
|
683
|
+
if (keys.length === 0)
|
|
684
|
+
return result;
|
|
685
|
+
const header = '| ' + keys.join(' | ') + ' |';
|
|
686
|
+
const separator = '| ' + keys.map(() => '---').join(' | ') + ' |';
|
|
687
|
+
const dataRows = normalizedRows.map((row) => '| ' + keys.map(k => {
|
|
688
|
+
const v = row[k];
|
|
689
|
+
if (v === null || v === undefined)
|
|
690
|
+
return '';
|
|
691
|
+
if (typeof v === 'object')
|
|
692
|
+
return JSON.stringify(v);
|
|
693
|
+
return String(v);
|
|
694
|
+
}).join(' | ') + ' |');
|
|
695
|
+
return {
|
|
696
|
+
markdown: [header, separator, ...dataRows].join('\n'),
|
|
697
|
+
row_count: normalizedRows.length,
|
|
698
|
+
};
|
|
699
|
+
}
|
|
700
|
+
/**
|
|
701
|
+
* Aggregate same-named clusters: group by heuristicLabel, sum symbols,
|
|
702
|
+
* weighted-average cohesion, filter out tiny clusters (<5 symbols).
|
|
703
|
+
* Raw communities stay intact in KuzuDB for Cypher queries.
|
|
704
|
+
*/
|
|
705
|
+
aggregateClusters(clusters) {
|
|
706
|
+
const groups = new Map();
|
|
707
|
+
for (const c of clusters) {
|
|
708
|
+
const label = c.heuristicLabel || c.label || 'Unknown';
|
|
709
|
+
const symbols = c.symbolCount || 0;
|
|
710
|
+
const cohesion = c.cohesion || 0;
|
|
711
|
+
const existing = groups.get(label);
|
|
712
|
+
if (!existing) {
|
|
713
|
+
groups.set(label, { ids: [c.id], totalSymbols: symbols, weightedCohesion: cohesion * symbols, largest: c });
|
|
714
|
+
}
|
|
715
|
+
else {
|
|
716
|
+
existing.ids.push(c.id);
|
|
717
|
+
existing.totalSymbols += symbols;
|
|
718
|
+
existing.weightedCohesion += cohesion * symbols;
|
|
719
|
+
if (symbols > (existing.largest.symbolCount || 0)) {
|
|
720
|
+
existing.largest = c;
|
|
721
|
+
}
|
|
722
|
+
}
|
|
723
|
+
}
|
|
724
|
+
return Array.from(groups.entries())
|
|
725
|
+
.map(([label, g]) => ({
|
|
726
|
+
id: g.largest.id,
|
|
727
|
+
label,
|
|
728
|
+
heuristicLabel: label,
|
|
729
|
+
symbolCount: g.totalSymbols,
|
|
730
|
+
cohesion: g.totalSymbols > 0 ? g.weightedCohesion / g.totalSymbols : 0,
|
|
731
|
+
subCommunities: g.ids.length,
|
|
732
|
+
}))
|
|
733
|
+
.filter(c => c.symbolCount >= 5)
|
|
734
|
+
.sort((a, b) => b.symbolCount - a.symbolCount);
|
|
735
|
+
}
|
|
736
|
+
async overview(repo, params) {
|
|
737
|
+
await this.ensureInitialized(repo.id);
|
|
738
|
+
const limit = params.limit || 20;
|
|
739
|
+
const result = {
|
|
740
|
+
repo: repo.name,
|
|
741
|
+
repoPath: repo.repoPath,
|
|
742
|
+
stats: repo.stats,
|
|
743
|
+
indexedAt: repo.indexedAt,
|
|
744
|
+
lastCommit: repo.lastCommit,
|
|
745
|
+
};
|
|
746
|
+
if (params.showClusters !== false) {
|
|
747
|
+
try {
|
|
748
|
+
// Fetch more raw communities than the display limit so aggregation has enough data
|
|
749
|
+
const rawLimit = Math.max(limit * 5, 200);
|
|
750
|
+
const clusters = await executeQuery(repo.id, `
|
|
751
|
+
MATCH (c:Community)
|
|
752
|
+
RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
|
|
753
|
+
ORDER BY c.symbolCount DESC
|
|
754
|
+
LIMIT ${rawLimit}
|
|
755
|
+
`);
|
|
756
|
+
const rawClusters = clusters.map((c) => ({
|
|
757
|
+
id: c.id || c[0],
|
|
758
|
+
label: c.label || c[1],
|
|
759
|
+
heuristicLabel: c.heuristicLabel || c[2],
|
|
760
|
+
cohesion: c.cohesion || c[3],
|
|
761
|
+
symbolCount: c.symbolCount || c[4],
|
|
762
|
+
}));
|
|
763
|
+
result.clusters = this.aggregateClusters(rawClusters).slice(0, limit);
|
|
764
|
+
}
|
|
765
|
+
catch {
|
|
766
|
+
result.clusters = [];
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
if (params.showProcesses !== false) {
|
|
770
|
+
try {
|
|
771
|
+
const processes = await executeQuery(repo.id, `
|
|
772
|
+
MATCH (p:Process)
|
|
773
|
+
RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
|
|
774
|
+
ORDER BY p.stepCount DESC
|
|
775
|
+
LIMIT ${limit}
|
|
776
|
+
`);
|
|
777
|
+
result.processes = processes.map((p) => ({
|
|
778
|
+
id: p.id || p[0],
|
|
779
|
+
label: p.label || p[1],
|
|
780
|
+
heuristicLabel: p.heuristicLabel || p[2],
|
|
781
|
+
processType: p.processType || p[3],
|
|
782
|
+
stepCount: p.stepCount || p[4],
|
|
783
|
+
}));
|
|
784
|
+
}
|
|
785
|
+
catch {
|
|
786
|
+
result.processes = [];
|
|
787
|
+
}
|
|
788
|
+
}
|
|
789
|
+
return result;
|
|
790
|
+
}
|
|
791
|
+
/**
 * Context tool — 360-degree symbol view with categorized refs.
 * Disambiguation when multiple symbols share a name.
 * UID-based direct lookup. No cluster in output.
 *
 * @param {object} repo - Resolved repo record.
 * @param {object} params - { name?, uid?, file_path?, include_content? };
 *   one of name/uid is required, file_path narrows name matches.
 * @returns {Promise<object>} { status: 'found', symbol, incoming, outgoing, processes },
 *   a { status: 'ambiguous', candidates } payload, or { error }.
 */
async context(repo, params) {
    await this.ensureInitialized(repo.id);
    const { name, uid, file_path, include_content } = params;
    if (!name && !uid) {
        return { error: 'Either "name" or "uid" parameter is required.' };
    }
    // Step 1: Find the symbol
    let symbols;
    if (uid) {
        // Direct id lookup — unambiguous, at most one row.
        const escaped = uid.replace(/'/g, "''");
        symbols = await executeQuery(repo.id, `
      MATCH (n {id: '${escaped}'})
      RETURN n.id AS id, n.name AS name, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
      LIMIT 1
    `);
    }
    else {
        const escaped = name.replace(/'/g, "''");
        // Names containing '/' or ':' look like ids/qualified paths, so also try an id match.
        const isQualified = name.includes('/') || name.includes(':');
        let whereClause;
        if (file_path) {
            const fpEscaped = file_path.replace(/'/g, "''");
            whereClause = `WHERE n.name = '${escaped}' AND n.filePath CONTAINS '${fpEscaped}'`;
        }
        else if (isQualified) {
            whereClause = `WHERE n.id = '${escaped}' OR n.name = '${escaped}'`;
        }
        else {
            whereClause = `WHERE n.name = '${escaped}'`;
        }
        symbols = await executeQuery(repo.id, `
      MATCH (n) ${whereClause}
      RETURN n.id AS id, n.name AS name, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
      LIMIT 10
    `);
    }
    if (symbols.length === 0) {
        return { error: `Symbol '${name || uid}' not found` };
    }
    // Step 2: Disambiguation — a name lookup may match several symbols.
    if (symbols.length > 1 && !uid) {
        return {
            status: 'ambiguous',
            message: `Found ${symbols.length} symbols matching '${name}'. Use uid or file_path to disambiguate.`,
            candidates: symbols.map((s) => ({
                uid: getRowValue(s, 'id', 0),
                name: getRowValue(s, 'name', 1),
                kind: inferTypeFromId(getRowValue(s, 'id', 0)) || 'CodeElement',
                filePath: getRowValue(s, 'filePath', 2),
                line: getRowValue(s, 'startLine', 3),
            })),
        };
    }
    // Step 3: Build full context
    const sym = symbols[0];
    const symNodeId = getRowValue(sym, 'id', 0) || '';
    const symId = symNodeId.replace(/'/g, "''");
    const symKind = inferTypeFromId(symNodeId) || 'CodeElement';
    const symFilePath = getRowValue(sym, 'filePath', 2) || '';
    // Categorized incoming refs
    let incomingRows = await executeQuery(repo.id, `
    MATCH (caller)-[r:CodeRelation]->(n {id: '${symId}'})
    WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS']
    RETURN r.type AS relType, caller.id AS uid, caller.name AS name, caller.filePath AS filePath
    LIMIT 30
  `);
    // Categorized outgoing refs
    let outgoingRows = await executeQuery(repo.id, `
    MATCH (n {id: '${symId}'})-[r:CodeRelation]->(target)
    WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS']
    RETURN r.type AS relType, target.id AS uid, target.name AS name, target.filePath AS filePath
    LIMIT 30
  `);
    // Process participation
    let processRows = [];
    try {
        processRows = await executeQuery(repo.id, `
      MATCH (n {id: '${symId}'})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
      RETURN p.id AS pid, p.heuristicLabel AS label, r.step AS step, p.stepCount AS stepCount
    `);
    }
    catch { /* no process info */ }
    // Classes/interfaces are often represented by method/property level edges.
    // When direct edges are empty, fall back to file-scoped references/processes.
    if ((symKind === 'Class' || symKind === 'Interface') && symFilePath) {
        const escapedPath = symFilePath.replace(/'/g, "''");
        if (incomingRows.length === 0) {
            try {
                incomingRows = await executeQuery(repo.id, `
          MATCH (caller)-[r:CodeRelation]->(n)
          WHERE n.filePath = '${escapedPath}' AND r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS']
          RETURN r.type AS relType, caller.id AS uid, caller.name AS name, caller.filePath AS filePath
          LIMIT 30
        `);
            }
            catch { /* ignore fallback failures */ }
        }
        if (outgoingRows.length === 0) {
            try {
                outgoingRows = await executeQuery(repo.id, `
          MATCH (n)-[r:CodeRelation]->(target)
          WHERE n.filePath = '${escapedPath}' AND r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS']
          RETURN r.type AS relType, target.id AS uid, target.name AS name, target.filePath AS filePath
          LIMIT 30
        `);
            }
            catch { /* ignore fallback failures */ }
        }
        if (processRows.length === 0) {
            try {
                const scopedProcessRows = await executeQuery(repo.id, `
          MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
          WHERE n.filePath = '${escapedPath}'
          RETURN p.id AS pid, p.heuristicLabel AS label, r.step AS step, p.stepCount AS stepCount
          LIMIT 200
        `);
                // Collapse file-scoped rows to one entry per process; the
                // earliest step number wins.
                const minStepByProcess = new Map();
                for (const row of scopedProcessRows) {
                    const pid = getRowValue(row, 'pid', 0);
                    if (!pid)
                        continue;
                    const step = Number(getRowValue(row, 'step', 2) ?? Number.MAX_SAFE_INTEGER);
                    const existing = minStepByProcess.get(pid);
                    if (!existing || step < existing.step) {
                        minStepByProcess.set(pid, {
                            pid,
                            label: getRowValue(row, 'label', 1),
                            step,
                            stepCount: getRowValue(row, 'stepCount', 3),
                        });
                    }
                }
                processRows = Array.from(minStepByProcess.values());
            }
            catch { /* ignore fallback failures */ }
        }
    }
    // Helper to categorize refs: buckets rows by lowercased relation type
    // ('calls', 'imports', 'extends', 'implements').
    const categorize = (rows) => {
        const cats = {};
        for (const row of rows) {
            const relType = String(getRowValue(row, 'relType', 0) || '').toLowerCase();
            const uid = getRowValue(row, 'uid', 1);
            const entry = {
                uid,
                name: getRowValue(row, 'name', 2),
                filePath: getRowValue(row, 'filePath', 3),
                // The ref queries return no 'kind' column, so this normally
                // falls through to inferring the kind from the node id.
                kind: getRowValue(row, 'kind', 4) || inferTypeFromId(uid) || 'CodeElement',
            };
            if (!cats[relType])
                cats[relType] = [];
            cats[relType].push(entry);
        }
        return cats;
    };
    return {
        status: 'found',
        symbol: {
            uid: getRowValue(sym, 'id', 0),
            name: getRowValue(sym, 'name', 1),
            kind: symKind,
            filePath: symFilePath,
            startLine: getRowValue(sym, 'startLine', 3),
            endLine: getRowValue(sym, 'endLine', 4),
            // Only attach content when it was both requested and returned.
            ...(include_content && getRowValue(sym, 'content', 5) ? { content: getRowValue(sym, 'content', 5) } : {}),
        },
        incoming: categorize(incomingRows),
        outgoing: categorize(outgoingRows),
        processes: processRows.map((r) => ({
            id: getRowValue(r, 'pid', 0),
            name: getRowValue(r, 'label', 1),
            step_index: getRowValue(r, 'step', 2),
            step_count: getRowValue(r, 'stepCount', 3),
        })),
    };
}
|
|
972
|
+
/**
 * Legacy explore — kept for backwards compatibility with resources.ts.
 * Routes cluster/process types to direct graph queries.
 *
 * @param {object} repo - Resolved repo record.
 * @param {object} params - { name, type } with type in 'symbol' | 'cluster' | 'process'.
 * @returns {Promise<object>} Type-specific payload or { error }.
 */
async explore(repo, params) {
    await this.ensureInitialized(repo.id);
    const { name, type } = params;
    if (type === 'symbol') {
        // Symbol exploration is handled by the newer context tool.
        return this.context(repo, { name });
    }
    if (type === 'cluster') {
        const escaped = name.replace(/'/g, "''");
        // Match on either the raw or the heuristic label.
        const clusterQuery = `
      MATCH (c:Community)
      WHERE c.label = '${escaped}' OR c.heuristicLabel = '${escaped}'
      RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
    `;
        const clusters = await executeQuery(repo.id, clusterQuery);
        if (clusters.length === 0)
            return { error: `Cluster '${name}' not found` };
        const rawClusters = clusters.map((c) => ({
            id: c.id || c[0], label: c.label || c[1], heuristicLabel: c.heuristicLabel || c[2],
            cohesion: c.cohesion || c[3], symbolCount: c.symbolCount || c[4],
        }));
        // Aggregate all matching sub-communities: total symbols and
        // symbol-weighted cohesion.
        let totalSymbols = 0, weightedCohesion = 0;
        for (const c of rawClusters) {
            const s = c.symbolCount || 0;
            totalSymbols += s;
            weightedCohesion += (c.cohesion || 0) * s;
        }
        // Sample of member symbols across all matching sub-communities.
        const members = await executeQuery(repo.id, `
      MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
      WHERE c.label = '${escaped}' OR c.heuristicLabel = '${escaped}'
      RETURN DISTINCT n.id AS id, n.name AS name, n.filePath AS filePath
      LIMIT 30
    `);
        return {
            cluster: {
                id: rawClusters[0].id,
                label: rawClusters[0].heuristicLabel || rawClusters[0].label,
                heuristicLabel: rawClusters[0].heuristicLabel || rawClusters[0].label,
                cohesion: totalSymbols > 0 ? weightedCohesion / totalSymbols : 0,
                symbolCount: totalSymbols,
                subCommunities: rawClusters.length,
            },
            members: members.map((m) => ({
                name: getRowValue(m, 'name', 1),
                type: inferTypeFromId(getRowValue(m, 'id', 0)) || 'CodeElement',
                filePath: getRowValue(m, 'filePath', 2),
            })),
        };
    }
    if (type === 'process') {
        const processes = await executeQuery(repo.id, `
      MATCH (p:Process)
      WHERE p.label = '${name.replace(/'/g, "''")}' OR p.heuristicLabel = '${name.replace(/'/g, "''")}'
      RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
      LIMIT 1
    `);
        if (processes.length === 0)
            return { error: `Process '${name}' not found` };
        const proc = processes[0];
        const procId = proc.id || proc[0];
        // Ordered steps of the matched process.
        const steps = await executeQuery(repo.id, `
      MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: '${procId}'})
      RETURN n.id AS id, n.name AS name, n.filePath AS filePath, r.step AS step
      ORDER BY r.step
    `);
        return {
            process: {
                id: procId, label: proc.label || proc[1], heuristicLabel: proc.heuristicLabel || proc[2],
                processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
            },
            steps: steps.map((s) => ({
                step: getRowValue(s, 'step', 3),
                name: getRowValue(s, 'name', 1),
                type: inferTypeFromId(getRowValue(s, 'id', 0)) || 'CodeElement',
                filePath: getRowValue(s, 'filePath', 2),
            })),
        };
    }
    return { error: 'Invalid type. Use: symbol, cluster, or process' };
}
|
|
1055
|
+
/**
|
|
1056
|
+
* Detect changes — git-diff based impact analysis.
|
|
1057
|
+
* Maps changed lines to indexed symbols, then finds affected processes.
|
|
1058
|
+
*/
|
|
1059
|
+
async detectChanges(repo, params) {
|
|
1060
|
+
await this.ensureInitialized(repo.id);
|
|
1061
|
+
const scope = params.scope || 'unstaged';
|
|
1062
|
+
const { execSync } = await import('child_process');
|
|
1063
|
+
// Build git diff command based on scope
|
|
1064
|
+
let diffCmd;
|
|
1065
|
+
switch (scope) {
|
|
1066
|
+
case 'staged':
|
|
1067
|
+
diffCmd = 'git diff --staged --name-only';
|
|
1068
|
+
break;
|
|
1069
|
+
case 'all':
|
|
1070
|
+
diffCmd = 'git diff HEAD --name-only';
|
|
1071
|
+
break;
|
|
1072
|
+
case 'compare':
|
|
1073
|
+
if (!params.base_ref)
|
|
1074
|
+
return { error: 'base_ref is required for "compare" scope' };
|
|
1075
|
+
diffCmd = `git diff ${params.base_ref} --name-only`;
|
|
1076
|
+
break;
|
|
1077
|
+
case 'unstaged':
|
|
1078
|
+
default:
|
|
1079
|
+
diffCmd = 'git diff --name-only';
|
|
1080
|
+
break;
|
|
1081
|
+
}
|
|
1082
|
+
let changedFiles;
|
|
1083
|
+
try {
|
|
1084
|
+
const output = execSync(diffCmd, { cwd: repo.repoPath, encoding: 'utf-8' });
|
|
1085
|
+
changedFiles = output.trim().split('\n').filter(f => f.length > 0);
|
|
1086
|
+
}
|
|
1087
|
+
catch (err) {
|
|
1088
|
+
return { error: `Git diff failed: ${err.message}` };
|
|
1089
|
+
}
|
|
1090
|
+
if (changedFiles.length === 0) {
|
|
1091
|
+
return {
|
|
1092
|
+
summary: { changed_count: 0, affected_count: 0, risk_level: 'none', message: 'No changes detected.' },
|
|
1093
|
+
changed_symbols: [],
|
|
1094
|
+
affected_processes: [],
|
|
1095
|
+
};
|
|
1096
|
+
}
|
|
1097
|
+
// Map changed files to indexed symbols
|
|
1098
|
+
const changedSymbols = [];
|
|
1099
|
+
for (const file of changedFiles) {
|
|
1100
|
+
const escaped = file.replace(/\\/g, '/').replace(/'/g, "''");
|
|
1101
|
+
try {
|
|
1102
|
+
const symbols = await executeQuery(repo.id, `
|
|
1103
|
+
MATCH (n) WHERE n.filePath CONTAINS '${escaped}'
|
|
1104
|
+
RETURN n.id AS id, n.name AS name, n.filePath AS filePath
|
|
1105
|
+
LIMIT 20
|
|
1106
|
+
`);
|
|
1107
|
+
for (const sym of symbols) {
|
|
1108
|
+
const id = getRowValue(sym, 'id', 0) || '';
|
|
1109
|
+
changedSymbols.push({
|
|
1110
|
+
id,
|
|
1111
|
+
name: getRowValue(sym, 'name', 1),
|
|
1112
|
+
type: inferTypeFromId(id) || 'CodeElement',
|
|
1113
|
+
filePath: getRowValue(sym, 'filePath', 2),
|
|
1114
|
+
change_type: 'Modified',
|
|
1115
|
+
});
|
|
1116
|
+
}
|
|
1117
|
+
}
|
|
1118
|
+
catch { /* skip */ }
|
|
1119
|
+
}
|
|
1120
|
+
// Find affected processes
|
|
1121
|
+
const affectedProcesses = new Map();
|
|
1122
|
+
for (const sym of changedSymbols) {
|
|
1123
|
+
const escaped = sym.id.replace(/'/g, "''");
|
|
1124
|
+
try {
|
|
1125
|
+
const procs = await executeQuery(repo.id, `
|
|
1126
|
+
MATCH (n {id: '${escaped}'})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
|
|
1127
|
+
RETURN p.id AS pid, p.heuristicLabel AS label, p.processType AS processType, p.stepCount AS stepCount, r.step AS step
|
|
1128
|
+
`);
|
|
1129
|
+
for (const proc of procs) {
|
|
1130
|
+
const pid = proc.pid || proc[0];
|
|
1131
|
+
if (!affectedProcesses.has(pid)) {
|
|
1132
|
+
affectedProcesses.set(pid, {
|
|
1133
|
+
id: pid,
|
|
1134
|
+
name: proc.label || proc[1],
|
|
1135
|
+
process_type: proc.processType || proc[2],
|
|
1136
|
+
step_count: proc.stepCount || proc[3],
|
|
1137
|
+
changed_steps: [],
|
|
1138
|
+
});
|
|
1139
|
+
}
|
|
1140
|
+
affectedProcesses.get(pid).changed_steps.push({
|
|
1141
|
+
symbol: sym.name,
|
|
1142
|
+
step: proc.step || proc[4],
|
|
1143
|
+
});
|
|
1144
|
+
}
|
|
1145
|
+
}
|
|
1146
|
+
catch { /* skip */ }
|
|
1147
|
+
}
|
|
1148
|
+
const processCount = affectedProcesses.size;
|
|
1149
|
+
const risk = processCount === 0 ? 'low' : processCount <= 5 ? 'medium' : processCount <= 15 ? 'high' : 'critical';
|
|
1150
|
+
return {
|
|
1151
|
+
summary: {
|
|
1152
|
+
changed_count: changedSymbols.length,
|
|
1153
|
+
affected_count: processCount,
|
|
1154
|
+
changed_files: changedFiles.length,
|
|
1155
|
+
risk_level: risk,
|
|
1156
|
+
},
|
|
1157
|
+
changed_symbols: changedSymbols,
|
|
1158
|
+
affected_processes: Array.from(affectedProcesses.values()),
|
|
1159
|
+
};
|
|
1160
|
+
}
|
|
1161
|
+
/**
|
|
1162
|
+
* Rename tool — multi-file coordinated rename using graph + text search.
|
|
1163
|
+
* Graph refs are tagged "graph" (high confidence).
|
|
1164
|
+
* Additional refs found via text search are tagged "text_search" (lower confidence).
|
|
1165
|
+
*/
|
|
1166
|
+
async rename(repo, params) {
|
|
1167
|
+
await this.ensureInitialized(repo.id);
|
|
1168
|
+
const { new_name, file_path } = params;
|
|
1169
|
+
const dry_run = params.dry_run ?? true;
|
|
1170
|
+
if (!params.symbol_name && !params.symbol_uid) {
|
|
1171
|
+
return { error: 'Either symbol_name or symbol_uid is required.' };
|
|
1172
|
+
}
|
|
1173
|
+
// Step 1: Find the target symbol (reuse context's lookup)
|
|
1174
|
+
const lookupResult = await this.context(repo, {
|
|
1175
|
+
name: params.symbol_name,
|
|
1176
|
+
uid: params.symbol_uid,
|
|
1177
|
+
file_path,
|
|
1178
|
+
});
|
|
1179
|
+
if (lookupResult.status === 'ambiguous') {
|
|
1180
|
+
return lookupResult; // pass disambiguation through
|
|
1181
|
+
}
|
|
1182
|
+
if (lookupResult.error) {
|
|
1183
|
+
return lookupResult;
|
|
1184
|
+
}
|
|
1185
|
+
const sym = lookupResult.symbol;
|
|
1186
|
+
const oldName = sym.name;
|
|
1187
|
+
if (oldName === new_name) {
|
|
1188
|
+
return { error: 'New name is the same as the current name.' };
|
|
1189
|
+
}
|
|
1190
|
+
// Step 2: Collect edits from graph (high confidence)
|
|
1191
|
+
const changes = new Map();
|
|
1192
|
+
const addEdit = (filePath, line, oldText, newText, confidence) => {
|
|
1193
|
+
if (!changes.has(filePath)) {
|
|
1194
|
+
changes.set(filePath, { file_path: filePath, edits: [] });
|
|
1195
|
+
}
|
|
1196
|
+
changes.get(filePath).edits.push({ line, old_text: oldText, new_text: newText, confidence });
|
|
1197
|
+
};
|
|
1198
|
+
// The definition itself
|
|
1199
|
+
if (sym.filePath && sym.startLine) {
|
|
1200
|
+
try {
|
|
1201
|
+
const content = await fs.readFile(path.join(repo.repoPath, sym.filePath), 'utf-8');
|
|
1202
|
+
const lines = content.split('\n');
|
|
1203
|
+
const lineIdx = sym.startLine - 1;
|
|
1204
|
+
if (lineIdx >= 0 && lineIdx < lines.length && lines[lineIdx].includes(oldName)) {
|
|
1205
|
+
addEdit(sym.filePath, sym.startLine, lines[lineIdx].trim(), lines[lineIdx].replace(oldName, new_name).trim(), 'graph');
|
|
1206
|
+
}
|
|
1207
|
+
}
|
|
1208
|
+
catch { /* skip */ }
|
|
1209
|
+
}
|
|
1210
|
+
// All incoming refs from graph (callers, importers, etc.)
|
|
1211
|
+
const allIncoming = [
|
|
1212
|
+
...(lookupResult.incoming.calls || []),
|
|
1213
|
+
...(lookupResult.incoming.imports || []),
|
|
1214
|
+
...(lookupResult.incoming.extends || []),
|
|
1215
|
+
...(lookupResult.incoming.implements || []),
|
|
1216
|
+
];
|
|
1217
|
+
let graphEdits = changes.size > 0 ? 1 : 0; // count definition edit
|
|
1218
|
+
for (const ref of allIncoming) {
|
|
1219
|
+
if (!ref.filePath)
|
|
1220
|
+
continue;
|
|
1221
|
+
try {
|
|
1222
|
+
const content = await fs.readFile(path.join(repo.repoPath, ref.filePath), 'utf-8');
|
|
1223
|
+
const lines = content.split('\n');
|
|
1224
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1225
|
+
if (lines[i].includes(oldName)) {
|
|
1226
|
+
addEdit(ref.filePath, i + 1, lines[i].trim(), lines[i].replace(new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g'), new_name).trim(), 'graph');
|
|
1227
|
+
graphEdits++;
|
|
1228
|
+
break; // one edit per file from graph refs
|
|
1229
|
+
}
|
|
1230
|
+
}
|
|
1231
|
+
}
|
|
1232
|
+
catch { /* skip */ }
|
|
1233
|
+
}
|
|
1234
|
+
// Step 3: Text search for refs the graph might have missed
|
|
1235
|
+
let astSearchEdits = 0;
|
|
1236
|
+
const graphFiles = new Set([sym.filePath, ...allIncoming.map(r => r.filePath)].filter(Boolean));
|
|
1237
|
+
// Simple text search across the repo for the old name (in files not already covered by graph)
|
|
1238
|
+
try {
|
|
1239
|
+
const { execSync } = await import('child_process');
|
|
1240
|
+
const rgCmd = `rg -l --type-add "code:*.{ts,tsx,js,jsx,py,go,rs,java}" -t code "\\b${oldName}\\b" .`;
|
|
1241
|
+
const output = execSync(rgCmd, { cwd: repo.repoPath, encoding: 'utf-8', timeout: 5000 });
|
|
1242
|
+
const files = output.trim().split('\n').filter(f => f.length > 0);
|
|
1243
|
+
for (const file of files) {
|
|
1244
|
+
const normalizedFile = file.replace(/\\/g, '/').replace(/^\.\//, '');
|
|
1245
|
+
if (graphFiles.has(normalizedFile))
|
|
1246
|
+
continue; // already covered by graph
|
|
1247
|
+
try {
|
|
1248
|
+
const content = await fs.readFile(path.join(repo.repoPath, normalizedFile), 'utf-8');
|
|
1249
|
+
const lines = content.split('\n');
|
|
1250
|
+
const regex = new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g');
|
|
1251
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1252
|
+
if (regex.test(lines[i])) {
|
|
1253
|
+
addEdit(normalizedFile, i + 1, lines[i].trim(), lines[i].replace(regex, new_name).trim(), 'text_search');
|
|
1254
|
+
astSearchEdits++;
|
|
1255
|
+
regex.lastIndex = 0; // reset regex
|
|
1256
|
+
}
|
|
1257
|
+
}
|
|
1258
|
+
}
|
|
1259
|
+
catch { /* skip */ }
|
|
1260
|
+
}
|
|
1261
|
+
}
|
|
1262
|
+
catch { /* rg not available or no additional matches */ }
|
|
1263
|
+
// Step 4: Apply or preview
|
|
1264
|
+
const allChanges = Array.from(changes.values());
|
|
1265
|
+
const totalEdits = allChanges.reduce((sum, c) => sum + c.edits.length, 0);
|
|
1266
|
+
if (!dry_run) {
|
|
1267
|
+
// Apply edits to files
|
|
1268
|
+
for (const change of allChanges) {
|
|
1269
|
+
try {
|
|
1270
|
+
const fullPath = path.join(repo.repoPath, change.file_path);
|
|
1271
|
+
let content = await fs.readFile(fullPath, 'utf-8');
|
|
1272
|
+
const regex = new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g');
|
|
1273
|
+
content = content.replace(regex, new_name);
|
|
1274
|
+
await fs.writeFile(fullPath, content, 'utf-8');
|
|
1275
|
+
}
|
|
1276
|
+
catch { /* skip failed files */ }
|
|
1277
|
+
}
|
|
1278
|
+
}
|
|
1279
|
+
return {
|
|
1280
|
+
status: 'success',
|
|
1281
|
+
old_name: oldName,
|
|
1282
|
+
new_name,
|
|
1283
|
+
files_affected: allChanges.length,
|
|
1284
|
+
total_edits: totalEdits,
|
|
1285
|
+
graph_edits: graphEdits,
|
|
1286
|
+
text_search_edits: astSearchEdits,
|
|
1287
|
+
changes: allChanges,
|
|
1288
|
+
applied: !dry_run,
|
|
1289
|
+
};
|
|
1290
|
+
}
|
|
1291
|
+
    /**
     * Blast-radius analysis: BFS over graph relations from a target symbol.
     * Traverses up to `maxDepth` levels in the given `direction` ('upstream'
     * = who depends on the target; otherwise downstream = what the target
     * depends on), then enriches results with affected processes/modules and
     * a coarse risk grade.
     *
     * @param repo   Resolved repo record; `repo.id` keys the graph DB.
     * @param params { target, direction, target_uid?, file_path?, maxDepth?,
     *                 relationTypes?, includeTests?, minConfidence? }
     * @returns Impact report, an error object, or an 'ambiguous' payload with candidates.
     */
    async impact(repo, params) {
        await this.ensureInitialized(repo.id);
        const { target, direction } = params;
        const maxDepth = params.maxDepth ?? 3;
        // Empty relationTypes array falls back to the default set.
        const relationTypes = params.relationTypes && params.relationTypes.length > 0
            ? params.relationTypes
            : ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS'];
        const includeTests = params.includeTests ?? false;
        const minConfidence = params.minConfidence ?? 0.3;
        // Pre-rendered Cypher fragments reused in every per-depth query below.
        const relTypeFilter = relationTypes.map(t => `'${t}'`).join(', ');
        const confidenceFilter = minConfidence > 0 ? ` AND r.confidence >= ${minConfidence}` : '';
        let targets = [];
        if (params.target_uid?.trim()) {
            // Exact-id lookup; single-quote doubling escapes for Cypher string literals.
            const escapedUid = params.target_uid.trim().replace(/'/g, "''");
            targets = await executeQuery(repo.id, `
      MATCH (n {id: '${escapedUid}'})
      RETURN n.id AS id, n.name AS name, n.filePath AS filePath, n.startLine AS startLine
      LIMIT 1
    `);
        }
        else {
            // Name lookup, optionally narrowed by a file-path substring filter.
            const escapedTarget = target.replace(/'/g, "''");
            const fileFilter = params.file_path?.trim()
                ? ` AND n.filePath CONTAINS '${params.file_path.trim().replace(/'/g, "''")}'`
                : '';
            targets = await executeQuery(repo.id, `
      MATCH (n)
      WHERE n.name = '${escapedTarget}'${fileFilter}
      RETURN n.id AS id, n.name AS name, n.filePath AS filePath, n.startLine AS startLine
      LIMIT 20
    `);
        }
        if (targets.length === 0)
            return { error: `Target '${target}' not found` };
        // Multiple name matches without an explicit uid → ask caller to disambiguate.
        if (targets.length > 1 && !params.target_uid) {
            return {
                status: 'ambiguous',
                message: `Found ${targets.length} symbols matching '${target}'. Use target_uid or file_path to disambiguate.`,
                candidates: targets.map((s) => ({
                    uid: getRowValue(s, 'id', 0),
                    name: getRowValue(s, 'name', 1),
                    kind: inferTypeFromId(getRowValue(s, 'id', 0)) || 'CodeElement',
                    filePath: getRowValue(s, 'filePath', 2),
                    line: getRowValue(s, 'startLine', 3),
                })),
            };
        }
        const sym = targets[0];
        const symId = getRowValue(sym, 'id', 0) || '';
        const symType = inferTypeFromId(symId) || 'CodeElement';
        let seedIds = [symId];
        // Class/interface references are frequently attached to member symbols.
        // Seed traversal with symbols from the same file to approximate class blast radius.
        if ((symType === 'Class' || symType === 'Interface')) {
            const targetFilePath = getRowValue(sym, 'filePath', 2);
            if (targetFilePath) {
                try {
                    const escapedPath = targetFilePath.replace(/'/g, "''");
                    const seedRows = await executeQuery(repo.id, `
      MATCH (n)
      WHERE n.filePath = '${escapedPath}'
      RETURN n.id AS id
      LIMIT 200
    `);
                    const seedSet = new Set([symId]);
                    for (const row of seedRows) {
                        const id = getRowValue(row, 'id', 0);
                        if (!id)
                            continue;
                        // Structural/aggregate nodes are not code symbols — exclude from seeds.
                        if (id.startsWith('File:') || id.startsWith('Folder:') || id.startsWith('Community:') || id.startsWith('Process:')) {
                            continue;
                        }
                        seedSet.add(id);
                    }
                    seedIds = Array.from(seedSet);
                }
                catch {
                    // fallback to class node only
                }
            }
        }
        // BFS: `visited` prevents cycles; `frontier` holds the current depth's node ids.
        const impacted = [];
        const visited = new Set(seedIds);
        let frontier = [...seedIds];
        for (let depth = 1; depth <= maxDepth && frontier.length > 0; depth++) {
            const nextFrontier = [];
            // Batch frontier nodes into a single Cypher query per depth level
            const idList = frontier.map(id => `'${id.replace(/'/g, "''")}'`).join(', ');
            // upstream: match edges pointing INTO the frontier; otherwise follow edges out of it.
            const query = direction === 'upstream'
                ? `MATCH (caller)-[r:CodeRelation]->(n) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, caller.id AS id, caller.name AS name, caller.filePath AS filePath, r.type AS relType, r.confidence AS confidence`
                : `MATCH (n)-[r:CodeRelation]->(callee) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, callee.id AS id, callee.name AS name, callee.filePath AS filePath, r.type AS relType, r.confidence AS confidence`;
            try {
                const related = await executeQuery(repo.id, query);
                for (const rel of related) {
                    const relId = getRowValue(rel, 'id', 1) || '';
                    const filePath = getRowValue(rel, 'filePath', 3) || '';
                    const relNodeType = inferTypeFromId(relId) || 'CodeElement';
                    if (!includeTests && isTestFilePath(filePath))
                        continue;
                    // Skip structural/aggregate node types, same as during seeding.
                    if (relNodeType === 'File' || relNodeType === 'Folder' || relNodeType === 'Community' || relNodeType === 'Process')
                        continue;
                    if (!visited.has(relId)) {
                        visited.add(relId);
                        nextFrontier.push(relId);
                        impacted.push({
                            depth,
                            id: relId,
                            name: getRowValue(rel, 'name', 2),
                            type: relNodeType,
                            filePath,
                            relationType: getRowValue(rel, 'relType', 4),
                            confidence: Number(getRowValue(rel, 'confidence', 5) ?? 1.0),
                        });
                    }
                }
            }
            catch { /* query failed for this depth level */ }
            frontier = nextFrontier;
        }
        // Group impacted symbols by BFS depth for the report.
        const grouped = {};
        for (const item of impacted) {
            if (!grouped[item.depth])
                grouped[item.depth] = [];
            grouped[item.depth].push(item);
        }
        // ── Enrichment: affected processes, modules, risk ──────────────
        const directCount = (grouped[1] || []).length;
        let affectedProcesses = [];
        let affectedModules = [];
        if (impacted.length > 0) {
            const allIds = impacted.map(i => `'${i.id.replace(/'/g, "''")}'`).join(', ');
            const d1Ids = (grouped[1] || []).map((i) => `'${i.id.replace(/'/g, "''")}'`).join(', ');
            // Affected processes: which execution flows are broken and at which step
            // (three independent queries run in parallel; each degrades to [] on failure).
            const [processRows, moduleRows, directModuleRows] = await Promise.all([
                executeQuery(repo.id, `
      MATCH (s)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
      WHERE s.id IN [${allIds}]
      RETURN p.heuristicLabel AS name, COUNT(DISTINCT s.id) AS hits, MIN(r.step) AS minStep, p.stepCount AS stepCount
      ORDER BY hits DESC
      LIMIT 20
    `).catch(() => []),
                executeQuery(repo.id, `
      MATCH (s)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
      WHERE s.id IN [${allIds}]
      RETURN c.heuristicLabel AS name, COUNT(DISTINCT s.id) AS hits
      ORDER BY hits DESC
      LIMIT 20
    `).catch(() => []),
                d1Ids ? executeQuery(repo.id, `
      MATCH (s)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
      WHERE s.id IN [${d1Ids}]
      RETURN DISTINCT c.heuristicLabel AS name
    `).catch(() => []) : Promise.resolve([]),
            ]);
            // Rows may be keyed by alias or positional depending on driver shape.
            affectedProcesses = processRows.map((r) => ({
                name: r.name || r[0],
                hits: r.hits || r[1],
                broken_at_step: r.minStep ?? r[2],
                step_count: r.stepCount ?? r[3],
            }));
            // Modules containing depth-1 symbols are 'direct'; the rest 'indirect'.
            const directModuleSet = new Set(directModuleRows.map((r) => r.name || r[0]));
            affectedModules = moduleRows.map((r) => {
                const name = r.name || r[0];
                return {
                    name,
                    hits: r.hits || r[1],
                    impact: directModuleSet.has(name) ? 'direct' : 'indirect',
                };
            });
        }
        // Risk scoring
        const processCount = affectedProcesses.length;
        const moduleCount = affectedModules.length;
        let risk = 'LOW';
        if (directCount >= 30 || processCount >= 5 || moduleCount >= 5 || impacted.length >= 200) {
            risk = 'CRITICAL';
        }
        else if (directCount >= 15 || processCount >= 3 || moduleCount >= 3 || impacted.length >= 100) {
            risk = 'HIGH';
        }
        else if (directCount >= 5 || impacted.length >= 30) {
            risk = 'MEDIUM';
        }
        return {
            target: {
                id: symId,
                name: getRowValue(sym, 'name', 1),
                type: symType,
                filePath: getRowValue(sym, 'filePath', 2),
            },
            direction,
            impactedCount: impacted.length,
            risk,
            summary: {
                direct: directCount,
                processes_affected: processCount,
                modules_affected: moduleCount,
            },
            affected_processes: affectedProcesses,
            affected_modules: affectedModules,
            byDepth: grouped,
        };
    }
|
|
1494
|
+
// ─── Direct Graph Queries (for resources.ts) ────────────────────
|
|
1495
|
+
/**
|
|
1496
|
+
* Query clusters (communities) directly from graph.
|
|
1497
|
+
* Used by getClustersResource — avoids legacy overview() dispatch.
|
|
1498
|
+
*/
|
|
1499
|
+
async queryClusters(repoName, limit = 100) {
|
|
1500
|
+
const repo = await this.resolveRepo(repoName);
|
|
1501
|
+
await this.ensureInitialized(repo.id);
|
|
1502
|
+
try {
|
|
1503
|
+
const rawLimit = Math.max(limit * 5, 200);
|
|
1504
|
+
const clusters = await executeQuery(repo.id, `
|
|
1505
|
+
MATCH (c:Community)
|
|
1506
|
+
RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
|
|
1507
|
+
ORDER BY c.symbolCount DESC
|
|
1508
|
+
LIMIT ${rawLimit}
|
|
1509
|
+
`);
|
|
1510
|
+
const rawClusters = clusters.map((c) => ({
|
|
1511
|
+
id: c.id || c[0],
|
|
1512
|
+
label: c.label || c[1],
|
|
1513
|
+
heuristicLabel: c.heuristicLabel || c[2],
|
|
1514
|
+
cohesion: c.cohesion || c[3],
|
|
1515
|
+
symbolCount: c.symbolCount || c[4],
|
|
1516
|
+
}));
|
|
1517
|
+
return { clusters: this.aggregateClusters(rawClusters).slice(0, limit) };
|
|
1518
|
+
}
|
|
1519
|
+
catch {
|
|
1520
|
+
return { clusters: [] };
|
|
1521
|
+
}
|
|
1522
|
+
}
|
|
1523
|
+
/**
|
|
1524
|
+
* Query processes directly from graph.
|
|
1525
|
+
* Used by getProcessesResource — avoids legacy overview() dispatch.
|
|
1526
|
+
*/
|
|
1527
|
+
async queryProcesses(repoName, limit = 50) {
|
|
1528
|
+
const repo = await this.resolveRepo(repoName);
|
|
1529
|
+
await this.ensureInitialized(repo.id);
|
|
1530
|
+
try {
|
|
1531
|
+
const processes = await executeQuery(repo.id, `
|
|
1532
|
+
MATCH (p:Process)
|
|
1533
|
+
RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
|
|
1534
|
+
ORDER BY p.stepCount DESC
|
|
1535
|
+
LIMIT ${limit}
|
|
1536
|
+
`);
|
|
1537
|
+
return {
|
|
1538
|
+
processes: processes.map((p) => ({
|
|
1539
|
+
id: p.id || p[0],
|
|
1540
|
+
label: p.label || p[1],
|
|
1541
|
+
heuristicLabel: p.heuristicLabel || p[2],
|
|
1542
|
+
processType: p.processType || p[3],
|
|
1543
|
+
stepCount: p.stepCount || p[4],
|
|
1544
|
+
})),
|
|
1545
|
+
};
|
|
1546
|
+
}
|
|
1547
|
+
catch {
|
|
1548
|
+
return { processes: [] };
|
|
1549
|
+
}
|
|
1550
|
+
}
|
|
1551
|
+
/**
|
|
1552
|
+
* Query cluster detail (members) directly from graph.
|
|
1553
|
+
* Used by getClusterDetailResource.
|
|
1554
|
+
*/
|
|
1555
|
+
async queryClusterDetail(name, repoName) {
|
|
1556
|
+
const repo = await this.resolveRepo(repoName);
|
|
1557
|
+
await this.ensureInitialized(repo.id);
|
|
1558
|
+
const escaped = name.replace(/'/g, "''");
|
|
1559
|
+
const clusterQuery = `
|
|
1560
|
+
MATCH (c:Community)
|
|
1561
|
+
WHERE c.label = '${escaped}' OR c.heuristicLabel = '${escaped}'
|
|
1562
|
+
RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
|
|
1563
|
+
`;
|
|
1564
|
+
const clusters = await executeQuery(repo.id, clusterQuery);
|
|
1565
|
+
if (clusters.length === 0)
|
|
1566
|
+
return { error: `Cluster '${name}' not found` };
|
|
1567
|
+
const rawClusters = clusters.map((c) => ({
|
|
1568
|
+
id: c.id || c[0], label: c.label || c[1], heuristicLabel: c.heuristicLabel || c[2],
|
|
1569
|
+
cohesion: c.cohesion || c[3], symbolCount: c.symbolCount || c[4],
|
|
1570
|
+
}));
|
|
1571
|
+
let totalSymbols = 0, weightedCohesion = 0;
|
|
1572
|
+
for (const c of rawClusters) {
|
|
1573
|
+
const s = c.symbolCount || 0;
|
|
1574
|
+
totalSymbols += s;
|
|
1575
|
+
weightedCohesion += (c.cohesion || 0) * s;
|
|
1576
|
+
}
|
|
1577
|
+
const members = await executeQuery(repo.id, `
|
|
1578
|
+
MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
|
|
1579
|
+
WHERE c.label = '${escaped}' OR c.heuristicLabel = '${escaped}'
|
|
1580
|
+
RETURN DISTINCT n.id AS id, n.name AS name, n.filePath AS filePath
|
|
1581
|
+
LIMIT 30
|
|
1582
|
+
`);
|
|
1583
|
+
return {
|
|
1584
|
+
cluster: {
|
|
1585
|
+
id: rawClusters[0].id,
|
|
1586
|
+
label: rawClusters[0].heuristicLabel || rawClusters[0].label,
|
|
1587
|
+
heuristicLabel: rawClusters[0].heuristicLabel || rawClusters[0].label,
|
|
1588
|
+
cohesion: totalSymbols > 0 ? weightedCohesion / totalSymbols : 0,
|
|
1589
|
+
symbolCount: totalSymbols,
|
|
1590
|
+
subCommunities: rawClusters.length,
|
|
1591
|
+
},
|
|
1592
|
+
members: members.map((m) => ({
|
|
1593
|
+
name: getRowValue(m, 'name', 1),
|
|
1594
|
+
type: inferTypeFromId(getRowValue(m, 'id', 0)) || 'CodeElement',
|
|
1595
|
+
filePath: getRowValue(m, 'filePath', 2),
|
|
1596
|
+
})),
|
|
1597
|
+
};
|
|
1598
|
+
}
|
|
1599
|
+
/**
|
|
1600
|
+
* Query process detail (steps) directly from graph.
|
|
1601
|
+
* Used by getProcessDetailResource.
|
|
1602
|
+
*/
|
|
1603
|
+
async queryProcessDetail(name, repoName) {
|
|
1604
|
+
const repo = await this.resolveRepo(repoName);
|
|
1605
|
+
await this.ensureInitialized(repo.id);
|
|
1606
|
+
const escaped = name.replace(/'/g, "''");
|
|
1607
|
+
const processes = await executeQuery(repo.id, `
|
|
1608
|
+
MATCH (p:Process)
|
|
1609
|
+
WHERE p.label = '${escaped}' OR p.heuristicLabel = '${escaped}'
|
|
1610
|
+
RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
|
|
1611
|
+
LIMIT 1
|
|
1612
|
+
`);
|
|
1613
|
+
if (processes.length === 0)
|
|
1614
|
+
return { error: `Process '${name}' not found` };
|
|
1615
|
+
const proc = processes[0];
|
|
1616
|
+
const procId = proc.id || proc[0];
|
|
1617
|
+
const steps = await executeQuery(repo.id, `
|
|
1618
|
+
MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: '${procId}'})
|
|
1619
|
+
RETURN n.id AS id, n.name AS name, n.filePath AS filePath, r.step AS step
|
|
1620
|
+
ORDER BY r.step
|
|
1621
|
+
`);
|
|
1622
|
+
return {
|
|
1623
|
+
process: {
|
|
1624
|
+
id: procId, label: proc.label || proc[1], heuristicLabel: proc.heuristicLabel || proc[2],
|
|
1625
|
+
processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
|
|
1626
|
+
},
|
|
1627
|
+
steps: steps.map((s) => ({
|
|
1628
|
+
step: getRowValue(s, 'step', 3),
|
|
1629
|
+
name: getRowValue(s, 'name', 1),
|
|
1630
|
+
type: inferTypeFromId(getRowValue(s, 'id', 0)) || 'CodeElement',
|
|
1631
|
+
filePath: getRowValue(s, 'filePath', 2),
|
|
1632
|
+
})),
|
|
1633
|
+
};
|
|
1634
|
+
}
|
|
1635
|
+
async disconnect() {
|
|
1636
|
+
await closeKuzu(); // close all connections
|
|
1637
|
+
// Note: we intentionally do NOT call disposeEmbedder() here.
|
|
1638
|
+
// ONNX Runtime's native cleanup segfaults on macOS and some Linux configs,
|
|
1639
|
+
// and importing the embedder module on Node v24+ crashes if onnxruntime
|
|
1640
|
+
// was never loaded during the session. Since process.exit(0) follows
|
|
1641
|
+
// immediately after disconnect(), the OS reclaims everything. See #38, #89.
|
|
1642
|
+
this.repos.clear();
|
|
1643
|
+
this.contextCache.clear();
|
|
1644
|
+
this.initializedRepos.clear();
|
|
1645
|
+
}
|
|
1646
|
+
}
|