@duytransipher/gitnexus 1.4.6-sipher.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +73 -0
- package/README.md +261 -0
- package/dist/cli/ai-context.d.ts +23 -0
- package/dist/cli/ai-context.js +265 -0
- package/dist/cli/analyze.d.ts +12 -0
- package/dist/cli/analyze.js +345 -0
- package/dist/cli/augment.d.ts +13 -0
- package/dist/cli/augment.js +33 -0
- package/dist/cli/clean.d.ts +10 -0
- package/dist/cli/clean.js +60 -0
- package/dist/cli/eval-server.d.ts +37 -0
- package/dist/cli/eval-server.js +389 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +137 -0
- package/dist/cli/lazy-action.d.ts +6 -0
- package/dist/cli/lazy-action.js +18 -0
- package/dist/cli/list.d.ts +6 -0
- package/dist/cli/list.js +30 -0
- package/dist/cli/mcp.d.ts +8 -0
- package/dist/cli/mcp.js +36 -0
- package/dist/cli/serve.d.ts +4 -0
- package/dist/cli/serve.js +6 -0
- package/dist/cli/setup.d.ts +8 -0
- package/dist/cli/setup.js +367 -0
- package/dist/cli/sipher-patched.d.ts +2 -0
- package/dist/cli/sipher-patched.js +77 -0
- package/dist/cli/skill-gen.d.ts +26 -0
- package/dist/cli/skill-gen.js +549 -0
- package/dist/cli/status.d.ts +6 -0
- package/dist/cli/status.js +36 -0
- package/dist/cli/tool.d.ts +60 -0
- package/dist/cli/tool.js +180 -0
- package/dist/cli/wiki.d.ts +15 -0
- package/dist/cli/wiki.js +365 -0
- package/dist/config/ignore-service.d.ts +26 -0
- package/dist/config/ignore-service.js +284 -0
- package/dist/config/supported-languages.d.ts +15 -0
- package/dist/config/supported-languages.js +16 -0
- package/dist/core/augmentation/engine.d.ts +26 -0
- package/dist/core/augmentation/engine.js +240 -0
- package/dist/core/embeddings/embedder.d.ts +60 -0
- package/dist/core/embeddings/embedder.js +251 -0
- package/dist/core/embeddings/embedding-pipeline.d.ts +51 -0
- package/dist/core/embeddings/embedding-pipeline.js +356 -0
- package/dist/core/embeddings/index.d.ts +9 -0
- package/dist/core/embeddings/index.js +9 -0
- package/dist/core/embeddings/text-generator.d.ts +24 -0
- package/dist/core/embeddings/text-generator.js +182 -0
- package/dist/core/embeddings/types.d.ts +87 -0
- package/dist/core/embeddings/types.js +32 -0
- package/dist/core/graph/graph.d.ts +2 -0
- package/dist/core/graph/graph.js +66 -0
- package/dist/core/graph/types.d.ts +66 -0
- package/dist/core/graph/types.js +1 -0
- package/dist/core/ingestion/ast-cache.d.ts +11 -0
- package/dist/core/ingestion/ast-cache.js +35 -0
- package/dist/core/ingestion/call-processor.d.ts +23 -0
- package/dist/core/ingestion/call-processor.js +793 -0
- package/dist/core/ingestion/call-routing.d.ts +68 -0
- package/dist/core/ingestion/call-routing.js +129 -0
- package/dist/core/ingestion/cluster-enricher.d.ts +38 -0
- package/dist/core/ingestion/cluster-enricher.js +170 -0
- package/dist/core/ingestion/community-processor.d.ts +39 -0
- package/dist/core/ingestion/community-processor.js +312 -0
- package/dist/core/ingestion/constants.d.ts +16 -0
- package/dist/core/ingestion/constants.js +16 -0
- package/dist/core/ingestion/entry-point-scoring.d.ts +40 -0
- package/dist/core/ingestion/entry-point-scoring.js +353 -0
- package/dist/core/ingestion/export-detection.d.ts +18 -0
- package/dist/core/ingestion/export-detection.js +231 -0
- package/dist/core/ingestion/filesystem-walker.d.ts +28 -0
- package/dist/core/ingestion/filesystem-walker.js +81 -0
- package/dist/core/ingestion/framework-detection.d.ts +54 -0
- package/dist/core/ingestion/framework-detection.js +411 -0
- package/dist/core/ingestion/heritage-processor.d.ts +28 -0
- package/dist/core/ingestion/heritage-processor.js +251 -0
- package/dist/core/ingestion/import-processor.d.ts +34 -0
- package/dist/core/ingestion/import-processor.js +398 -0
- package/dist/core/ingestion/language-config.d.ts +46 -0
- package/dist/core/ingestion/language-config.js +167 -0
- package/dist/core/ingestion/mro-processor.d.ts +45 -0
- package/dist/core/ingestion/mro-processor.js +369 -0
- package/dist/core/ingestion/named-binding-extraction.d.ts +61 -0
- package/dist/core/ingestion/named-binding-extraction.js +363 -0
- package/dist/core/ingestion/parsing-processor.d.ts +19 -0
- package/dist/core/ingestion/parsing-processor.js +315 -0
- package/dist/core/ingestion/pipeline.d.ts +6 -0
- package/dist/core/ingestion/pipeline.js +401 -0
- package/dist/core/ingestion/process-processor.d.ts +51 -0
- package/dist/core/ingestion/process-processor.js +315 -0
- package/dist/core/ingestion/resolution-context.d.ts +53 -0
- package/dist/core/ingestion/resolution-context.js +132 -0
- package/dist/core/ingestion/resolvers/csharp.d.ts +22 -0
- package/dist/core/ingestion/resolvers/csharp.js +109 -0
- package/dist/core/ingestion/resolvers/go.d.ts +19 -0
- package/dist/core/ingestion/resolvers/go.js +42 -0
- package/dist/core/ingestion/resolvers/index.d.ts +18 -0
- package/dist/core/ingestion/resolvers/index.js +13 -0
- package/dist/core/ingestion/resolvers/jvm.d.ts +23 -0
- package/dist/core/ingestion/resolvers/jvm.js +87 -0
- package/dist/core/ingestion/resolvers/php.d.ts +15 -0
- package/dist/core/ingestion/resolvers/php.js +35 -0
- package/dist/core/ingestion/resolvers/python.d.ts +19 -0
- package/dist/core/ingestion/resolvers/python.js +52 -0
- package/dist/core/ingestion/resolvers/ruby.d.ts +12 -0
- package/dist/core/ingestion/resolvers/ruby.js +15 -0
- package/dist/core/ingestion/resolvers/rust.d.ts +15 -0
- package/dist/core/ingestion/resolvers/rust.js +73 -0
- package/dist/core/ingestion/resolvers/standard.d.ts +28 -0
- package/dist/core/ingestion/resolvers/standard.js +123 -0
- package/dist/core/ingestion/resolvers/utils.d.ts +33 -0
- package/dist/core/ingestion/resolvers/utils.js +122 -0
- package/dist/core/ingestion/structure-processor.d.ts +2 -0
- package/dist/core/ingestion/structure-processor.js +36 -0
- package/dist/core/ingestion/symbol-table.d.ts +63 -0
- package/dist/core/ingestion/symbol-table.js +85 -0
- package/dist/core/ingestion/tree-sitter-queries.d.ts +15 -0
- package/dist/core/ingestion/tree-sitter-queries.js +888 -0
- package/dist/core/ingestion/type-env.d.ts +49 -0
- package/dist/core/ingestion/type-env.js +613 -0
- package/dist/core/ingestion/type-extractors/c-cpp.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/c-cpp.js +385 -0
- package/dist/core/ingestion/type-extractors/csharp.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/csharp.js +383 -0
- package/dist/core/ingestion/type-extractors/go.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/go.js +467 -0
- package/dist/core/ingestion/type-extractors/index.d.ts +22 -0
- package/dist/core/ingestion/type-extractors/index.js +31 -0
- package/dist/core/ingestion/type-extractors/jvm.d.ts +3 -0
- package/dist/core/ingestion/type-extractors/jvm.js +681 -0
- package/dist/core/ingestion/type-extractors/php.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/php.js +549 -0
- package/dist/core/ingestion/type-extractors/python.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/python.js +455 -0
- package/dist/core/ingestion/type-extractors/ruby.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/ruby.js +389 -0
- package/dist/core/ingestion/type-extractors/rust.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/rust.js +456 -0
- package/dist/core/ingestion/type-extractors/shared.d.ts +145 -0
- package/dist/core/ingestion/type-extractors/shared.js +810 -0
- package/dist/core/ingestion/type-extractors/swift.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/swift.js +137 -0
- package/dist/core/ingestion/type-extractors/types.d.ts +127 -0
- package/dist/core/ingestion/type-extractors/types.js +1 -0
- package/dist/core/ingestion/type-extractors/typescript.d.ts +2 -0
- package/dist/core/ingestion/type-extractors/typescript.js +494 -0
- package/dist/core/ingestion/utils.d.ts +138 -0
- package/dist/core/ingestion/utils.js +1290 -0
- package/dist/core/ingestion/workers/parse-worker.d.ts +122 -0
- package/dist/core/ingestion/workers/parse-worker.js +1126 -0
- package/dist/core/ingestion/workers/worker-pool.d.ts +16 -0
- package/dist/core/ingestion/workers/worker-pool.js +128 -0
- package/dist/core/lbug/csv-generator.d.ts +33 -0
- package/dist/core/lbug/csv-generator.js +366 -0
- package/dist/core/lbug/lbug-adapter.d.ts +103 -0
- package/dist/core/lbug/lbug-adapter.js +769 -0
- package/dist/core/lbug/schema.d.ts +53 -0
- package/dist/core/lbug/schema.js +430 -0
- package/dist/core/search/bm25-index.d.ts +23 -0
- package/dist/core/search/bm25-index.js +96 -0
- package/dist/core/search/hybrid-search.d.ts +49 -0
- package/dist/core/search/hybrid-search.js +118 -0
- package/dist/core/tree-sitter/parser-loader.d.ts +5 -0
- package/dist/core/tree-sitter/parser-loader.js +63 -0
- package/dist/core/wiki/generator.d.ts +120 -0
- package/dist/core/wiki/generator.js +939 -0
- package/dist/core/wiki/graph-queries.d.ts +80 -0
- package/dist/core/wiki/graph-queries.js +238 -0
- package/dist/core/wiki/html-viewer.d.ts +10 -0
- package/dist/core/wiki/html-viewer.js +297 -0
- package/dist/core/wiki/llm-client.d.ts +43 -0
- package/dist/core/wiki/llm-client.js +186 -0
- package/dist/core/wiki/prompts.d.ts +53 -0
- package/dist/core/wiki/prompts.js +174 -0
- package/dist/lib/utils.d.ts +1 -0
- package/dist/lib/utils.js +3 -0
- package/dist/mcp/compatible-stdio-transport.d.ts +25 -0
- package/dist/mcp/compatible-stdio-transport.js +200 -0
- package/dist/mcp/core/embedder.d.ts +27 -0
- package/dist/mcp/core/embedder.js +108 -0
- package/dist/mcp/core/lbug-adapter.d.ts +57 -0
- package/dist/mcp/core/lbug-adapter.js +455 -0
- package/dist/mcp/local/local-backend.d.ts +181 -0
- package/dist/mcp/local/local-backend.js +1722 -0
- package/dist/mcp/resources.d.ts +31 -0
- package/dist/mcp/resources.js +411 -0
- package/dist/mcp/server.d.ts +23 -0
- package/dist/mcp/server.js +296 -0
- package/dist/mcp/staleness.d.ts +15 -0
- package/dist/mcp/staleness.js +29 -0
- package/dist/mcp/tools.d.ts +24 -0
- package/dist/mcp/tools.js +292 -0
- package/dist/server/api.d.ts +10 -0
- package/dist/server/api.js +344 -0
- package/dist/server/mcp-http.d.ts +13 -0
- package/dist/server/mcp-http.js +100 -0
- package/dist/storage/git.d.ts +6 -0
- package/dist/storage/git.js +35 -0
- package/dist/storage/repo-manager.d.ts +138 -0
- package/dist/storage/repo-manager.js +299 -0
- package/dist/types/pipeline.d.ts +32 -0
- package/dist/types/pipeline.js +18 -0
- package/dist/unreal/bridge.d.ts +4 -0
- package/dist/unreal/bridge.js +113 -0
- package/dist/unreal/config.d.ts +6 -0
- package/dist/unreal/config.js +55 -0
- package/dist/unreal/types.d.ts +105 -0
- package/dist/unreal/types.js +1 -0
- package/hooks/claude/gitnexus-hook.cjs +238 -0
- package/hooks/claude/pre-tool-use.sh +79 -0
- package/hooks/claude/session-start.sh +42 -0
- package/package.json +100 -0
- package/scripts/ensure-cli-executable.cjs +21 -0
- package/scripts/patch-tree-sitter-swift.cjs +74 -0
- package/scripts/setup-unreal-gitnexus.ps1 +191 -0
- package/skills/gitnexus-cli.md +82 -0
- package/skills/gitnexus-debugging.md +89 -0
- package/skills/gitnexus-exploring.md +78 -0
- package/skills/gitnexus-guide.md +64 -0
- package/skills/gitnexus-impact-analysis.md +97 -0
- package/skills/gitnexus-pr-review.md +163 -0
- package/skills/gitnexus-refactoring.md +121 -0
- package/vendor/leiden/index.cjs +355 -0
- package/vendor/leiden/utils.cjs +392 -0
|
@@ -0,0 +1,1722 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Local Backend (Multi-Repo)
|
|
3
|
+
*
|
|
4
|
+
* Provides tool implementations using local .gitnexus/ indexes.
|
|
5
|
+
* Supports multiple indexed repositories via a global registry.
|
|
6
|
+
* LadybugDB connections are opened lazily per repo on first query.
|
|
7
|
+
*/
|
|
8
|
+
import fs from 'fs/promises';
|
|
9
|
+
import path from 'path';
|
|
10
|
+
import { initLbug, executeQuery, executeParameterized, closeLbug, isLbugReady } from '../core/lbug-adapter.js';
|
|
11
|
+
// Embedding imports are lazy (dynamic import) to avoid loading onnxruntime-node
|
|
12
|
+
// at MCP server startup — crashes on unsupported Node ABI versions (#89)
|
|
13
|
+
// git utilities available if needed
|
|
14
|
+
// import { isGitRepo, getCurrentCommit, getGitRoot } from '../../storage/git.js';
|
|
15
|
+
import { listRegisteredRepos, cleanupOldKuzuFiles, } from '../../storage/repo-manager.js';
|
|
16
|
+
import { ensureUnrealStorage, getUnrealStoragePaths, loadUnrealAssetManifest, loadUnrealConfig, } from '../../unreal/config.js';
|
|
17
|
+
import { expandBlueprintChain, findNativeBlueprintReferences, syncUnrealAssetManifest, } from '../../unreal/bridge.js';
|
|
18
|
+
// AI context generation is CLI-only (gitnexus analyze)
|
|
19
|
+
// import { generateAIContextFiles } from '../../cli/ai-context.js';
|
|
20
|
+
/**
 * Heuristic check for whether a file path points at a test file, used to
 * filter noise out of impact-analysis results.
 * Backslashes are normalized to forward slashes and matching is
 * case-insensitive, covering the common test layouts of all supported
 * languages (JS/TS, Go, Python, Ruby, ...).
 * @param {string} filePath - Absolute or repo-relative path, any separator.
 * @returns {boolean} true when the path matches a known test-file pattern.
 */
export function isTestFilePath(filePath) {
    const normalized = filePath.toLowerCase().replace(/\\/g, '/');
    // Patterns that may appear anywhere in the path.
    const infixes = [
        '.test.', '.spec.',
        '__tests__/', '__mocks__/',
        '/test/', '/tests/', '/testing/', '/fixtures/',
        '/spec/', '/test_', '/conftest.',
    ];
    // Patterns that must terminate the path (language-specific suffixes).
    const suffixes = ['_test.go', '_test.py', '_spec.rb', '_test.rb'];
    return infixes.some(fragment => normalized.includes(fragment))
        || suffixes.some(suffix => normalized.endsWith(suffix));
}
|
|
34
|
+
/**
 * Node labels that may be safely interpolated into LadybugDB Cypher text.
 * Any label not in this allow-list must be rejected before query
 * construction. Insertion order is preserved from the original listing.
 */
export const VALID_NODE_LABELS = new Set([
    // Filesystem / container nodes
    'File',
    'Folder',
    // Core code symbols
    'Function',
    'Class',
    'Interface',
    'Method',
    'CodeElement',
    // Graph groupings
    'Community',
    'Process',
    // Language-specific symbol kinds
    'Struct',
    'Enum',
    'Macro',
    'Typedef',
    'Union',
    'Namespace',
    'Trait',
    'Impl',
    'TypeAlias',
    'Const',
    'Static',
    'Property',
    'Record',
    'Delegate',
    'Annotation',
    'Constructor',
    'Template',
    'Module',
]);
|
|
41
|
+
/**
 * Relation types accepted when filtering impact-analysis traversals.
 * Requests naming any other relation type must be rejected.
 */
export const VALID_RELATION_TYPES = new Set([
    'CALLS',
    'IMPORTS',
    'EXTENDS',
    'IMPLEMENTS',
    'HAS_METHOD',
    'HAS_PROPERTY',
    'OVERRIDES',
    'ACCESSES',
]);
|
|
43
|
+
/**
 * Keywords that mark a mutating Cypher statement. Matching is
 * case-insensitive and word-bounded, so identifiers like "created" or
 * "offset" do not trigger it. Deliberately conservative: a write keyword
 * anywhere in the text — even inside a string literal — counts as a write.
 */
export const CYPHER_WRITE_RE = /\b(CREATE|DELETE|SET|MERGE|REMOVE|DROP|ALTER|COPY|DETACH)\b/i;
/**
 * Decide whether a user-supplied Cypher query attempts a write operation.
 * @param {string} query - Raw Cypher text supplied by the caller.
 * @returns {boolean} true when a write keyword is present.
 */
export function isWriteQuery(query) {
    const hasWriteKeyword = CYPHER_WRITE_RE.test(query);
    return hasWriteKeyword;
}
|
|
49
|
+
/**
 * Log a query failure with its context instead of silently swallowing it
 * (this replaces what used to be empty catch blocks).
 * @param {string} context - Short label identifying the failing operation.
 * @param {unknown} err - Whatever was thrown; non-Errors are stringified.
 */
function logQueryError(context, err) {
    let detail;
    if (err instanceof Error) {
        detail = err.message;
    }
    else {
        detail = String(err);
    }
    console.error(`GitNexus [${context}]: ${detail}`);
}
|
|
54
|
+
/**
 * Backend that serves MCP tool calls from locally indexed repositories.
 * Repo metadata comes from the global registry; LadybugDB connections are
 * opened lazily per repo on first query (see ensureInitialized).
 */
export class LocalBackend {
    // repo id -> handle: { id, name, repoPath, storagePath, lbugPath, indexedAt, lastCommit, stats }
    repos = new Map();
    // repo id -> lightweight context ({ projectName, stats }) built from registry stats, no DB needed
    contextCache = new Map();
    // repo ids whose LadybugDB connection has been opened via initLbug
    initializedRepos = new Set();
    // ─── Initialization ──────────────────────────────────────────────
|
|
59
|
+
/**
|
|
60
|
+
* Initialize from the global registry.
|
|
61
|
+
* Returns true if at least one repo is available.
|
|
62
|
+
*/
|
|
63
|
+
async init() {
|
|
64
|
+
await this.refreshRepos();
|
|
65
|
+
return this.repos.size > 0;
|
|
66
|
+
}
|
|
67
|
+
/**
|
|
68
|
+
* Re-read the global registry and update the in-memory repo map.
|
|
69
|
+
* New repos are added, existing repos are updated, removed repos are pruned.
|
|
70
|
+
* LadybugDB connections for removed repos are NOT closed (they idle-timeout naturally).
|
|
71
|
+
*/
|
|
72
|
+
async refreshRepos() {
|
|
73
|
+
const entries = await listRegisteredRepos({ validate: true });
|
|
74
|
+
const freshIds = new Set();
|
|
75
|
+
for (const entry of entries) {
|
|
76
|
+
const id = this.repoId(entry.name, entry.path);
|
|
77
|
+
freshIds.add(id);
|
|
78
|
+
const storagePath = entry.storagePath;
|
|
79
|
+
const lbugPath = path.join(storagePath, 'lbug');
|
|
80
|
+
// Clean up any leftover KuzuDB files from before the LadybugDB migration.
|
|
81
|
+
// If kuzu exists but lbug doesn't, warn so the user knows to re-analyze.
|
|
82
|
+
const kuzu = await cleanupOldKuzuFiles(storagePath);
|
|
83
|
+
if (kuzu.found && kuzu.needsReindex) {
|
|
84
|
+
console.error(`GitNexus: "${entry.name}" has a stale KuzuDB index. Run: gitnexus analyze ${entry.path}`);
|
|
85
|
+
}
|
|
86
|
+
const handle = {
|
|
87
|
+
id,
|
|
88
|
+
name: entry.name,
|
|
89
|
+
repoPath: entry.path,
|
|
90
|
+
storagePath,
|
|
91
|
+
lbugPath,
|
|
92
|
+
indexedAt: entry.indexedAt,
|
|
93
|
+
lastCommit: entry.lastCommit,
|
|
94
|
+
stats: entry.stats,
|
|
95
|
+
};
|
|
96
|
+
this.repos.set(id, handle);
|
|
97
|
+
// Build lightweight context (no LadybugDB needed)
|
|
98
|
+
const s = entry.stats || {};
|
|
99
|
+
this.contextCache.set(id, {
|
|
100
|
+
projectName: entry.name,
|
|
101
|
+
stats: {
|
|
102
|
+
fileCount: s.files || 0,
|
|
103
|
+
functionCount: s.nodes || 0,
|
|
104
|
+
communityCount: s.communities || 0,
|
|
105
|
+
processCount: s.processes || 0,
|
|
106
|
+
},
|
|
107
|
+
});
|
|
108
|
+
}
|
|
109
|
+
// Prune repos that no longer exist in the registry
|
|
110
|
+
for (const id of this.repos.keys()) {
|
|
111
|
+
if (!freshIds.has(id)) {
|
|
112
|
+
this.repos.delete(id);
|
|
113
|
+
this.contextCache.delete(id);
|
|
114
|
+
this.initializedRepos.delete(id);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* Generate a stable repo ID from name + path.
|
|
120
|
+
* If names collide, append a hash of the path.
|
|
121
|
+
*/
|
|
122
|
+
repoId(name, repoPath) {
|
|
123
|
+
const base = name.toLowerCase();
|
|
124
|
+
// Check for name collision with a different path
|
|
125
|
+
for (const [id, handle] of this.repos) {
|
|
126
|
+
if (id === base && handle.repoPath !== path.resolve(repoPath)) {
|
|
127
|
+
// Collision — use path hash
|
|
128
|
+
const hash = Buffer.from(repoPath).toString('base64url').slice(0, 6);
|
|
129
|
+
return `${base}-${hash}`;
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
return base;
|
|
133
|
+
}
|
|
134
|
+
// ─── Repo Resolution ─────────────────────────────────────────────
|
|
135
|
+
/**
|
|
136
|
+
* Resolve which repo to use.
|
|
137
|
+
* - If repoParam is given, match by name or path
|
|
138
|
+
* - If only 1 repo, use it
|
|
139
|
+
* - If 0 or multiple without param, throw with helpful message
|
|
140
|
+
*
|
|
141
|
+
* On a miss, re-reads the registry once in case a new repo was indexed
|
|
142
|
+
* while the MCP server was running.
|
|
143
|
+
*/
|
|
144
|
+
async resolveRepo(repoParam) {
|
|
145
|
+
const result = this.resolveRepoFromCache(repoParam);
|
|
146
|
+
if (result)
|
|
147
|
+
return result;
|
|
148
|
+
// Miss — refresh registry and try once more
|
|
149
|
+
await this.refreshRepos();
|
|
150
|
+
const retried = this.resolveRepoFromCache(repoParam);
|
|
151
|
+
if (retried)
|
|
152
|
+
return retried;
|
|
153
|
+
// Still no match — throw with helpful message
|
|
154
|
+
if (this.repos.size === 0) {
|
|
155
|
+
throw new Error('No indexed repositories. Run: gitnexus analyze');
|
|
156
|
+
}
|
|
157
|
+
if (repoParam) {
|
|
158
|
+
const names = [...this.repos.values()].map(h => h.name);
|
|
159
|
+
throw new Error(`Repository "${repoParam}" not found. Available: ${names.join(', ')}`);
|
|
160
|
+
}
|
|
161
|
+
const names = [...this.repos.values()].map(h => h.name);
|
|
162
|
+
throw new Error(`Multiple repositories indexed. Specify which one with the "repo" parameter. Available: ${names.join(', ')}`);
|
|
163
|
+
}
|
|
164
|
+
/**
|
|
165
|
+
* Try to resolve a repo from the in-memory cache. Returns null on miss.
|
|
166
|
+
*/
|
|
167
|
+
resolveRepoFromCache(repoParam) {
|
|
168
|
+
if (this.repos.size === 0)
|
|
169
|
+
return null;
|
|
170
|
+
if (repoParam) {
|
|
171
|
+
const paramLower = repoParam.toLowerCase();
|
|
172
|
+
// Match by id
|
|
173
|
+
if (this.repos.has(paramLower))
|
|
174
|
+
return this.repos.get(paramLower);
|
|
175
|
+
// Match by name (case-insensitive)
|
|
176
|
+
for (const handle of this.repos.values()) {
|
|
177
|
+
if (handle.name.toLowerCase() === paramLower)
|
|
178
|
+
return handle;
|
|
179
|
+
}
|
|
180
|
+
// Match by path (substring)
|
|
181
|
+
const resolved = path.resolve(repoParam);
|
|
182
|
+
for (const handle of this.repos.values()) {
|
|
183
|
+
if (handle.repoPath === resolved)
|
|
184
|
+
return handle;
|
|
185
|
+
}
|
|
186
|
+
// Match by partial name
|
|
187
|
+
for (const handle of this.repos.values()) {
|
|
188
|
+
if (handle.name.toLowerCase().includes(paramLower))
|
|
189
|
+
return handle;
|
|
190
|
+
}
|
|
191
|
+
return null;
|
|
192
|
+
}
|
|
193
|
+
if (this.repos.size === 1) {
|
|
194
|
+
return this.repos.values().next().value;
|
|
195
|
+
}
|
|
196
|
+
return null; // Multiple repos, no param — ambiguous
|
|
197
|
+
}
|
|
198
|
+
// ─── Lazy LadybugDB Init ────────────────────────────────────────────
|
|
199
|
+
async ensureInitialized(repoId) {
|
|
200
|
+
// Always check the actual pool — the idle timer may have evicted the connection
|
|
201
|
+
if (this.initializedRepos.has(repoId) && isLbugReady(repoId))
|
|
202
|
+
return;
|
|
203
|
+
const handle = this.repos.get(repoId);
|
|
204
|
+
if (!handle)
|
|
205
|
+
throw new Error(`Unknown repo: ${repoId}`);
|
|
206
|
+
try {
|
|
207
|
+
await initLbug(repoId, handle.lbugPath);
|
|
208
|
+
this.initializedRepos.add(repoId);
|
|
209
|
+
}
|
|
210
|
+
catch (err) {
|
|
211
|
+
// If lock error, mark as not initialized so next call retries
|
|
212
|
+
this.initializedRepos.delete(repoId);
|
|
213
|
+
throw err;
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
// ─── Public Getters ──────────────────────────────────────────────
|
|
217
|
+
/**
|
|
218
|
+
* Get context for a specific repo (or the single repo if only one).
|
|
219
|
+
*/
|
|
220
|
+
getContext(repoId) {
|
|
221
|
+
if (repoId && this.contextCache.has(repoId)) {
|
|
222
|
+
return this.contextCache.get(repoId);
|
|
223
|
+
}
|
|
224
|
+
if (this.repos.size === 1) {
|
|
225
|
+
return this.contextCache.values().next().value ?? null;
|
|
226
|
+
}
|
|
227
|
+
return null;
|
|
228
|
+
}
|
|
229
|
+
/**
|
|
230
|
+
* List all registered repos with their metadata.
|
|
231
|
+
* Re-reads the global registry so newly indexed repos are discovered
|
|
232
|
+
* without restarting the MCP server.
|
|
233
|
+
*/
|
|
234
|
+
async listRepos() {
|
|
235
|
+
await this.refreshRepos();
|
|
236
|
+
return [...this.repos.values()].map(h => ({
|
|
237
|
+
name: h.name,
|
|
238
|
+
path: h.repoPath,
|
|
239
|
+
indexedAt: h.indexedAt,
|
|
240
|
+
lastCommit: h.lastCommit,
|
|
241
|
+
stats: h.stats,
|
|
242
|
+
}));
|
|
243
|
+
}
|
|
244
|
+
// ─── Tool Dispatch ───────────────────────────────────────────────
|
|
245
|
+
async callTool(method, params) {
|
|
246
|
+
if (method === 'list_repos') {
|
|
247
|
+
return this.listRepos();
|
|
248
|
+
}
|
|
249
|
+
// Resolve repo from optional param (re-reads registry on miss)
|
|
250
|
+
const repo = await this.resolveRepo(params?.repo);
|
|
251
|
+
switch (method) {
|
|
252
|
+
case 'query':
|
|
253
|
+
return this.query(repo, params);
|
|
254
|
+
case 'cypher': {
|
|
255
|
+
const raw = await this.cypher(repo, params);
|
|
256
|
+
return this.formatCypherAsMarkdown(raw);
|
|
257
|
+
}
|
|
258
|
+
case 'context':
|
|
259
|
+
return this.context(repo, params);
|
|
260
|
+
case 'impact':
|
|
261
|
+
return this.impact(repo, params);
|
|
262
|
+
case 'detect_changes':
|
|
263
|
+
return this.detectChanges(repo, params);
|
|
264
|
+
case 'rename':
|
|
265
|
+
return this.rename(repo, params);
|
|
266
|
+
case 'sync_unreal_asset_manifest':
|
|
267
|
+
return this.syncUnrealAssetManifestTool(repo);
|
|
268
|
+
case 'find_native_blueprint_references':
|
|
269
|
+
return this.findNativeBlueprintReferencesTool(repo, params);
|
|
270
|
+
case 'expand_blueprint_chain':
|
|
271
|
+
return this.expandBlueprintChainTool(repo, params);
|
|
272
|
+
case 'find_blueprints_derived_from_native_class':
|
|
273
|
+
return this.findBlueprintsDerivedFromNativeClassTool(repo, params);
|
|
274
|
+
// Legacy aliases for backwards compatibility
|
|
275
|
+
case 'search':
|
|
276
|
+
return this.query(repo, params);
|
|
277
|
+
case 'explore':
|
|
278
|
+
return this.context(repo, { name: params?.name, ...params });
|
|
279
|
+
case 'overview':
|
|
280
|
+
return this.overview(repo, params);
|
|
281
|
+
default:
|
|
282
|
+
throw new Error(`Unknown tool: ${method}`);
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
// ─── Tool Implementations ────────────────────────────────────────
/**
 * Resolve a native Function/Method graph node from loose caller input.
 * Accepts either an exact symbol_uid, or a name — optionally qualified as
 * "Class::method" — plus optional class_name / file_path narrowing.
 * Returns { target } on a unique match, { status: 'ambiguous', candidates }
 * on multiple matches, or { error } when input is missing or nothing matches.
 */
async resolveNativeFunctionTarget(repo, params) {
    await this.ensureInitialized(repo.id);
    const qualifiedInput = params.function?.trim() || '';
    // "A::B::method" → class part is everything before the last '::'.
    const explicitClass = params.class_name?.trim()
        || (qualifiedInput.includes('::') ? qualifiedInput.split('::').slice(0, -1).join('::') : '');
    // When symbol_uid is given it takes precedence; name matching is disabled.
    const symbolName = params.symbol_uid
        ? ''
        : (qualifiedInput.includes('::') ? qualifiedInput.split('::').at(-1) : qualifiedInput);
    if (!params.symbol_uid && !symbolName) {
        return { error: 'Either "function" or "symbol_uid" is required.' };
    }
    // Empty-string parameters act as wildcards: each predicate short-circuits
    // to true when its parameter is ''.
    const rows = await executeParameterized(repo.id, `
    MATCH (n)
    WHERE labels(n)[0] IN ['Function', 'Method']
      AND (($symbolId = '') OR n.id = $symbolId)
      AND (($symbolName = '') OR n.name = $symbolName)
      AND (($filePath = '') OR n.filePath = $filePath)
    OPTIONAL MATCH (owner)-[:CodeRelation {type: 'HAS_METHOD'}]->(n)
    RETURN
      n.id AS symbolId,
      n.name AS symbolName,
      labels(n)[0] AS symbolType,
      n.filePath AS filePath,
      n.startLine AS startLine,
      owner.name AS ownerClass
    LIMIT 25
  `, {
        symbolId: params.symbol_uid || '',
        symbolName,
        filePath: params.file_path || '',
    });
    const normalizedClass = explicitClass.toLowerCase();
    const candidates = rows
        .map((row) => {
            // Rows may be keyed by column alias or by index — support both forms.
            const ownerClass = row.ownerClass || row[5] || undefined;
            // Methods are qualified with their owning class; free functions keep the bare name.
            const qualifiedName = ownerClass ? `${ownerClass}::${row.symbolName || row[1]}` : (row.symbolName || row[1]);
            return {
                symbol_id: row.symbolId || row[0],
                symbol_name: row.symbolName || row[1],
                symbol_type: row.symbolType || row[2],
                class_name: ownerClass,
                file_path: row.filePath || row[3],
                start_line: row.startLine || row[4],
                symbol_key: qualifiedName,
                qualified_name: qualifiedName,
            };
        })
        .filter((candidate) => {
            // Only enforce the class filter when the caller specified a class.
            if (!normalizedClass)
                return true;
            return (candidate.class_name || '').toLowerCase() === normalizedClass;
        });
    if (candidates.length === 0) {
        return { error: `Native function '${qualifiedInput || params.symbol_uid}' not found.` };
    }
    if (candidates.length > 1) {
        // Multiple matches: return a slim candidate list so the caller can disambiguate.
        return {
            status: 'ambiguous',
            candidates: candidates.map(candidate => ({
                symbol_id: candidate.symbol_id,
                symbol_key: candidate.symbol_key,
                class_name: candidate.class_name,
                file_path: candidate.file_path,
                start_line: candidate.start_line,
            })),
        };
    }
    return { target: candidates[0] };
}
|
|
355
|
+
shortlistBlueprintCandidates(target, manifest, maxCandidates = 200) {
|
|
356
|
+
const normalizedClass = (target.class_name || '').toLowerCase();
|
|
357
|
+
const normalizedQualified = target.qualified_name.toLowerCase();
|
|
358
|
+
const normalizedSymbol = target.symbol_key.toLowerCase();
|
|
359
|
+
const normalizedName = target.symbol_name.toLowerCase();
|
|
360
|
+
const scored = manifest.assets.map((asset) => {
|
|
361
|
+
const nativeParents = (asset.native_parents || []).map(v => v.toLowerCase());
|
|
362
|
+
const nativeFunctionRefs = (asset.native_function_refs || []).map(v => v.toLowerCase());
|
|
363
|
+
const dependencies = (asset.dependencies || []).map(v => v.toLowerCase());
|
|
364
|
+
let score = 0;
|
|
365
|
+
let reason = 'manifest';
|
|
366
|
+
if (normalizedClass && nativeParents.some(parent => parent === normalizedClass || parent.endsWith(`.${normalizedClass}`))) {
|
|
367
|
+
score = 90;
|
|
368
|
+
reason = 'native_parent';
|
|
369
|
+
}
|
|
370
|
+
if (nativeFunctionRefs.some(ref => ref === normalizedQualified || ref === normalizedSymbol || ref === normalizedName || ref.endsWith(normalizedQualified))) {
|
|
371
|
+
score = 120;
|
|
372
|
+
reason = 'native_function_ref';
|
|
373
|
+
}
|
|
374
|
+
if (dependencies.some(dep => dep.includes(normalizedQualified) || dep.includes(normalizedSymbol) || (normalizedClass && dep.includes(normalizedClass)))) {
|
|
375
|
+
score = Math.max(score, 75);
|
|
376
|
+
if (reason !== 'native_function_ref') {
|
|
377
|
+
reason = 'dependency';
|
|
378
|
+
}
|
|
379
|
+
}
|
|
380
|
+
if (score === 0 && normalizedClass && (asset.parent_class || '').toLowerCase().includes(normalizedClass)) {
|
|
381
|
+
score = 60;
|
|
382
|
+
}
|
|
383
|
+
return {
|
|
384
|
+
score,
|
|
385
|
+
candidate: {
|
|
386
|
+
asset_path: asset.asset_path,
|
|
387
|
+
generated_class: asset.generated_class,
|
|
388
|
+
parent_class: asset.parent_class,
|
|
389
|
+
reason,
|
|
390
|
+
},
|
|
391
|
+
};
|
|
392
|
+
});
|
|
393
|
+
const filtered = scored.filter(item => item.score > 0).sort((a, b) => b.score - a.score);
|
|
394
|
+
if (filtered.length === 0) {
|
|
395
|
+
return manifest.assets.slice(0, maxCandidates).map(asset => ({
|
|
396
|
+
asset_path: asset.asset_path,
|
|
397
|
+
generated_class: asset.generated_class,
|
|
398
|
+
parent_class: asset.parent_class,
|
|
399
|
+
reason: 'manifest',
|
|
400
|
+
}));
|
|
401
|
+
}
|
|
402
|
+
return filtered.slice(0, maxCandidates).map(item => item.candidate);
|
|
403
|
+
}
|
|
404
|
+
async ensureUnrealReady(repo, refreshManifest = false) {
|
|
405
|
+
const config = await loadUnrealConfig(repo.storagePath);
|
|
406
|
+
if (!config) {
|
|
407
|
+
const paths = getUnrealStoragePaths(repo.storagePath);
|
|
408
|
+
return {
|
|
409
|
+
error: `Unreal analyzer is not configured for this repo. Create ${paths.config_path} with editor_cmd and project_path.`,
|
|
410
|
+
};
|
|
411
|
+
}
|
|
412
|
+
await ensureUnrealStorage(repo.storagePath);
|
|
413
|
+
const paths = getUnrealStoragePaths(repo.storagePath);
|
|
414
|
+
let manifest = refreshManifest ? null : await loadUnrealAssetManifest(repo.storagePath);
|
|
415
|
+
let manifestRefreshed = false;
|
|
416
|
+
if (!manifest) {
|
|
417
|
+
const syncResult = await syncUnrealAssetManifest(repo.storagePath, config);
|
|
418
|
+
if (syncResult.status === 'error') {
|
|
419
|
+
return { error: syncResult.error || 'Failed to build Unreal asset manifest.' };
|
|
420
|
+
}
|
|
421
|
+
manifest = await loadUnrealAssetManifest(repo.storagePath);
|
|
422
|
+
manifestRefreshed = true;
|
|
423
|
+
}
|
|
424
|
+
if (!manifest) {
|
|
425
|
+
return { error: 'Unreal asset manifest is missing after sync.' };
|
|
426
|
+
}
|
|
427
|
+
return {
|
|
428
|
+
config,
|
|
429
|
+
manifest,
|
|
430
|
+
manifestPath: paths.manifest_path,
|
|
431
|
+
manifestRefreshed,
|
|
432
|
+
};
|
|
433
|
+
}
|
|
434
|
+
async syncUnrealAssetManifestTool(repo) {
|
|
435
|
+
const config = await loadUnrealConfig(repo.storagePath);
|
|
436
|
+
if (!config) {
|
|
437
|
+
const paths = getUnrealStoragePaths(repo.storagePath);
|
|
438
|
+
return {
|
|
439
|
+
error: `Unreal analyzer is not configured for this repo. Create ${paths.config_path} with editor_cmd and project_path.`,
|
|
440
|
+
};
|
|
441
|
+
}
|
|
442
|
+
return syncUnrealAssetManifest(repo.storagePath, config);
|
|
443
|
+
}
|
|
444
|
+
async findNativeBlueprintReferencesTool(repo, params) {
|
|
445
|
+
const targetResult = await this.resolveNativeFunctionTarget(repo, params);
|
|
446
|
+
if (targetResult.error || targetResult.status === 'ambiguous') {
|
|
447
|
+
return targetResult;
|
|
448
|
+
}
|
|
449
|
+
const unrealState = await this.ensureUnrealReady(repo, params.refresh_manifest ?? false);
|
|
450
|
+
if ('error' in unrealState) {
|
|
451
|
+
return unrealState;
|
|
452
|
+
}
|
|
453
|
+
const candidateAssets = this.shortlistBlueprintCandidates(targetResult.target, unrealState.manifest, params.max_candidates || 200);
|
|
454
|
+
const result = await findNativeBlueprintReferences(repo.storagePath, unrealState.config, targetResult.target, candidateAssets, unrealState.manifestPath);
|
|
455
|
+
return {
|
|
456
|
+
...result,
|
|
457
|
+
manifest_refreshed: unrealState.manifestRefreshed,
|
|
458
|
+
};
|
|
459
|
+
}
|
|
460
|
+
async expandBlueprintChainTool(repo, params) {
|
|
461
|
+
if (!params.asset_path || !params.chain_anchor_id) {
|
|
462
|
+
return { error: '"asset_path" and "chain_anchor_id" are required.' };
|
|
463
|
+
}
|
|
464
|
+
const unrealState = await this.ensureUnrealReady(repo, false);
|
|
465
|
+
if ('error' in unrealState) {
|
|
466
|
+
return unrealState;
|
|
467
|
+
}
|
|
468
|
+
return expandBlueprintChain(repo.storagePath, unrealState.config, params.asset_path, params.chain_anchor_id, params.direction || 'downstream', params.max_depth || 5);
|
|
469
|
+
}
|
|
470
|
+
async findBlueprintsDerivedFromNativeClassTool(repo, params) {
|
|
471
|
+
if (!params.class_name?.trim()) {
|
|
472
|
+
return { error: '"class_name" is required.' };
|
|
473
|
+
}
|
|
474
|
+
const unrealState = await this.ensureUnrealReady(repo, params.refresh_manifest ?? false);
|
|
475
|
+
if ('error' in unrealState) {
|
|
476
|
+
return unrealState;
|
|
477
|
+
}
|
|
478
|
+
const normalizedClass = params.class_name.trim().toLowerCase();
|
|
479
|
+
const matches = unrealState.manifest.assets
|
|
480
|
+
.filter(asset => (asset.native_parents || []).some(parent => {
|
|
481
|
+
const normalizedParent = parent.toLowerCase();
|
|
482
|
+
return normalizedParent === normalizedClass || normalizedParent.endsWith(`.${normalizedClass}`);
|
|
483
|
+
}))
|
|
484
|
+
.slice(0, params.max_results || 200)
|
|
485
|
+
.map(asset => ({
|
|
486
|
+
asset_path: asset.asset_path,
|
|
487
|
+
generated_class: asset.generated_class,
|
|
488
|
+
parent_class: asset.parent_class,
|
|
489
|
+
reason: 'native_parent',
|
|
490
|
+
}));
|
|
491
|
+
return {
|
|
492
|
+
class_name: params.class_name.trim(),
|
|
493
|
+
manifest_path: unrealState.manifestPath,
|
|
494
|
+
manifest_refreshed: unrealState.manifestRefreshed,
|
|
495
|
+
blueprints: matches,
|
|
496
|
+
};
|
|
497
|
+
}
|
|
498
|
+
/**
 * Query tool — process-grouped search.
 *
 * 1. Hybrid search (BM25 + semantic) to find matching symbols
 * 2. Trace each match to its process(es) via STEP_IN_PROCESS
 * 3. Group by process, rank by aggregate relevance + internal cluster cohesion
 * 4. Return: { processes, process_symbols, definitions }
 *
 * @param repo   resolved repo record ({ id, ... })
 * @param params { query, limit?, max_symbols?, include_content? }
 * @returns grouped search results, or { error } when `query` is blank
 */
async query(repo, params) {
    if (!params.query?.trim()) {
        return { error: 'query parameter is required and cannot be empty.' };
    }
    await this.ensureInitialized(repo.id);
    const processLimit = params.limit || 5;
    const maxSymbolsPerProcess = params.max_symbols || 10;
    const includeContent = params.include_content ?? false;
    const searchQuery = params.query.trim();
    // Step 1: Run hybrid search to get matching symbols.
    // Both searches run in parallel; each returns up to searchLimit rows.
    const searchLimit = processLimit * maxSymbolsPerProcess; // fetch enough raw results
    const [bm25Results, semanticResults] = await Promise.all([
        this.bm25Search(repo, searchQuery, searchLimit),
        this.semanticSearch(repo, searchQuery, searchLimit),
    ]);
    // Merge via reciprocal rank fusion: each list contributes 1/(60+rank)
    // per item, keyed by nodeId (or filePath for file-level hits), and
    // scores for the same key are summed across the two lists.
    const scoreMap = new Map();
    for (let i = 0; i < bm25Results.length; i++) {
        const result = bm25Results[i];
        const key = result.nodeId || result.filePath;
        const rrfScore = 1 / (60 + i);
        const existing = scoreMap.get(key);
        if (existing) {
            existing.score += rrfScore;
        }
        else {
            scoreMap.set(key, { score: rrfScore, data: result });
        }
    }
    for (let i = 0; i < semanticResults.length; i++) {
        const result = semanticResults[i];
        const key = result.nodeId || result.filePath;
        const rrfScore = 1 / (60 + i);
        const existing = scoreMap.get(key);
        if (existing) {
            existing.score += rrfScore;
        }
        else {
            scoreMap.set(key, { score: rrfScore, data: result });
        }
    }
    // Highest fused score first; cap at searchLimit entries.
    const merged = Array.from(scoreMap.entries())
        .sort((a, b) => b[1].score - a[1].score)
        .slice(0, searchLimit);
    // Step 2: For each match with a nodeId, trace to process(es)
    const processMap = new Map();
    const definitions = []; // standalone symbols not in any process
    for (const [_, item] of merged) {
        const sym = item.data;
        if (!sym.nodeId) {
            // File-level results go to definitions
            definitions.push({
                name: sym.name,
                type: sym.type || 'File',
                filePath: sym.filePath,
            });
            continue;
        }
        // Find processes this symbol participates in.
        // Lookup failures are logged and treated as "no processes".
        let processRows = [];
        try {
            processRows = await executeParameterized(repo.id, `
        MATCH (n {id: $nodeId})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
        RETURN p.id AS pid, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount, r.step AS step
      `, { nodeId: sym.nodeId });
        }
        catch (e) {
            logQueryError('query:process-lookup', e);
        }
        // Get cluster membership + cohesion (cohesion used as internal ranking signal)
        let cohesion = 0;
        let module;
        try {
            const cohesionRows = await executeParameterized(repo.id, `
        MATCH (n {id: $nodeId})-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
        RETURN c.cohesion AS cohesion, c.heuristicLabel AS module
        LIMIT 1
      `, { nodeId: sym.nodeId });
            if (cohesionRows.length > 0) {
                // Rows may be keyed by alias or by position depending on driver.
                cohesion = (cohesionRows[0].cohesion ?? cohesionRows[0][0]) || 0;
                module = cohesionRows[0].module ?? cohesionRows[0][1];
            }
        }
        catch (e) {
            logQueryError('query:cluster-info', e);
        }
        // Optionally fetch content
        let content;
        if (includeContent) {
            try {
                const contentRows = await executeParameterized(repo.id, `
          MATCH (n {id: $nodeId})
          RETURN n.content AS content
        `, { nodeId: sym.nodeId });
                if (contentRows.length > 0) {
                    content = contentRows[0].content ?? contentRows[0][0];
                }
            }
            catch (e) {
                logQueryError('query:content-fetch', e);
            }
        }
        // Shared shape for both the definitions list and process symbol lists.
        const symbolEntry = {
            id: sym.nodeId,
            name: sym.name,
            type: sym.type,
            filePath: sym.filePath,
            startLine: sym.startLine,
            endLine: sym.endLine,
            ...(module ? { module } : {}),
            ...(includeContent && content ? { content } : {}),
        };
        if (processRows.length === 0) {
            // Symbol not in any process — goes to definitions
            definitions.push(symbolEntry);
        }
        else {
            // Add to each process it belongs to
            for (const row of processRows) {
                const pid = row.pid ?? row[0];
                const label = row.label ?? row[1];
                const hLabel = row.heuristicLabel ?? row[2];
                const pType = row.processType ?? row[3];
                const stepCount = row.stepCount ?? row[4];
                const step = row.step ?? row[5];
                if (!processMap.has(pid)) {
                    processMap.set(pid, {
                        id: pid,
                        label,
                        heuristicLabel: hLabel,
                        processType: pType,
                        stepCount,
                        totalScore: 0,
                        cohesionBoost: 0,
                        symbols: [],
                    });
                }
                const proc = processMap.get(pid);
                proc.totalScore += item.score;
                // A process inherits the best cohesion among its matched symbols.
                proc.cohesionBoost = Math.max(proc.cohesionBoost, cohesion);
                proc.symbols.push({
                    ...symbolEntry,
                    process_id: pid,
                    step_index: step,
                });
            }
        }
    }
    // Step 3: Rank processes by aggregate score + internal cohesion boost
    const rankedProcesses = Array.from(processMap.values())
        .map(p => ({
            ...p,
            priority: p.totalScore + (p.cohesionBoost * 0.1), // cohesion as subtle ranking signal
        }))
        .sort((a, b) => b.priority - a.priority)
        .slice(0, processLimit);
    // Step 4: Build response
    const processes = rankedProcesses.map(p => ({
        id: p.id,
        summary: p.heuristicLabel || p.label,
        priority: Math.round(p.priority * 1000) / 1000,
        symbol_count: p.symbols.length,
        process_type: p.processType,
        step_count: p.stepCount,
    }));
    const processSymbols = rankedProcesses.flatMap(p => p.symbols.slice(0, maxSymbolsPerProcess).map(s => ({
        ...s,
        // remove internal fields
    })));
    // Deduplicate process_symbols by id (a symbol can sit in several processes)
    const seen = new Set();
    const dedupedSymbols = processSymbols.filter(s => {
        if (seen.has(s.id))
            return false;
        seen.add(s.id);
        return true;
    });
    return {
        processes,
        process_symbols: dedupedSymbols,
        definitions: definitions.slice(0, 20), // cap standalone definitions
    };
}
|
|
689
|
+
/**
|
|
690
|
+
* BM25 keyword search helper - uses LadybugDB FTS for always-fresh results
|
|
691
|
+
*/
|
|
692
|
+
async bm25Search(repo, query, limit) {
|
|
693
|
+
const { searchFTSFromLbug } = await import('../../core/search/bm25-index.js');
|
|
694
|
+
let bm25Results;
|
|
695
|
+
try {
|
|
696
|
+
bm25Results = await searchFTSFromLbug(query, limit, repo.id);
|
|
697
|
+
}
|
|
698
|
+
catch (err) {
|
|
699
|
+
console.error('GitNexus: BM25/FTS search failed (FTS indexes may not exist) -', err.message);
|
|
700
|
+
return [];
|
|
701
|
+
}
|
|
702
|
+
const results = [];
|
|
703
|
+
for (const bm25Result of bm25Results) {
|
|
704
|
+
const fullPath = bm25Result.filePath;
|
|
705
|
+
try {
|
|
706
|
+
const symbols = await executeParameterized(repo.id, `
|
|
707
|
+
MATCH (n)
|
|
708
|
+
WHERE n.filePath = $filePath
|
|
709
|
+
RETURN n.id AS id, n.name AS name, labels(n)[0] AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine
|
|
710
|
+
LIMIT 3
|
|
711
|
+
`, { filePath: fullPath });
|
|
712
|
+
if (symbols.length > 0) {
|
|
713
|
+
for (const sym of symbols) {
|
|
714
|
+
results.push({
|
|
715
|
+
nodeId: sym.id || sym[0],
|
|
716
|
+
name: sym.name || sym[1],
|
|
717
|
+
type: sym.type || sym[2],
|
|
718
|
+
filePath: sym.filePath || sym[3],
|
|
719
|
+
startLine: sym.startLine || sym[4],
|
|
720
|
+
endLine: sym.endLine || sym[5],
|
|
721
|
+
bm25Score: bm25Result.score,
|
|
722
|
+
});
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
else {
|
|
726
|
+
const fileName = fullPath.split('/').pop() || fullPath;
|
|
727
|
+
results.push({
|
|
728
|
+
name: fileName,
|
|
729
|
+
type: 'File',
|
|
730
|
+
filePath: bm25Result.filePath,
|
|
731
|
+
bm25Score: bm25Result.score,
|
|
732
|
+
});
|
|
733
|
+
}
|
|
734
|
+
}
|
|
735
|
+
catch {
|
|
736
|
+
const fileName = fullPath.split('/').pop() || fullPath;
|
|
737
|
+
results.push({
|
|
738
|
+
name: fileName,
|
|
739
|
+
type: 'File',
|
|
740
|
+
filePath: bm25Result.filePath,
|
|
741
|
+
bm25Score: bm25Result.score,
|
|
742
|
+
});
|
|
743
|
+
}
|
|
744
|
+
}
|
|
745
|
+
return results;
|
|
746
|
+
}
|
|
747
|
+
/**
 * Semantic vector search helper.
 *
 * Embeds the query text and runs a vector-index lookup against the
 * CodeEmbedding table, then hydrates each hit from its graph node.
 * Returns [] whenever embeddings are unavailable or any step fails, so
 * the caller transparently degrades to BM25-only search.
 */
async semanticSearch(repo, query, limit) {
    try {
        // Check if embedding table exists before loading the model (avoids heavy model init when embeddings are off)
        const tableCheck = await executeQuery(repo.id, `MATCH (e:CodeEmbedding) RETURN COUNT(*) AS cnt LIMIT 1`);
        if (!tableCheck.length || (tableCheck[0].cnt ?? tableCheck[0][0]) === 0)
            return [];
        // Lazy-load the embedder only once we know embeddings exist.
        const { embedQuery, getEmbeddingDims } = await import('../core/embedder.js');
        const queryVec = await embedQuery(query);
        const dims = getEmbeddingDims();
        // The vector literal is interpolated into the query text; it is
        // numeric data produced locally (not user-controlled text).
        const queryVecStr = `[${queryVec.join(',')}]`;
        const vectorQuery = `
      CALL QUERY_VECTOR_INDEX('CodeEmbedding', 'code_embedding_idx',
        CAST(${queryVecStr} AS FLOAT[${dims}]), ${limit})
      YIELD node AS emb, distance
      WITH emb, distance
      WHERE distance < 0.6
      RETURN emb.nodeId AS nodeId, distance
      ORDER BY distance
    `;
        const embResults = await executeQuery(repo.id, vectorQuery);
        if (embResults.length === 0)
            return [];
        const results = [];
        for (const embRow of embResults) {
            // Rows may be keyed by alias or by position depending on driver.
            const nodeId = embRow.nodeId ?? embRow[0];
            const distance = embRow.distance ?? embRow[1];
            // nodeId is expected to be "<Label>:<rest>"; extract the label part.
            const labelEndIdx = nodeId.indexOf(':');
            const label = labelEndIdx > 0 ? nodeId.substring(0, labelEndIdx) : 'Unknown';
            // Validate label against known node types to prevent Cypher injection
            if (!VALID_NODE_LABELS.has(label))
                continue;
            try {
                // File nodes carry no line range; other labels do.
                const nodeQuery = label === 'File'
                    ? `MATCH (n:File {id: $nodeId}) RETURN n.name AS name, n.filePath AS filePath`
                    : `MATCH (n:\`${label}\` {id: $nodeId}) RETURN n.name AS name, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine`;
                const nodeRows = await executeParameterized(repo.id, nodeQuery, { nodeId });
                if (nodeRows.length > 0) {
                    const nodeRow = nodeRows[0];
                    results.push({
                        nodeId,
                        name: nodeRow.name ?? nodeRow[0] ?? '',
                        type: label,
                        filePath: nodeRow.filePath ?? nodeRow[1] ?? '',
                        distance,
                        startLine: label !== 'File' ? (nodeRow.startLine ?? nodeRow[2]) : undefined,
                        endLine: label !== 'File' ? (nodeRow.endLine ?? nodeRow[3]) : undefined,
                    });
                }
            }
            // Deliberate best-effort: a single bad node must not kill the search.
            catch { }
        }
        return results;
    }
    catch {
        // Expected when embeddings are disabled — silently fall back to BM25-only
        return [];
    }
}
|
|
808
|
+
async executeCypher(repoName, query) {
|
|
809
|
+
const repo = await this.resolveRepo(repoName);
|
|
810
|
+
return this.cypher(repo, { query });
|
|
811
|
+
}
|
|
812
|
+
async cypher(repo, params) {
|
|
813
|
+
await this.ensureInitialized(repo.id);
|
|
814
|
+
if (!isLbugReady(repo.id)) {
|
|
815
|
+
return { error: 'LadybugDB not ready. Index may be corrupted.' };
|
|
816
|
+
}
|
|
817
|
+
// Block write operations (defense-in-depth — DB is already read-only)
|
|
818
|
+
if (CYPHER_WRITE_RE.test(params.query)) {
|
|
819
|
+
return { error: 'Write operations (CREATE, DELETE, SET, MERGE, REMOVE, DROP, ALTER, COPY, DETACH) are not allowed. The knowledge graph is read-only.' };
|
|
820
|
+
}
|
|
821
|
+
try {
|
|
822
|
+
const result = await executeQuery(repo.id, params.query);
|
|
823
|
+
return result;
|
|
824
|
+
}
|
|
825
|
+
catch (err) {
|
|
826
|
+
return { error: err.message || 'Query failed' };
|
|
827
|
+
}
|
|
828
|
+
}
|
|
829
|
+
/**
|
|
830
|
+
* Format raw Cypher result rows as a markdown table for LLM readability.
|
|
831
|
+
* Falls back to raw result if rows aren't tabular objects.
|
|
832
|
+
*/
|
|
833
|
+
formatCypherAsMarkdown(result) {
|
|
834
|
+
if (!Array.isArray(result) || result.length === 0)
|
|
835
|
+
return result;
|
|
836
|
+
const firstRow = result[0];
|
|
837
|
+
if (typeof firstRow !== 'object' || firstRow === null)
|
|
838
|
+
return result;
|
|
839
|
+
const keys = Object.keys(firstRow);
|
|
840
|
+
if (keys.length === 0)
|
|
841
|
+
return result;
|
|
842
|
+
const header = '| ' + keys.join(' | ') + ' |';
|
|
843
|
+
const separator = '| ' + keys.map(() => '---').join(' | ') + ' |';
|
|
844
|
+
const dataRows = result.map((row) => '| ' + keys.map(k => {
|
|
845
|
+
const v = row[k];
|
|
846
|
+
if (v === null || v === undefined)
|
|
847
|
+
return '';
|
|
848
|
+
if (typeof v === 'object')
|
|
849
|
+
return JSON.stringify(v);
|
|
850
|
+
return String(v);
|
|
851
|
+
}).join(' | ') + ' |');
|
|
852
|
+
return {
|
|
853
|
+
markdown: [header, separator, ...dataRows].join('\n'),
|
|
854
|
+
row_count: result.length,
|
|
855
|
+
};
|
|
856
|
+
}
|
|
857
|
+
/**
|
|
858
|
+
* Aggregate same-named clusters: group by heuristicLabel, sum symbols,
|
|
859
|
+
* weighted-average cohesion, filter out tiny clusters (<5 symbols).
|
|
860
|
+
* Raw communities stay intact in LadybugDB for Cypher queries.
|
|
861
|
+
*/
|
|
862
|
+
aggregateClusters(clusters) {
|
|
863
|
+
const groups = new Map();
|
|
864
|
+
for (const c of clusters) {
|
|
865
|
+
const label = c.heuristicLabel || c.label || 'Unknown';
|
|
866
|
+
const symbols = c.symbolCount || 0;
|
|
867
|
+
const cohesion = c.cohesion || 0;
|
|
868
|
+
const existing = groups.get(label);
|
|
869
|
+
if (!existing) {
|
|
870
|
+
groups.set(label, { ids: [c.id], totalSymbols: symbols, weightedCohesion: cohesion * symbols, largest: c });
|
|
871
|
+
}
|
|
872
|
+
else {
|
|
873
|
+
existing.ids.push(c.id);
|
|
874
|
+
existing.totalSymbols += symbols;
|
|
875
|
+
existing.weightedCohesion += cohesion * symbols;
|
|
876
|
+
if (symbols > (existing.largest.symbolCount || 0)) {
|
|
877
|
+
existing.largest = c;
|
|
878
|
+
}
|
|
879
|
+
}
|
|
880
|
+
}
|
|
881
|
+
return Array.from(groups.entries())
|
|
882
|
+
.map(([label, g]) => ({
|
|
883
|
+
id: g.largest.id,
|
|
884
|
+
label,
|
|
885
|
+
heuristicLabel: label,
|
|
886
|
+
symbolCount: g.totalSymbols,
|
|
887
|
+
cohesion: g.totalSymbols > 0 ? g.weightedCohesion / g.totalSymbols : 0,
|
|
888
|
+
subCommunities: g.ids.length,
|
|
889
|
+
}))
|
|
890
|
+
.filter(c => c.symbolCount >= 5)
|
|
891
|
+
.sort((a, b) => b.symbolCount - a.symbolCount);
|
|
892
|
+
}
|
|
893
|
+
/**
 * Overview tool — repo metadata plus top clusters and processes.
 *
 * Cluster/process sections can each be disabled via `showClusters` /
 * `showProcesses`; query failures degrade to empty lists rather than
 * failing the whole overview.
 */
async overview(repo, params) {
    await this.ensureInitialized(repo.id);
    const limit = params.limit || 20;
    // Base payload from the repo record itself (no graph queries needed).
    const result = {
        repo: repo.name,
        repoPath: repo.repoPath,
        stats: repo.stats,
        indexedAt: repo.indexedAt,
        lastCommit: repo.lastCommit,
    };
    if (params.showClusters !== false) {
        try {
            // Fetch more raw communities than the display limit so aggregation has enough data
            const rawLimit = Math.max(limit * 5, 200);
            const clusters = await executeQuery(repo.id, `
        MATCH (c:Community)
        RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
        ORDER BY c.symbolCount DESC
        LIMIT ${rawLimit}
      `);
            // Rows may be keyed by alias or by position depending on driver.
            const rawClusters = clusters.map((c) => ({
                id: c.id || c[0],
                label: c.label || c[1],
                heuristicLabel: c.heuristicLabel || c[2],
                cohesion: c.cohesion || c[3],
                symbolCount: c.symbolCount || c[4],
            }));
            result.clusters = this.aggregateClusters(rawClusters).slice(0, limit);
        }
        catch {
            // Best-effort: missing Community table just yields no clusters.
            result.clusters = [];
        }
    }
    if (params.showProcesses !== false) {
        try {
            const processes = await executeQuery(repo.id, `
        MATCH (p:Process)
        RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
        ORDER BY p.stepCount DESC
        LIMIT ${limit}
      `);
            result.processes = processes.map((p) => ({
                id: p.id || p[0],
                label: p.label || p[1],
                heuristicLabel: p.heuristicLabel || p[2],
                processType: p.processType || p[3],
                stepCount: p.stepCount || p[4],
            }));
        }
        catch {
            // Best-effort: missing Process table just yields no processes.
            result.processes = [];
        }
    }
    return result;
}
|
|
948
|
+
/**
 * Context tool — 360-degree symbol view with categorized refs.
 * Disambiguation when multiple symbols share a name.
 * UID-based direct lookup. No cluster in output.
 *
 * @param repo   resolved repo record ({ id, ... })
 * @param params { name?, uid?, file_path?, include_content? } — one of
 *               name/uid is required; file_path narrows name matches
 * @returns { status: 'found', symbol, incoming, outgoing, processes },
 *          { status: 'ambiguous', candidates }, or { error }
 */
async context(repo, params) {
    await this.ensureInitialized(repo.id);
    const { name, uid, file_path, include_content } = params;
    if (!name && !uid) {
        return { error: 'Either "name" or "uid" parameter is required.' };
    }
    // Step 1: Find the symbol
    let symbols;
    if (uid) {
        // Direct id lookup — at most one row.
        symbols = await executeParameterized(repo.id, `
      MATCH (n {id: $uid})
      RETURN n.id AS id, n.name AS name, labels(n)[0] AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
      LIMIT 1
    `, { uid });
    }
    else {
        // Names containing '/' or ':' look like ids/paths, so also match on id.
        const isQualified = name.includes('/') || name.includes(':');
        let whereClause;
        let queryParams;
        if (file_path) {
            whereClause = `WHERE n.name = $symName AND n.filePath CONTAINS $filePath`;
            queryParams = { symName: name, filePath: file_path };
        }
        else if (isQualified) {
            whereClause = `WHERE n.id = $symName OR n.name = $symName`;
            queryParams = { symName: name };
        }
        else {
            whereClause = `WHERE n.name = $symName`;
            queryParams = { symName: name };
        }
        symbols = await executeParameterized(repo.id, `
      MATCH (n) ${whereClause}
      RETURN n.id AS id, n.name AS name, labels(n)[0] AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
      LIMIT 10
    `, queryParams);
    }
    if (symbols.length === 0) {
        return { error: `Symbol '${name || uid}' not found` };
    }
    // Step 2: Disambiguation — several name matches need the caller to choose.
    if (symbols.length > 1 && !uid) {
        return {
            status: 'ambiguous',
            message: `Found ${symbols.length} symbols matching '${name}'. Use uid or file_path to disambiguate.`,
            candidates: symbols.map((s) => ({
                uid: s.id || s[0],
                name: s.name || s[1],
                kind: s.type || s[2],
                filePath: s.filePath || s[3],
                line: s.startLine || s[4],
            })),
        };
    }
    // Step 3: Build full context
    const sym = symbols[0];
    const symId = sym.id || sym[0];
    // Categorized incoming refs
    const incomingRows = await executeParameterized(repo.id, `
      MATCH (caller)-[r:CodeRelation]->(n {id: $symId})
      WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS', 'HAS_METHOD', 'HAS_PROPERTY', 'OVERRIDES', 'ACCESSES']
      RETURN r.type AS relType, caller.id AS uid, caller.name AS name, caller.filePath AS filePath, labels(caller)[0] AS kind
      LIMIT 30
    `, { symId });
    // Categorized outgoing refs
    const outgoingRows = await executeParameterized(repo.id, `
      MATCH (n {id: $symId})-[r:CodeRelation]->(target)
      WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS', 'HAS_METHOD', 'HAS_PROPERTY', 'OVERRIDES', 'ACCESSES']
      RETURN r.type AS relType, target.id AS uid, target.name AS name, target.filePath AS filePath, labels(target)[0] AS kind
      LIMIT 30
    `, { symId });
    // Process participation (best-effort — logged, not fatal)
    let processRows = [];
    try {
        processRows = await executeParameterized(repo.id, `
      MATCH (n {id: $symId})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
      RETURN p.id AS pid, p.heuristicLabel AS label, r.step AS step, p.stepCount AS stepCount
    `, { symId });
    }
    catch (e) {
        logQueryError('context:process-participation', e);
    }
    // Helper to categorize refs: group rows into { calls: [...], imports: [...] } etc.
    const categorize = (rows) => {
        const cats = {};
        for (const row of rows) {
            // Rows may be keyed by alias or by position depending on driver.
            const relType = (row.relType || row[0] || '').toLowerCase();
            const entry = {
                uid: row.uid || row[1],
                name: row.name || row[2],
                filePath: row.filePath || row[3],
                kind: row.kind || row[4],
            };
            if (!cats[relType])
                cats[relType] = [];
            cats[relType].push(entry);
        }
        return cats;
    };
    return {
        status: 'found',
        symbol: {
            uid: sym.id || sym[0],
            name: sym.name || sym[1],
            kind: sym.type || sym[2],
            filePath: sym.filePath || sym[3],
            startLine: sym.startLine || sym[4],
            endLine: sym.endLine || sym[5],
            ...(include_content && (sym.content || sym[6]) ? { content: sym.content || sym[6] } : {}),
        },
        incoming: categorize(incomingRows),
        outgoing: categorize(outgoingRows),
        processes: processRows.map((r) => ({
            id: r.pid || r[0],
            name: r.label || r[1],
            step_index: r.step || r[2],
            step_count: r.stepCount || r[3],
        })),
    };
}
|
|
1073
|
+
/**
 * Legacy explore — kept for backwards compatibility with resources.ts.
 * Routes cluster/process types to direct graph queries.
 *
 * @param repo   resolved repo record ({ id, ... })
 * @param params { name, type } where type is 'symbol' | 'cluster' | 'process'
 * @returns type-specific detail payload, or { error } on unknown name/type
 */
async explore(repo, params) {
    await this.ensureInitialized(repo.id);
    const { name, type } = params;
    if (type === 'symbol') {
        // Symbols are fully handled by the richer context tool.
        return this.context(repo, { name });
    }
    if (type === 'cluster') {
        // Match on either raw or heuristic label; multiple sub-communities
        // sharing the label are aggregated below.
        const clusters = await executeParameterized(repo.id, `
      MATCH (c:Community)
      WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
      RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
    `, { clusterName: name });
        if (clusters.length === 0)
            return { error: `Cluster '${name}' not found` };
        // Rows may be keyed by alias or by position depending on driver.
        const rawClusters = clusters.map((c) => ({
            id: c.id || c[0], label: c.label || c[1], heuristicLabel: c.heuristicLabel || c[2],
            cohesion: c.cohesion || c[3], symbolCount: c.symbolCount || c[4],
        }));
        // Symbol-count-weighted average cohesion across sub-communities.
        let totalSymbols = 0, weightedCohesion = 0;
        for (const c of rawClusters) {
            const s = c.symbolCount || 0;
            totalSymbols += s;
            weightedCohesion += (c.cohesion || 0) * s;
        }
        const members = await executeParameterized(repo.id, `
      MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
      WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
      RETURN DISTINCT n.name AS name, labels(n)[0] AS type, n.filePath AS filePath
      LIMIT 30
    `, { clusterName: name });
        return {
            cluster: {
                id: rawClusters[0].id,
                label: rawClusters[0].heuristicLabel || rawClusters[0].label,
                heuristicLabel: rawClusters[0].heuristicLabel || rawClusters[0].label,
                cohesion: totalSymbols > 0 ? weightedCohesion / totalSymbols : 0,
                symbolCount: totalSymbols,
                subCommunities: rawClusters.length,
            },
            members: members.map((m) => ({
                name: m.name || m[0], type: m.type || m[1], filePath: m.filePath || m[2],
            })),
        };
    }
    if (type === 'process') {
        const processes = await executeParameterized(repo.id, `
      MATCH (p:Process)
      WHERE p.label = $processName OR p.heuristicLabel = $processName
      RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
      LIMIT 1
    `, { processName: name });
        if (processes.length === 0)
            return { error: `Process '${name}' not found` };
        const proc = processes[0];
        const procId = proc.id || proc[0];
        // Ordered step list for the process.
        const steps = await executeParameterized(repo.id, `
      MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: $procId})
      RETURN n.name AS name, labels(n)[0] AS type, n.filePath AS filePath, r.step AS step
      ORDER BY r.step
    `, { procId });
        return {
            process: {
                id: procId, label: proc.label || proc[1], heuristicLabel: proc.heuristicLabel || proc[2],
                processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
            },
            steps: steps.map((s) => ({
                step: s.step || s[3], name: s.name || s[0], type: s.type || s[1], filePath: s.filePath || s[2],
            })),
        };
    }
    return { error: 'Invalid type. Use: symbol, cluster, or process' };
}
|
|
1149
|
+
    /**
     * Detect changes — git-diff based impact analysis.
     * Maps changed lines to indexed symbols, then finds affected processes.
     *
     * @param {object} repo   - resolved repo record; must have `id` (graph DB key) and `repoPath` (filesystem root).
     * @param {object} params - { scope?: 'unstaged'|'staged'|'all'|'compare', base_ref?: string }
     * @returns {Promise<object>} { summary, changed_symbols, affected_processes } on success,
     *                            or { error } when git fails or `base_ref` is missing for 'compare'.
     */
    async detectChanges(repo, params) {
        await this.ensureInitialized(repo.id);
        const scope = params.scope || 'unstaged';
        const { execFileSync } = await import('child_process');
        // Build git diff args based on scope (using execFileSync to avoid shell injection)
        let diffArgs;
        switch (scope) {
            case 'staged':
                diffArgs = ['diff', '--staged', '--name-only'];
                break;
            case 'all':
                // Working tree + index vs HEAD.
                diffArgs = ['diff', 'HEAD', '--name-only'];
                break;
            case 'compare':
                if (!params.base_ref)
                    return { error: 'base_ref is required for "compare" scope' };
                diffArgs = ['diff', params.base_ref, '--name-only'];
                break;
            case 'unstaged':
            default:
                // Unknown scopes fall back to unstaged rather than erroring.
                diffArgs = ['diff', '--name-only'];
                break;
        }
        let changedFiles;
        try {
            const output = execFileSync('git', diffArgs, { cwd: repo.repoPath, encoding: 'utf-8' });
            changedFiles = output.trim().split('\n').filter(f => f.length > 0);
        }
        catch (err) {
            // Surfaces both "not a git repo" and bad base_ref as a structured error.
            return { error: `Git diff failed: ${err.message}` };
        }
        if (changedFiles.length === 0) {
            // Fast path: nothing changed, skip all graph queries.
            return {
                summary: { changed_count: 0, affected_count: 0, risk_level: 'none', message: 'No changes detected.' },
                changed_symbols: [],
                affected_processes: [],
            };
        }
        // Map changed files to indexed symbols
        const changedSymbols = [];
        for (const file of changedFiles) {
            // Normalize Windows path separators so CONTAINS matches indexed paths.
            const normalizedFile = file.replace(/\\/g, '/');
            try {
                // CONTAINS (not equality) because indexed filePath may be absolute
                // while git emits repo-relative paths. Capped at 20 symbols/file.
                const symbols = await executeParameterized(repo.id, `
        MATCH (n) WHERE n.filePath CONTAINS $filePath
        RETURN n.id AS id, n.name AS name, labels(n)[0] AS type, n.filePath AS filePath
        LIMIT 20
      `, { filePath: normalizedFile });
                for (const sym of symbols) {
                    // Dual access (named column OR positional index) tolerates both
                    // row shapes the query layer can return.
                    changedSymbols.push({
                        id: sym.id || sym[0],
                        name: sym.name || sym[1],
                        type: sym.type || sym[2],
                        filePath: sym.filePath || sym[3],
                        // name-only diff cannot distinguish add/delete/modify,
                        // so everything is reported as Modified.
                        change_type: 'Modified',
                    });
                }
            }
            catch (e) {
                // Best effort per file: log and keep scanning the rest.
                logQueryError('detect-changes:file-symbols', e);
            }
        }
        // Find affected processes (deduplicated by process id)
        const affectedProcesses = new Map();
        for (const sym of changedSymbols) {
            try {
                const procs = await executeParameterized(repo.id, `
        MATCH (n {id: $nodeId})-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
        RETURN p.id AS pid, p.heuristicLabel AS label, p.processType AS processType, p.stepCount AS stepCount, r.step AS step
      `, { nodeId: sym.id });
                for (const proc of procs) {
                    const pid = proc.pid || proc[0];
                    if (!affectedProcesses.has(pid)) {
                        affectedProcesses.set(pid, {
                            id: pid,
                            name: proc.label || proc[1],
                            process_type: proc.processType || proc[2],
                            step_count: proc.stepCount || proc[3],
                            changed_steps: [],
                        });
                    }
                    // Record which step of the process the changed symbol occupies.
                    affectedProcesses.get(pid).changed_steps.push({
                        symbol: sym.name,
                        step: proc.step || proc[4],
                    });
                }
            }
            catch (e) {
                logQueryError('detect-changes:process-lookup', e);
            }
        }
        const processCount = affectedProcesses.size;
        // Risk thresholds: 0 → low, 1-5 → medium, 6-15 → high, >15 → critical.
        const risk = processCount === 0 ? 'low' : processCount <= 5 ? 'medium' : processCount <= 15 ? 'high' : 'critical';
        return {
            summary: {
                changed_count: changedSymbols.length,
                affected_count: processCount,
                changed_files: changedFiles.length,
                risk_level: risk,
            },
            changed_symbols: changedSymbols,
            affected_processes: Array.from(affectedProcesses.values()),
        };
    }
|
|
1257
|
+
/**
|
|
1258
|
+
* Rename tool — multi-file coordinated rename using graph + text search.
|
|
1259
|
+
* Graph refs are tagged "graph" (high confidence).
|
|
1260
|
+
* Additional refs found via text search are tagged "text_search" (lower confidence).
|
|
1261
|
+
*/
|
|
1262
|
+
async rename(repo, params) {
|
|
1263
|
+
await this.ensureInitialized(repo.id);
|
|
1264
|
+
const { new_name, file_path } = params;
|
|
1265
|
+
const dry_run = params.dry_run ?? true;
|
|
1266
|
+
if (!params.symbol_name && !params.symbol_uid) {
|
|
1267
|
+
return { error: 'Either symbol_name or symbol_uid is required.' };
|
|
1268
|
+
}
|
|
1269
|
+
/** Guard: ensure a file path resolves within the repo root (prevents path traversal) */
|
|
1270
|
+
const assertSafePath = (filePath) => {
|
|
1271
|
+
const full = path.resolve(repo.repoPath, filePath);
|
|
1272
|
+
if (!full.startsWith(repo.repoPath + path.sep) && full !== repo.repoPath) {
|
|
1273
|
+
throw new Error(`Path traversal blocked: ${filePath}`);
|
|
1274
|
+
}
|
|
1275
|
+
return full;
|
|
1276
|
+
};
|
|
1277
|
+
// Step 1: Find the target symbol (reuse context's lookup)
|
|
1278
|
+
const lookupResult = await this.context(repo, {
|
|
1279
|
+
name: params.symbol_name,
|
|
1280
|
+
uid: params.symbol_uid,
|
|
1281
|
+
file_path,
|
|
1282
|
+
});
|
|
1283
|
+
if (lookupResult.status === 'ambiguous') {
|
|
1284
|
+
return lookupResult; // pass disambiguation through
|
|
1285
|
+
}
|
|
1286
|
+
if (lookupResult.error) {
|
|
1287
|
+
return lookupResult;
|
|
1288
|
+
}
|
|
1289
|
+
const sym = lookupResult.symbol;
|
|
1290
|
+
const oldName = sym.name;
|
|
1291
|
+
if (oldName === new_name) {
|
|
1292
|
+
return { error: 'New name is the same as the current name.' };
|
|
1293
|
+
}
|
|
1294
|
+
// Step 2: Collect edits from graph (high confidence)
|
|
1295
|
+
const changes = new Map();
|
|
1296
|
+
const addEdit = (filePath, line, oldText, newText, confidence) => {
|
|
1297
|
+
if (!changes.has(filePath)) {
|
|
1298
|
+
changes.set(filePath, { file_path: filePath, edits: [] });
|
|
1299
|
+
}
|
|
1300
|
+
changes.get(filePath).edits.push({ line, old_text: oldText, new_text: newText, confidence });
|
|
1301
|
+
};
|
|
1302
|
+
// The definition itself
|
|
1303
|
+
if (sym.filePath && sym.startLine) {
|
|
1304
|
+
try {
|
|
1305
|
+
const content = await fs.readFile(assertSafePath(sym.filePath), 'utf-8');
|
|
1306
|
+
const lines = content.split('\n');
|
|
1307
|
+
const lineIdx = sym.startLine - 1;
|
|
1308
|
+
if (lineIdx >= 0 && lineIdx < lines.length && lines[lineIdx].includes(oldName)) {
|
|
1309
|
+
const defRegex = new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g');
|
|
1310
|
+
addEdit(sym.filePath, sym.startLine, lines[lineIdx].trim(), lines[lineIdx].replace(defRegex, new_name).trim(), 'graph');
|
|
1311
|
+
}
|
|
1312
|
+
}
|
|
1313
|
+
catch (e) {
|
|
1314
|
+
logQueryError('rename:read-definition', e);
|
|
1315
|
+
}
|
|
1316
|
+
}
|
|
1317
|
+
// All incoming refs from graph (callers, importers, etc.)
|
|
1318
|
+
const allIncoming = [
|
|
1319
|
+
...(lookupResult.incoming.calls || []),
|
|
1320
|
+
...(lookupResult.incoming.imports || []),
|
|
1321
|
+
...(lookupResult.incoming.extends || []),
|
|
1322
|
+
...(lookupResult.incoming.implements || []),
|
|
1323
|
+
];
|
|
1324
|
+
let graphEdits = changes.size > 0 ? 1 : 0; // count definition edit
|
|
1325
|
+
for (const ref of allIncoming) {
|
|
1326
|
+
if (!ref.filePath)
|
|
1327
|
+
continue;
|
|
1328
|
+
try {
|
|
1329
|
+
const content = await fs.readFile(assertSafePath(ref.filePath), 'utf-8');
|
|
1330
|
+
const lines = content.split('\n');
|
|
1331
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1332
|
+
if (lines[i].includes(oldName)) {
|
|
1333
|
+
addEdit(ref.filePath, i + 1, lines[i].trim(), lines[i].replace(new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g'), new_name).trim(), 'graph');
|
|
1334
|
+
graphEdits++;
|
|
1335
|
+
break; // one edit per file from graph refs
|
|
1336
|
+
}
|
|
1337
|
+
}
|
|
1338
|
+
}
|
|
1339
|
+
catch (e) {
|
|
1340
|
+
logQueryError('rename:read-ref', e);
|
|
1341
|
+
}
|
|
1342
|
+
}
|
|
1343
|
+
// Step 3: Text search for refs the graph might have missed
|
|
1344
|
+
let astSearchEdits = 0;
|
|
1345
|
+
const graphFiles = new Set([sym.filePath, ...allIncoming.map(r => r.filePath)].filter(Boolean));
|
|
1346
|
+
// Simple text search across the repo for the old name (in files not already covered by graph)
|
|
1347
|
+
try {
|
|
1348
|
+
const { execFileSync } = await import('child_process');
|
|
1349
|
+
const rgArgs = [
|
|
1350
|
+
'-l',
|
|
1351
|
+
'--type-add', 'code:*.{ts,tsx,js,jsx,py,go,rs,java,c,h,cpp,cc,cxx,hpp,hxx,hh,cs,php,swift}',
|
|
1352
|
+
'-t', 'code',
|
|
1353
|
+
`\\b${oldName}\\b`,
|
|
1354
|
+
'.',
|
|
1355
|
+
];
|
|
1356
|
+
const output = execFileSync('rg', rgArgs, { cwd: repo.repoPath, encoding: 'utf-8', timeout: 5000 });
|
|
1357
|
+
const files = output.trim().split('\n').filter(f => f.length > 0);
|
|
1358
|
+
for (const file of files) {
|
|
1359
|
+
const normalizedFile = file.replace(/\\/g, '/').replace(/^\.\//, '');
|
|
1360
|
+
if (graphFiles.has(normalizedFile))
|
|
1361
|
+
continue; // already covered by graph
|
|
1362
|
+
try {
|
|
1363
|
+
const content = await fs.readFile(assertSafePath(normalizedFile), 'utf-8');
|
|
1364
|
+
const lines = content.split('\n');
|
|
1365
|
+
const regex = new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g');
|
|
1366
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1367
|
+
regex.lastIndex = 0;
|
|
1368
|
+
if (regex.test(lines[i])) {
|
|
1369
|
+
regex.lastIndex = 0;
|
|
1370
|
+
addEdit(normalizedFile, i + 1, lines[i].trim(), lines[i].replace(regex, new_name).trim(), 'text_search');
|
|
1371
|
+
astSearchEdits++;
|
|
1372
|
+
}
|
|
1373
|
+
}
|
|
1374
|
+
}
|
|
1375
|
+
catch (e) {
|
|
1376
|
+
logQueryError('rename:text-search-read', e);
|
|
1377
|
+
}
|
|
1378
|
+
}
|
|
1379
|
+
}
|
|
1380
|
+
catch (e) {
|
|
1381
|
+
logQueryError('rename:ripgrep', e);
|
|
1382
|
+
}
|
|
1383
|
+
// Step 4: Apply or preview
|
|
1384
|
+
const allChanges = Array.from(changes.values());
|
|
1385
|
+
const totalEdits = allChanges.reduce((sum, c) => sum + c.edits.length, 0);
|
|
1386
|
+
if (!dry_run) {
|
|
1387
|
+
// Apply edits to files
|
|
1388
|
+
for (const change of allChanges) {
|
|
1389
|
+
try {
|
|
1390
|
+
const fullPath = assertSafePath(change.file_path);
|
|
1391
|
+
let content = await fs.readFile(fullPath, 'utf-8');
|
|
1392
|
+
const regex = new RegExp(`\\b${oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`, 'g');
|
|
1393
|
+
content = content.replace(regex, new_name);
|
|
1394
|
+
await fs.writeFile(fullPath, content, 'utf-8');
|
|
1395
|
+
}
|
|
1396
|
+
catch (e) {
|
|
1397
|
+
logQueryError('rename:apply-edit', e);
|
|
1398
|
+
}
|
|
1399
|
+
}
|
|
1400
|
+
}
|
|
1401
|
+
return {
|
|
1402
|
+
status: 'success',
|
|
1403
|
+
old_name: oldName,
|
|
1404
|
+
new_name,
|
|
1405
|
+
files_affected: allChanges.length,
|
|
1406
|
+
total_edits: totalEdits,
|
|
1407
|
+
graph_edits: graphEdits,
|
|
1408
|
+
text_search_edits: astSearchEdits,
|
|
1409
|
+
changes: allChanges,
|
|
1410
|
+
applied: !dry_run,
|
|
1411
|
+
};
|
|
1412
|
+
}
|
|
1413
|
+
async impact(repo, params) {
|
|
1414
|
+
try {
|
|
1415
|
+
return await this._impactImpl(repo, params);
|
|
1416
|
+
}
|
|
1417
|
+
catch (err) {
|
|
1418
|
+
// Return structured error instead of crashing (#321)
|
|
1419
|
+
return {
|
|
1420
|
+
error: (err instanceof Error ? err.message : String(err)) || 'Impact analysis failed',
|
|
1421
|
+
target: { name: params.target },
|
|
1422
|
+
direction: params.direction,
|
|
1423
|
+
impactedCount: 0,
|
|
1424
|
+
risk: 'UNKNOWN',
|
|
1425
|
+
suggestion: 'The graph query failed — try gitnexus context <symbol> as a fallback',
|
|
1426
|
+
};
|
|
1427
|
+
}
|
|
1428
|
+
}
|
|
1429
|
+
    /**
     * Core impact analysis: breadth-first traversal of the code graph from a
     * named target, one batched Cypher query per depth level, followed by
     * process/module enrichment and threshold-based risk scoring.
     *
     * @param {object} repo   - resolved repo record with `id`.
     * @param {object} params - { target, direction: 'upstream'|other, maxDepth?, relationTypes?,
     *                            includeTests?, minConfidence? }
     * @returns {Promise<object>} impact report, or { error } if the target is not found.
     */
    async _impactImpl(repo, params) {
        await this.ensureInitialized(repo.id);
        const { target, direction } = params;
        const maxDepth = params.maxDepth || 3;
        // Whitelist-filter requested relation types; fall back to the default set
        // both when none were requested and when all requested ones were invalid.
        const rawRelTypes = params.relationTypes && params.relationTypes.length > 0
            ? params.relationTypes.filter(t => VALID_RELATION_TYPES.has(t))
            : ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS'];
        const relationTypes = rawRelTypes.length > 0 ? rawRelTypes : ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS'];
        const includeTests = params.includeTests ?? false;
        const minConfidence = params.minConfidence ?? 0;
        // Safe to interpolate: relationTypes passed the whitelist above and
        // minConfidence is only used when numerically > 0.
        const relTypeFilter = relationTypes.map(t => `'${t}'`).join(', ');
        const confidenceFilter = minConfidence > 0 ? ` AND r.confidence >= ${minConfidence}` : '';
        const targets = await executeParameterized(repo.id, `
      MATCH (n)
      WHERE n.name = $targetName
      RETURN n.id AS id, n.name AS name, labels(n)[0] AS type, n.filePath AS filePath
      LIMIT 1
    `, { targetName: target });
        if (targets.length === 0)
            return { error: `Target '${target}' not found` };
        const sym = targets[0];
        // Dual access (named column OR positional) tolerates both row shapes.
        const symId = sym.id || sym[0];
        const impacted = [];
        const visited = new Set([symId]); // seed so the target never reports itself
        let frontier = [symId];
        let traversalComplete = true;
        // BFS: one query per depth level over the whole frontier.
        for (let depth = 1; depth <= maxDepth && frontier.length > 0; depth++) {
            const nextFrontier = [];
            // Batch frontier nodes into a single Cypher query per depth level
            // (ids are single-quote escaped before interpolation).
            const idList = frontier.map(id => `'${id.replace(/'/g, "''")}'`).join(', ');
            // upstream = who depends on the target (callers); otherwise downstream.
            const query = direction === 'upstream'
                ? `MATCH (caller)-[r:CodeRelation]->(n) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, caller.id AS id, caller.name AS name, labels(caller)[0] AS type, caller.filePath AS filePath, r.type AS relType, r.confidence AS confidence`
                : `MATCH (n)-[r:CodeRelation]->(callee) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, callee.id AS id, callee.name AS name, labels(callee)[0] AS type, callee.filePath AS filePath, r.type AS relType, r.confidence AS confidence`;
            try {
                const related = await executeQuery(repo.id, query);
                for (const rel of related) {
                    const relId = rel.id || rel[1];
                    const filePath = rel.filePath || rel[4] || '';
                    // Test files are excluded from impact unless explicitly requested.
                    if (!includeTests && isTestFilePath(filePath))
                        continue;
                    if (!visited.has(relId)) {
                        visited.add(relId);
                        nextFrontier.push(relId);
                        impacted.push({
                            depth,
                            id: relId,
                            name: rel.name || rel[2],
                            type: rel.type || rel[3],
                            filePath,
                            relationType: rel.relType || rel[5],
                            // NOTE(review): `||` maps a stored confidence of 0 to the
                            // 1.0 default — probably intended as missing-value fallback
                            // only; confirm whether 0 is a legal stored confidence.
                            confidence: rel.confidence || rel[6] || 1.0,
                        });
                    }
                }
            }
            catch (e) {
                logQueryError('impact:depth-traversal', e);
                // Break out of depth loop on query failure but return partial results
                // collected so far, rather than silently swallowing the error (#321)
                traversalComplete = false;
                break;
            }
            frontier = nextFrontier;
        }
        // Group impacted symbols by BFS depth for the byDepth view.
        const grouped = {};
        for (const item of impacted) {
            if (!grouped[item.depth])
                grouped[item.depth] = [];
            grouped[item.depth].push(item);
        }
        // ── Enrichment: affected processes, modules, risk ──────────────
        const directCount = (grouped[1] || []).length;
        let affectedProcesses = [];
        let affectedModules = [];
        if (impacted.length > 0) {
            const allIds = impacted.map(i => `'${i.id.replace(/'/g, "''")}'`).join(', ');
            const d1Ids = (grouped[1] || []).map((i) => `'${i.id.replace(/'/g, "''")}'`).join(', ');
            // Affected processes: which execution flows are broken and at which step.
            // Three queries run in parallel; each degrades to [] on failure so
            // enrichment never sinks the whole analysis.
            const [processRows, moduleRows, directModuleRows] = await Promise.all([
                executeQuery(repo.id, `
        MATCH (s)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
        WHERE s.id IN [${allIds}]
        RETURN p.heuristicLabel AS name, COUNT(DISTINCT s.id) AS hits, MIN(r.step) AS minStep, p.stepCount AS stepCount
        ORDER BY hits DESC
        LIMIT 20
      `).catch(() => []),
                executeQuery(repo.id, `
        MATCH (s)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
        WHERE s.id IN [${allIds}]
        RETURN c.heuristicLabel AS name, COUNT(DISTINCT s.id) AS hits
        ORDER BY hits DESC
        LIMIT 20
      `).catch(() => []),
                // Modules touched at depth 1 — used to tag direct vs indirect below.
                d1Ids ? executeQuery(repo.id, `
        MATCH (s)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
        WHERE s.id IN [${d1Ids}]
        RETURN DISTINCT c.heuristicLabel AS name
      `).catch(() => []) : Promise.resolve([]),
            ]);
            affectedProcesses = processRows.map((r) => ({
                name: r.name || r[0],
                hits: r.hits || r[1],
                // ?? (not ||) so step 0 survives as a real value.
                broken_at_step: r.minStep ?? r[2],
                step_count: r.stepCount ?? r[3],
            }));
            const directModuleSet = new Set(directModuleRows.map((r) => r.name || r[0]));
            affectedModules = moduleRows.map((r) => {
                const name = r.name || r[0];
                return {
                    name,
                    hits: r.hits || r[1],
                    impact: directModuleSet.has(name) ? 'direct' : 'indirect',
                };
            });
        }
        // Risk scoring: highest matching tier wins.
        const processCount = affectedProcesses.length;
        const moduleCount = affectedModules.length;
        let risk = 'LOW';
        if (directCount >= 30 || processCount >= 5 || moduleCount >= 5 || impacted.length >= 200) {
            risk = 'CRITICAL';
        }
        else if (directCount >= 15 || processCount >= 3 || moduleCount >= 3 || impacted.length >= 100) {
            risk = 'HIGH';
        }
        else if (directCount >= 5 || impacted.length >= 30) {
            risk = 'MEDIUM';
        }
        return {
            target: {
                id: symId,
                name: sym.name || sym[1],
                type: sym.type || sym[2],
                filePath: sym.filePath || sym[3],
            },
            direction,
            impactedCount: impacted.length,
            risk,
            // `partial: true` only appears when the traversal aborted early.
            ...(!traversalComplete && { partial: true }),
            summary: {
                direct: directCount,
                processes_affected: processCount,
                modules_affected: moduleCount,
            },
            affected_processes: affectedProcesses,
            affected_modules: affectedModules,
            byDepth: grouped,
        };
    }
|
|
1578
|
+
// ─── Direct Graph Queries (for resources.ts) ────────────────────
|
|
1579
|
+
/**
|
|
1580
|
+
* Query clusters (communities) directly from graph.
|
|
1581
|
+
* Used by getClustersResource — avoids legacy overview() dispatch.
|
|
1582
|
+
*/
|
|
1583
|
+
async queryClusters(repoName, limit = 100) {
|
|
1584
|
+
const repo = await this.resolveRepo(repoName);
|
|
1585
|
+
await this.ensureInitialized(repo.id);
|
|
1586
|
+
try {
|
|
1587
|
+
const rawLimit = Math.max(limit * 5, 200);
|
|
1588
|
+
const clusters = await executeQuery(repo.id, `
|
|
1589
|
+
MATCH (c:Community)
|
|
1590
|
+
RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
|
|
1591
|
+
ORDER BY c.symbolCount DESC
|
|
1592
|
+
LIMIT ${rawLimit}
|
|
1593
|
+
`);
|
|
1594
|
+
const rawClusters = clusters.map((c) => ({
|
|
1595
|
+
id: c.id || c[0],
|
|
1596
|
+
label: c.label || c[1],
|
|
1597
|
+
heuristicLabel: c.heuristicLabel || c[2],
|
|
1598
|
+
cohesion: c.cohesion || c[3],
|
|
1599
|
+
symbolCount: c.symbolCount || c[4],
|
|
1600
|
+
}));
|
|
1601
|
+
return { clusters: this.aggregateClusters(rawClusters).slice(0, limit) };
|
|
1602
|
+
}
|
|
1603
|
+
catch {
|
|
1604
|
+
return { clusters: [] };
|
|
1605
|
+
}
|
|
1606
|
+
}
|
|
1607
|
+
/**
|
|
1608
|
+
* Query processes directly from graph.
|
|
1609
|
+
* Used by getProcessesResource — avoids legacy overview() dispatch.
|
|
1610
|
+
*/
|
|
1611
|
+
async queryProcesses(repoName, limit = 50) {
|
|
1612
|
+
const repo = await this.resolveRepo(repoName);
|
|
1613
|
+
await this.ensureInitialized(repo.id);
|
|
1614
|
+
try {
|
|
1615
|
+
const processes = await executeQuery(repo.id, `
|
|
1616
|
+
MATCH (p:Process)
|
|
1617
|
+
RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
|
|
1618
|
+
ORDER BY p.stepCount DESC
|
|
1619
|
+
LIMIT ${limit}
|
|
1620
|
+
`);
|
|
1621
|
+
return {
|
|
1622
|
+
processes: processes.map((p) => ({
|
|
1623
|
+
id: p.id || p[0],
|
|
1624
|
+
label: p.label || p[1],
|
|
1625
|
+
heuristicLabel: p.heuristicLabel || p[2],
|
|
1626
|
+
processType: p.processType || p[3],
|
|
1627
|
+
stepCount: p.stepCount || p[4],
|
|
1628
|
+
})),
|
|
1629
|
+
};
|
|
1630
|
+
}
|
|
1631
|
+
catch {
|
|
1632
|
+
return { processes: [] };
|
|
1633
|
+
}
|
|
1634
|
+
}
|
|
1635
|
+
/**
|
|
1636
|
+
* Query cluster detail (members) directly from graph.
|
|
1637
|
+
* Used by getClusterDetailResource.
|
|
1638
|
+
*/
|
|
1639
|
+
async queryClusterDetail(name, repoName) {
|
|
1640
|
+
const repo = await this.resolveRepo(repoName);
|
|
1641
|
+
await this.ensureInitialized(repo.id);
|
|
1642
|
+
const clusters = await executeParameterized(repo.id, `
|
|
1643
|
+
MATCH (c:Community)
|
|
1644
|
+
WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
|
|
1645
|
+
RETURN c.id AS id, c.label AS label, c.heuristicLabel AS heuristicLabel, c.cohesion AS cohesion, c.symbolCount AS symbolCount
|
|
1646
|
+
`, { clusterName: name });
|
|
1647
|
+
if (clusters.length === 0)
|
|
1648
|
+
return { error: `Cluster '${name}' not found` };
|
|
1649
|
+
const rawClusters = clusters.map((c) => ({
|
|
1650
|
+
id: c.id || c[0], label: c.label || c[1], heuristicLabel: c.heuristicLabel || c[2],
|
|
1651
|
+
cohesion: c.cohesion || c[3], symbolCount: c.symbolCount || c[4],
|
|
1652
|
+
}));
|
|
1653
|
+
let totalSymbols = 0, weightedCohesion = 0;
|
|
1654
|
+
for (const c of rawClusters) {
|
|
1655
|
+
const s = c.symbolCount || 0;
|
|
1656
|
+
totalSymbols += s;
|
|
1657
|
+
weightedCohesion += (c.cohesion || 0) * s;
|
|
1658
|
+
}
|
|
1659
|
+
const members = await executeParameterized(repo.id, `
|
|
1660
|
+
MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
|
|
1661
|
+
WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
|
|
1662
|
+
RETURN DISTINCT n.name AS name, labels(n)[0] AS type, n.filePath AS filePath
|
|
1663
|
+
LIMIT 30
|
|
1664
|
+
`, { clusterName: name });
|
|
1665
|
+
return {
|
|
1666
|
+
cluster: {
|
|
1667
|
+
id: rawClusters[0].id,
|
|
1668
|
+
label: rawClusters[0].heuristicLabel || rawClusters[0].label,
|
|
1669
|
+
heuristicLabel: rawClusters[0].heuristicLabel || rawClusters[0].label,
|
|
1670
|
+
cohesion: totalSymbols > 0 ? weightedCohesion / totalSymbols : 0,
|
|
1671
|
+
symbolCount: totalSymbols,
|
|
1672
|
+
subCommunities: rawClusters.length,
|
|
1673
|
+
},
|
|
1674
|
+
members: members.map((m) => ({
|
|
1675
|
+
name: m.name || m[0], type: m.type || m[1], filePath: m.filePath || m[2],
|
|
1676
|
+
})),
|
|
1677
|
+
};
|
|
1678
|
+
}
|
|
1679
|
+
/**
|
|
1680
|
+
* Query process detail (steps) directly from graph.
|
|
1681
|
+
* Used by getProcessDetailResource.
|
|
1682
|
+
*/
|
|
1683
|
+
async queryProcessDetail(name, repoName) {
|
|
1684
|
+
const repo = await this.resolveRepo(repoName);
|
|
1685
|
+
await this.ensureInitialized(repo.id);
|
|
1686
|
+
const processes = await executeParameterized(repo.id, `
|
|
1687
|
+
MATCH (p:Process)
|
|
1688
|
+
WHERE p.label = $processName OR p.heuristicLabel = $processName
|
|
1689
|
+
RETURN p.id AS id, p.label AS label, p.heuristicLabel AS heuristicLabel, p.processType AS processType, p.stepCount AS stepCount
|
|
1690
|
+
LIMIT 1
|
|
1691
|
+
`, { processName: name });
|
|
1692
|
+
if (processes.length === 0)
|
|
1693
|
+
return { error: `Process '${name}' not found` };
|
|
1694
|
+
const proc = processes[0];
|
|
1695
|
+
const procId = proc.id || proc[0];
|
|
1696
|
+
const steps = await executeParameterized(repo.id, `
|
|
1697
|
+
MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: $procId})
|
|
1698
|
+
RETURN n.name AS name, labels(n)[0] AS type, n.filePath AS filePath, r.step AS step
|
|
1699
|
+
ORDER BY r.step
|
|
1700
|
+
`, { procId });
|
|
1701
|
+
return {
|
|
1702
|
+
process: {
|
|
1703
|
+
id: procId, label: proc.label || proc[1], heuristicLabel: proc.heuristicLabel || proc[2],
|
|
1704
|
+
processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
|
|
1705
|
+
},
|
|
1706
|
+
steps: steps.map((s) => ({
|
|
1707
|
+
step: s.step || s[3], name: s.name || s[0], type: s.type || s[1], filePath: s.filePath || s[2],
|
|
1708
|
+
})),
|
|
1709
|
+
};
|
|
1710
|
+
}
|
|
1711
|
+
async disconnect() {
|
|
1712
|
+
await closeLbug(); // close all connections
|
|
1713
|
+
// Note: we intentionally do NOT call disposeEmbedder() here.
|
|
1714
|
+
// ONNX Runtime's native cleanup segfaults on macOS and some Linux configs,
|
|
1715
|
+
// and importing the embedder module on Node v24+ crashes if onnxruntime
|
|
1716
|
+
// was never loaded during the session. Since process.exit(0) follows
|
|
1717
|
+
// immediately after disconnect(), the OS reclaims everything. See #38, #89.
|
|
1718
|
+
this.repos.clear();
|
|
1719
|
+
this.contextCache.clear();
|
|
1720
|
+
this.initializedRepos.clear();
|
|
1721
|
+
}
|
|
1722
|
+
}
|