@duytransipher/gitnexus 1.1.2 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/analyze.js +9 -0
- package/dist/cli/setup.js +15 -0
- package/dist/core/augmentation/engine.js +3 -2
- package/dist/core/graph/types.d.ts +1 -1
- package/dist/core/ingestion/workers/parse-worker.js +31 -1
- package/dist/core/lbug/csv-generator.js +1 -1
- package/dist/core/lbug/lbug-adapter.js +1 -1
- package/dist/core/lbug/schema.d.ts +3 -2
- package/dist/core/lbug/schema.js +13 -1
- package/dist/core/wiki/generator.d.ts +6 -0
- package/dist/core/wiki/generator.js +93 -1
- package/dist/core/wiki/graph-queries.js +7 -6
- package/dist/core/wiki/llm-client.d.ts +2 -0
- package/dist/core/wiki/llm-client.js +152 -0
- package/dist/mcp/local/local-backend.js +46 -40
- package/dist/mcp/resources.js +1 -1
- package/dist/unreal/blueprint-ingestion.d.ts +18 -0
- package/dist/unreal/blueprint-ingestion.js +200 -0
- package/dist/unreal/types.d.ts +27 -0
- package/package.json +1 -1
- package/scripts/setup-unreal-gitnexus.ps1 +14 -0
package/dist/cli/analyze.js
CHANGED
|
@@ -182,6 +182,15 @@ export const analyzeCommand = async (inputPath, options) => {
|
|
|
182
182
|
const scaled = Math.round(progress.percent * 0.6);
|
|
183
183
|
updateBar(scaled, phaseLabel);
|
|
184
184
|
});
|
|
185
|
+
// ── Phase 1.5: Blueprint Ingestion (optional) ────────────────────────
|
|
186
|
+
try {
|
|
187
|
+
const { ingestBlueprintsIntoGraph } = await import('../unreal/blueprint-ingestion.js');
|
|
188
|
+
const bpResult = await ingestBlueprintsIntoGraph(pipelineResult.graph, storagePath);
|
|
189
|
+
if (bpResult.nodesAdded > 0) {
|
|
190
|
+
updateBar(61, `Indexed ${bpResult.nodesAdded} Blueprints (${bpResult.edgesAdded} edges)`);
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
catch { /* non-fatal — Unreal integration is optional */ }
|
|
185
194
|
// ── Phase 2: LadybugDB (60–85%) ──────────────────────────────────────
|
|
186
195
|
updateBar(60, 'Loading into LadybugDB...');
|
|
187
196
|
await closeLbug();
|
package/dist/cli/setup.js
CHANGED
|
@@ -366,6 +366,21 @@ function resolveEditorCmd(explicitPath, engineAssociation) {
|
|
|
366
366
|
}
|
|
367
367
|
}
|
|
368
368
|
catch { /* registry key not found */ }
|
|
369
|
+
// Try LauncherInstalled.dat (Epic Games Launcher writes this for all engine installs)
|
|
370
|
+
try {
|
|
371
|
+
const launcherDat = path.join(process.env.LOCALAPPDATA || path.join(os.homedir(), 'AppData', 'Local'), 'EpicGames', 'UnrealEngineLauncher', 'LauncherInstalled.dat');
|
|
372
|
+
const launcher = JSON.parse(fsSync.readFileSync(launcherDat, 'utf-8'));
|
|
373
|
+
if (Array.isArray(launcher.InstallationList)) {
|
|
374
|
+
for (const entry of launcher.InstallationList) {
|
|
375
|
+
if (entry.AppName === `UE_${engineAssociation}` || entry.AppName === engineAssociation) {
|
|
376
|
+
const found = tryCandidates(entry.InstallLocation);
|
|
377
|
+
if (found)
|
|
378
|
+
return found;
|
|
379
|
+
}
|
|
380
|
+
}
|
|
381
|
+
}
|
|
382
|
+
}
|
|
383
|
+
catch { /* LauncherInstalled.dat not found or unparseable */ }
|
|
369
384
|
// Try standard install path
|
|
370
385
|
const found = tryCandidates(path.join('C:\\Program Files\\Epic Games', `UE_${engineAssociation}`));
|
|
371
386
|
if (found)
|
|
@@ -95,20 +95,21 @@ export async function augment(pattern, cwd) {
|
|
|
95
95
|
return '';
|
|
96
96
|
// Step 2: Map BM25 file results to symbols
|
|
97
97
|
const symbolMatches = [];
|
|
98
|
+
const extractLabel = (v) => Array.isArray(v) ? v[0] : typeof v === 'string' ? v : undefined;
|
|
98
99
|
for (const result of bm25Results.slice(0, 5)) {
|
|
99
100
|
const escaped = result.filePath.replace(/'/g, "''");
|
|
100
101
|
try {
|
|
101
102
|
const symbols = await executeQuery(repoId, `
|
|
102
103
|
MATCH (n) WHERE n.filePath = '${escaped}'
|
|
103
104
|
AND n.name CONTAINS '${pattern.replace(/'/g, "''").split(/\s+/)[0]}'
|
|
104
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
105
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath
|
|
105
106
|
LIMIT 3
|
|
106
107
|
`);
|
|
107
108
|
for (const sym of symbols) {
|
|
108
109
|
symbolMatches.push({
|
|
109
110
|
nodeId: sym.id || sym[0],
|
|
110
111
|
name: sym.name || sym[1],
|
|
111
|
-
type: sym.type || sym[2],
|
|
112
|
+
type: extractLabel(sym.type || sym[2]),
|
|
112
113
|
filePath: sym.filePath || sym[3],
|
|
113
114
|
score: result.score,
|
|
114
115
|
});
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
export type NodeLabel = 'Project' | 'Package' | 'Module' | 'Folder' | 'File' | 'Class' | 'Function' | 'Method' | 'Variable' | 'Interface' | 'Enum' | 'Decorator' | 'Import' | 'Type' | 'CodeElement' | 'Community' | 'Process' | 'Struct' | 'Macro' | 'Typedef' | 'Union' | 'Namespace' | 'Trait' | 'Impl' | 'TypeAlias' | 'Const' | 'Static' | 'Property' | 'Record' | 'Delegate' | 'Annotation' | 'Constructor' | 'Template';
|
|
1
|
+
export type NodeLabel = 'Project' | 'Package' | 'Module' | 'Folder' | 'File' | 'Class' | 'Function' | 'Method' | 'Variable' | 'Interface' | 'Enum' | 'Decorator' | 'Import' | 'Type' | 'CodeElement' | 'Community' | 'Process' | 'Struct' | 'Macro' | 'Typedef' | 'Union' | 'Namespace' | 'Trait' | 'Impl' | 'TypeAlias' | 'Const' | 'Static' | 'Property' | 'Record' | 'Delegate' | 'Annotation' | 'Constructor' | 'Template' | 'Blueprint';
|
|
2
2
|
import { SupportedLanguages } from '../../config/supported-languages.js';
|
|
3
3
|
export type NodeProperties = {
|
|
4
4
|
name: string;
|
|
@@ -1024,7 +1024,37 @@ const processFileGroup = (files, language, queryString, result, onFileProcessed)
|
|
|
1024
1024
|
// Compute enclosing class for Method/Constructor/Property/Function — used for both ownerId and HAS_METHOD
|
|
1025
1025
|
// Function is included because Kotlin/Rust/Python capture class methods as Function nodes
|
|
1026
1026
|
const needsOwner = nodeLabel === 'Method' || nodeLabel === 'Constructor' || nodeLabel === 'Property' || nodeLabel === 'Function';
|
|
1027
|
-
|
|
1027
|
+
let enclosingClassId = needsOwner ? findEnclosingClassId(nameNode || definitionNode, file.path) : null;
|
|
1028
|
+
// C/C++ out-of-line methods: extract class from qualified_identifier scope.
|
|
1029
|
+
// e.g. void Duck::speak() → scope class "Duck". Since the class is declared
|
|
1030
|
+
// in a header (different file), AST walk can't find it. Search accumulated
|
|
1031
|
+
// batch nodes for a matching Class node instead.
|
|
1032
|
+
if (!enclosingClassId && needsOwner && nodeLabel === 'Method'
|
|
1033
|
+
&& (language === SupportedLanguages.CPlusPlus || language === SupportedLanguages.C)) {
|
|
1034
|
+
const qualifiedId = nameNode?.parent;
|
|
1035
|
+
if (qualifiedId?.type === 'qualified_identifier') {
|
|
1036
|
+
const scopeNode = qualifiedId.childForFieldName?.('scope')
|
|
1037
|
+
?? qualifiedId.children?.find((c) => (c.type === 'namespace_identifier' || c.type === 'identifier' || c.type === 'type_identifier')
|
|
1038
|
+
&& c !== nameNode);
|
|
1039
|
+
const scopeClassName = scopeNode?.text;
|
|
1040
|
+
if (scopeClassName) {
|
|
1041
|
+
// Prefer the Class node defined in the canonical header (ClassName.h)
|
|
1042
|
+
const matchingClasses = result.nodes.filter(n => n.label === 'Class' && n.properties.name === scopeClassName);
|
|
1043
|
+
if (matchingClasses.length === 1) {
|
|
1044
|
+
enclosingClassId = matchingClasses[0].id;
|
|
1045
|
+
}
|
|
1046
|
+
else if (matchingClasses.length > 1) {
|
|
1047
|
+
// Prefer the class in a header whose base name matches the class name
|
|
1048
|
+
const preferred = matchingClasses.find(n => {
|
|
1049
|
+
const fp = (n.properties.filePath || '').toLowerCase();
|
|
1050
|
+
return fp.endsWith(`${scopeClassName.toLowerCase()}.h`)
|
|
1051
|
+
|| fp.endsWith(`${scopeClassName.toLowerCase()}.hpp`);
|
|
1052
|
+
});
|
|
1053
|
+
enclosingClassId = (preferred || matchingClasses[0]).id;
|
|
1054
|
+
}
|
|
1055
|
+
}
|
|
1056
|
+
}
|
|
1057
|
+
}
|
|
1028
1058
|
result.symbols.push({
|
|
1029
1059
|
filePath: file.path,
|
|
1030
1060
|
name: nodeName,
|
|
@@ -211,7 +211,7 @@ export const streamAllCSVsToDisk = async (graph, repoPath, csvDir) => {
|
|
|
211
211
|
// Multi-language node types share the same CSV shape (no isExported column)
|
|
212
212
|
const multiLangHeader = 'id,name,filePath,startLine,endLine,content,description';
|
|
213
213
|
const MULTI_LANG_TYPES = ['Struct', 'Enum', 'Macro', 'Typedef', 'Union', 'Namespace', 'Trait', 'Impl',
|
|
214
|
-
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module'];
|
|
214
|
+
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module', 'Blueprint'];
|
|
215
215
|
const multiLangWriters = new Map();
|
|
216
216
|
for (const t of MULTI_LANG_TYPES) {
|
|
217
217
|
multiLangWriters.set(t, new BufferedCSVWriter(path.join(csvDir, `${t.toLowerCase()}.csv`), multiLangHeader));
|
|
@@ -270,7 +270,7 @@ const COPY_CSV_OPTS = `(HEADER=true, ESCAPE='"', DELIM=',', QUOTE='"', PARALLEL=
|
|
|
270
270
|
const BACKTICK_TABLES = new Set([
|
|
271
271
|
'Struct', 'Enum', 'Macro', 'Typedef', 'Union', 'Namespace', 'Trait', 'Impl',
|
|
272
272
|
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation',
|
|
273
|
-
'Constructor', 'Template', 'Module',
|
|
273
|
+
'Constructor', 'Template', 'Module', 'Blueprint',
|
|
274
274
|
]);
|
|
275
275
|
const escapeTableName = (table) => {
|
|
276
276
|
return BACKTICK_TABLES.has(table) ? `\`${table}\`` : table;
|
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
* This allows LLMs to write natural Cypher queries like:
|
|
9
9
|
* MATCH (f:Function)-[r:CodeRelation {type: 'CALLS'}]->(g:Function) RETURN f, g
|
|
10
10
|
*/
|
|
11
|
-
export declare const NODE_TABLES: readonly ["File", "Folder", "Function", "Class", "Interface", "Method", "CodeElement", "Community", "Process", "Struct", "Enum", "Macro", "Typedef", "Union", "Namespace", "Trait", "Impl", "TypeAlias", "Const", "Static", "Property", "Record", "Delegate", "Annotation", "Constructor", "Template", "Module"];
|
|
11
|
+
export declare const NODE_TABLES: readonly ["File", "Folder", "Function", "Class", "Interface", "Method", "CodeElement", "Community", "Process", "Struct", "Enum", "Macro", "Typedef", "Union", "Namespace", "Trait", "Impl", "TypeAlias", "Const", "Static", "Property", "Record", "Delegate", "Annotation", "Constructor", "Template", "Module", "Blueprint"];
|
|
12
12
|
export type NodeTableName = typeof NODE_TABLES[number];
|
|
13
13
|
export declare const REL_TABLE_NAME = "CodeRelation";
|
|
14
14
|
export declare const REL_TYPES: readonly ["CONTAINS", "DEFINES", "IMPORTS", "CALLS", "EXTENDS", "IMPLEMENTS", "HAS_METHOD", "HAS_PROPERTY", "ACCESSES", "OVERRIDES", "MEMBER_OF", "STEP_IN_PROCESS"];
|
|
@@ -41,7 +41,8 @@ export declare const ANNOTATION_SCHEMA: string;
|
|
|
41
41
|
export declare const CONSTRUCTOR_SCHEMA: string;
|
|
42
42
|
export declare const TEMPLATE_SCHEMA: string;
|
|
43
43
|
export declare const MODULE_SCHEMA: string;
|
|
44
|
-
export declare const
|
|
44
|
+
export declare const BLUEPRINT_SCHEMA: string;
|
|
45
|
+
export declare const RELATION_SCHEMA = "\nCREATE REL TABLE CodeRelation (\n FROM File TO File,\n FROM File TO Folder,\n FROM File TO Function,\n FROM File TO Class,\n FROM File TO Interface,\n FROM File TO Method,\n FROM File TO CodeElement,\n FROM File TO `Struct`,\n FROM File TO `Enum`,\n FROM File TO `Macro`,\n FROM File TO `Typedef`,\n FROM File TO `Union`,\n FROM File TO `Namespace`,\n FROM File TO `Trait`,\n FROM File TO `Impl`,\n FROM File TO `TypeAlias`,\n FROM File TO `Const`,\n FROM File TO `Static`,\n FROM File TO `Property`,\n FROM File TO `Record`,\n FROM File TO `Delegate`,\n FROM File TO `Annotation`,\n FROM File TO `Constructor`,\n FROM File TO `Template`,\n FROM File TO `Module`,\n FROM Folder TO Folder,\n FROM Folder TO File,\n FROM Function TO Function,\n FROM Function TO Method,\n FROM Function TO Class,\n FROM Function TO Community,\n FROM Function TO `Macro`,\n FROM Function TO `Struct`,\n FROM Function TO `Template`,\n FROM Function TO `Enum`,\n FROM Function TO `Namespace`,\n FROM Function TO `TypeAlias`,\n FROM Function TO `Module`,\n FROM Function TO `Impl`,\n FROM Function TO Interface,\n FROM Function TO `Constructor`,\n FROM Function TO `Const`,\n FROM Function TO `Typedef`,\n FROM Function TO `Union`,\n FROM Function TO `Property`,\n FROM Class TO Method,\n FROM Class TO Function,\n FROM Class TO Class,\n FROM Class TO Interface,\n FROM Class TO Community,\n FROM Class TO `Template`,\n FROM Class TO `TypeAlias`,\n FROM Class TO `Struct`,\n FROM Class TO `Enum`,\n FROM Class TO `Annotation`,\n FROM Class TO `Constructor`,\n FROM Class TO `Trait`,\n FROM Class TO `Macro`,\n FROM Class TO `Impl`,\n FROM Class TO `Union`,\n FROM Class TO `Namespace`,\n FROM Class TO `Typedef`,\n FROM Class TO `Property`,\n FROM Method TO Function,\n FROM Method TO Method,\n FROM Method TO Class,\n FROM Method TO Community,\n FROM Method TO `Template`,\n FROM Method TO `Struct`,\n FROM Method TO `TypeAlias`,\n FROM Method TO `Enum`,\n FROM Method TO 
`Macro`,\n FROM Method TO `Namespace`,\n FROM Method TO `Module`,\n FROM Method TO `Impl`,\n FROM Method TO Interface,\n FROM Method TO `Constructor`,\n FROM Method TO `Property`,\n FROM `Template` TO `Template`,\n FROM `Template` TO Function,\n FROM `Template` TO Method,\n FROM `Template` TO Class,\n FROM `Template` TO `Struct`,\n FROM `Template` TO `TypeAlias`,\n FROM `Template` TO `Enum`,\n FROM `Template` TO `Macro`,\n FROM `Template` TO Interface,\n FROM `Template` TO `Constructor`,\n FROM `Module` TO `Module`,\n FROM CodeElement TO Community,\n FROM Interface TO Community,\n FROM Interface TO Function,\n FROM Interface TO Method,\n FROM Interface TO Class,\n FROM Interface TO Interface,\n FROM Interface TO `TypeAlias`,\n FROM Interface TO `Struct`,\n FROM Interface TO `Constructor`,\n FROM Interface TO `Property`,\n FROM `Struct` TO Community,\n FROM `Struct` TO `Trait`,\n FROM `Struct` TO `Struct`,\n FROM `Struct` TO Class,\n FROM `Struct` TO `Enum`,\n FROM `Struct` TO Function,\n FROM `Struct` TO Method,\n FROM `Struct` TO Interface,\n FROM `Struct` TO `Constructor`,\n FROM `Struct` TO `Property`,\n FROM `Enum` TO `Enum`,\n FROM `Enum` TO Community,\n FROM `Enum` TO Class,\n FROM `Enum` TO Interface,\n FROM `Macro` TO Community,\n FROM `Macro` TO Function,\n FROM `Macro` TO Method,\n FROM `Module` TO Function,\n FROM `Module` TO Method,\n FROM `Typedef` TO Community,\n FROM `Union` TO Community,\n FROM `Namespace` TO Community,\n FROM `Namespace` TO `Struct`,\n FROM `Trait` TO Method,\n FROM `Trait` TO `Constructor`,\n FROM `Trait` TO `Property`,\n FROM `Trait` TO Community,\n FROM `Impl` TO Method,\n FROM `Impl` TO `Constructor`,\n FROM `Impl` TO `Property`,\n FROM `Impl` TO Community,\n FROM `Impl` TO `Trait`,\n FROM `Impl` TO `Struct`,\n FROM `Impl` TO `Impl`,\n FROM `TypeAlias` TO Community,\n FROM `TypeAlias` TO `Trait`,\n FROM `TypeAlias` TO Class,\n FROM `Const` TO Community,\n FROM `Static` TO Community,\n FROM `Property` TO Community,\n FROM 
`Record` TO Method,\n FROM `Record` TO `Constructor`,\n FROM `Record` TO `Property`,\n FROM `Record` TO Community,\n FROM `Delegate` TO Community,\n FROM `Annotation` TO Community,\n FROM `Constructor` TO Community,\n FROM `Constructor` TO Interface,\n FROM `Constructor` TO Class,\n FROM `Constructor` TO Method,\n FROM `Constructor` TO Function,\n FROM `Constructor` TO `Constructor`,\n FROM `Constructor` TO `Struct`,\n FROM `Constructor` TO `Macro`,\n FROM `Constructor` TO `Template`,\n FROM `Constructor` TO `TypeAlias`,\n FROM `Constructor` TO `Enum`,\n FROM `Constructor` TO `Annotation`,\n FROM `Constructor` TO `Impl`,\n FROM `Constructor` TO `Namespace`,\n FROM `Constructor` TO `Module`,\n FROM `Constructor` TO `Property`,\n FROM `Constructor` TO `Typedef`,\n FROM `Template` TO Community,\n FROM `Module` TO Community,\n FROM Function TO Process,\n FROM Method TO Process,\n FROM Class TO Process,\n FROM Interface TO Process,\n FROM `Struct` TO Process,\n FROM `Constructor` TO Process,\n FROM `Module` TO Process,\n FROM `Macro` TO Process,\n FROM `Impl` TO Process,\n FROM `Typedef` TO Process,\n FROM `TypeAlias` TO Process,\n FROM `Enum` TO Process,\n FROM `Union` TO Process,\n FROM `Namespace` TO Process,\n FROM `Trait` TO Process,\n FROM `Const` TO Process,\n FROM `Static` TO Process,\n FROM `Property` TO Process,\n FROM `Record` TO Process,\n FROM `Delegate` TO Process,\n FROM `Annotation` TO Process,\n FROM `Template` TO Process,\n FROM CodeElement TO Process,\n FROM `Blueprint` TO Class,\n FROM `Blueprint` TO `Struct`,\n FROM `Blueprint` TO Method,\n FROM `Blueprint` TO Function,\n FROM `Blueprint` TO `Blueprint`,\n FROM `Blueprint` TO Community,\n FROM `Blueprint` TO Process,\n type STRING,\n confidence DOUBLE,\n reason STRING,\n step INT32\n)";
|
|
45
46
|
export declare const EMBEDDING_SCHEMA = "\nCREATE NODE TABLE CodeEmbedding (\n nodeId STRING,\n embedding FLOAT[384],\n PRIMARY KEY (nodeId)\n)";
|
|
46
47
|
/**
|
|
47
48
|
* Create vector index for semantic search
|
package/dist/core/lbug/schema.js
CHANGED
|
@@ -15,7 +15,9 @@ export const NODE_TABLES = [
|
|
|
15
15
|
'File', 'Folder', 'Function', 'Class', 'Interface', 'Method', 'CodeElement', 'Community', 'Process',
|
|
16
16
|
// Multi-language support
|
|
17
17
|
'Struct', 'Enum', 'Macro', 'Typedef', 'Union', 'Namespace', 'Trait', 'Impl',
|
|
18
|
-
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module'
|
|
18
|
+
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module',
|
|
19
|
+
// Unreal Engine Blueprint assets
|
|
20
|
+
'Blueprint'
|
|
19
21
|
];
|
|
20
22
|
// ============================================================================
|
|
21
23
|
// RELATION TABLE
|
|
@@ -171,6 +173,8 @@ export const ANNOTATION_SCHEMA = CODE_ELEMENT_BASE('Annotation');
|
|
|
171
173
|
export const CONSTRUCTOR_SCHEMA = CODE_ELEMENT_BASE('Constructor');
|
|
172
174
|
export const TEMPLATE_SCHEMA = CODE_ELEMENT_BASE('Template');
|
|
173
175
|
export const MODULE_SCHEMA = CODE_ELEMENT_BASE('Module');
|
|
176
|
+
// Unreal Engine Blueprint assets
|
|
177
|
+
export const BLUEPRINT_SCHEMA = CODE_ELEMENT_BASE('Blueprint');
|
|
174
178
|
// ============================================================================
|
|
175
179
|
// RELATION TABLE SCHEMA
|
|
176
180
|
// Single table with 'type' property - connects all node tables
|
|
@@ -364,6 +368,13 @@ CREATE REL TABLE ${REL_TABLE_NAME} (
|
|
|
364
368
|
FROM \`Annotation\` TO Process,
|
|
365
369
|
FROM \`Template\` TO Process,
|
|
366
370
|
FROM CodeElement TO Process,
|
|
371
|
+
FROM \`Blueprint\` TO Class,
|
|
372
|
+
FROM \`Blueprint\` TO \`Struct\`,
|
|
373
|
+
FROM \`Blueprint\` TO Method,
|
|
374
|
+
FROM \`Blueprint\` TO Function,
|
|
375
|
+
FROM \`Blueprint\` TO \`Blueprint\`,
|
|
376
|
+
FROM \`Blueprint\` TO Community,
|
|
377
|
+
FROM \`Blueprint\` TO Process,
|
|
367
378
|
type STRING,
|
|
368
379
|
confidence DOUBLE,
|
|
369
380
|
reason STRING,
|
|
@@ -419,6 +430,7 @@ export const NODE_SCHEMA_QUERIES = [
|
|
|
419
430
|
CONSTRUCTOR_SCHEMA,
|
|
420
431
|
TEMPLATE_SCHEMA,
|
|
421
432
|
MODULE_SCHEMA,
|
|
433
|
+
BLUEPRINT_SCHEMA,
|
|
422
434
|
];
|
|
423
435
|
export const REL_SCHEMA_QUERIES = [
|
|
424
436
|
RELATION_SCHEMA,
|
|
@@ -25,6 +25,7 @@ export interface WikiMeta {
|
|
|
25
25
|
model: string;
|
|
26
26
|
moduleFiles: Record<string, string[]>;
|
|
27
27
|
moduleTree: ModuleTreeNode[];
|
|
28
|
+
failedModules?: string[];
|
|
28
29
|
}
|
|
29
30
|
export interface ModuleTreeNode {
|
|
30
31
|
name: string;
|
|
@@ -67,6 +68,11 @@ export declare class WikiGenerator {
|
|
|
67
68
|
}>;
|
|
68
69
|
private ensureHTMLViewer;
|
|
69
70
|
private fullGeneration;
|
|
71
|
+
/**
|
|
72
|
+
* Retry only the modules that failed in a previous run.
|
|
73
|
+
* Finds them in the saved module tree and regenerates their pages.
|
|
74
|
+
*/
|
|
75
|
+
private retryFailedModules;
|
|
70
76
|
private buildModuleTree;
|
|
71
77
|
/**
|
|
72
78
|
* Parse LLM grouping response. Validates all files are assigned.
|
|
@@ -121,6 +121,20 @@ export class WikiGenerator {
|
|
|
121
121
|
const forceMode = this.options.force;
|
|
122
122
|
// Up-to-date check (skip if --force)
|
|
123
123
|
if (!forceMode && existingMeta && existingMeta.fromCommit === currentCommit) {
|
|
124
|
+
// If previous run had failed modules, retry them instead of skipping
|
|
125
|
+
if (existingMeta.failedModules?.length) {
|
|
126
|
+
this.onProgress('init', 2, 'Retrying previously failed modules...');
|
|
127
|
+
await initWikiDb(this.lbugPath);
|
|
128
|
+
let retryResult;
|
|
129
|
+
try {
|
|
130
|
+
retryResult = await this.retryFailedModules(existingMeta, currentCommit);
|
|
131
|
+
}
|
|
132
|
+
finally {
|
|
133
|
+
await closeWikiDb();
|
|
134
|
+
}
|
|
135
|
+
await this.ensureHTMLViewer();
|
|
136
|
+
return retryResult;
|
|
137
|
+
}
|
|
124
138
|
// Still regenerate the HTML viewer in case it's missing
|
|
125
139
|
await this.ensureHTMLViewer();
|
|
126
140
|
return { pagesGenerated: 0, mode: 'up-to-date', failedModules: [] };
|
|
@@ -246,7 +260,7 @@ export class WikiGenerator {
|
|
|
246
260
|
this.onProgress('overview', 88, 'Generating overview page...');
|
|
247
261
|
await this.generateOverview(moduleTree);
|
|
248
262
|
pagesGenerated++;
|
|
249
|
-
// Save metadata
|
|
263
|
+
// Save metadata (include failed modules so retry can find them)
|
|
250
264
|
this.onProgress('finalize', 95, 'Saving metadata...');
|
|
251
265
|
const moduleFiles = this.extractModuleFiles(moduleTree);
|
|
252
266
|
await this.saveModuleTree(moduleTree);
|
|
@@ -256,10 +270,87 @@ export class WikiGenerator {
|
|
|
256
270
|
model: this.llmConfig.model,
|
|
257
271
|
moduleFiles,
|
|
258
272
|
moduleTree,
|
|
273
|
+
failedModules: this.failedModules.length > 0 ? [...this.failedModules] : undefined,
|
|
259
274
|
});
|
|
260
275
|
this.onProgress('done', 100, 'Wiki generation complete');
|
|
261
276
|
return { pagesGenerated, mode: 'full', failedModules: [...this.failedModules] };
|
|
262
277
|
}
|
|
278
|
+
// ─── Retry Failed Modules ──────────────────────────────────────────
|
|
279
|
+
/**
|
|
280
|
+
* Retry only the modules that failed in a previous run.
|
|
281
|
+
* Finds them in the saved module tree and regenerates their pages.
|
|
282
|
+
*/
|
|
283
|
+
async retryFailedModules(existingMeta, currentCommit) {
|
|
284
|
+
const failedNames = new Set(existingMeta.failedModules ?? []);
|
|
285
|
+
const moduleTree = existingMeta.moduleTree;
|
|
286
|
+
const { leaves, parents } = this.flattenModuleTree(moduleTree);
|
|
287
|
+
const failedLeaves = leaves.filter(n => failedNames.has(n.name));
|
|
288
|
+
const failedParents = parents.filter(n => failedNames.has(n.name));
|
|
289
|
+
const totalRetries = failedLeaves.length + failedParents.length;
|
|
290
|
+
let processed = 0;
|
|
291
|
+
let pagesGenerated = 0;
|
|
292
|
+
const reportProgress = (moduleName) => {
|
|
293
|
+
processed++;
|
|
294
|
+
const percent = 10 + Math.round((processed / totalRetries) * 75);
|
|
295
|
+
const detail = moduleName
|
|
296
|
+
? `Retry ${processed}/${totalRetries} — ${moduleName}`
|
|
297
|
+
: `Retry ${processed}/${totalRetries}`;
|
|
298
|
+
this.onProgress('retry', percent, detail);
|
|
299
|
+
};
|
|
300
|
+
// Delete existing failed pages so they get regenerated
|
|
301
|
+
for (const name of failedNames) {
|
|
302
|
+
const node = [...leaves, ...parents].find(n => n.name === name);
|
|
303
|
+
if (node) {
|
|
304
|
+
try {
|
|
305
|
+
await fs.unlink(path.join(this.wikiDir, `${node.slug}.md`));
|
|
306
|
+
}
|
|
307
|
+
catch { }
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
// Retry leaf modules in parallel
|
|
311
|
+
pagesGenerated += await this.runParallel(failedLeaves, async (node) => {
|
|
312
|
+
try {
|
|
313
|
+
await this.generateLeafPage(node);
|
|
314
|
+
reportProgress(node.name);
|
|
315
|
+
return 1;
|
|
316
|
+
}
|
|
317
|
+
catch {
|
|
318
|
+
this.failedModules.push(node.name);
|
|
319
|
+
reportProgress(`Failed: ${node.name}`);
|
|
320
|
+
return 0;
|
|
321
|
+
}
|
|
322
|
+
});
|
|
323
|
+
// Retry parent modules sequentially
|
|
324
|
+
for (const node of failedParents) {
|
|
325
|
+
try {
|
|
326
|
+
await this.generateParentPage(node);
|
|
327
|
+
pagesGenerated++;
|
|
328
|
+
reportProgress(node.name);
|
|
329
|
+
}
|
|
330
|
+
catch {
|
|
331
|
+
this.failedModules.push(node.name);
|
|
332
|
+
reportProgress(`Failed: ${node.name}`);
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
// Regenerate overview if any pages were recovered
|
|
336
|
+
if (pagesGenerated > 0) {
|
|
337
|
+
this.onProgress('overview', 88, 'Regenerating overview page...');
|
|
338
|
+
await this.generateOverview(moduleTree);
|
|
339
|
+
}
|
|
340
|
+
// Update metadata
|
|
341
|
+
this.onProgress('finalize', 95, 'Saving metadata...');
|
|
342
|
+
const moduleFiles = this.extractModuleFiles(moduleTree);
|
|
343
|
+
await this.saveWikiMeta({
|
|
344
|
+
fromCommit: currentCommit,
|
|
345
|
+
generatedAt: new Date().toISOString(),
|
|
346
|
+
model: this.llmConfig.model,
|
|
347
|
+
moduleFiles,
|
|
348
|
+
moduleTree,
|
|
349
|
+
failedModules: this.failedModules.length > 0 ? [...this.failedModules] : undefined,
|
|
350
|
+
});
|
|
351
|
+
this.onProgress('done', 100, 'Retry complete');
|
|
352
|
+
return { pagesGenerated, mode: 'full', failedModules: [...this.failedModules] };
|
|
353
|
+
}
|
|
263
354
|
// ─── Phase 1: Build Module Tree ────────────────────────────────────
|
|
264
355
|
async buildModuleTree(files) {
|
|
265
356
|
// Check for existing immutable snapshot (resumability)
|
|
@@ -620,6 +711,7 @@ export class WikiGenerator {
|
|
|
620
711
|
fromCommit: currentCommit,
|
|
621
712
|
generatedAt: new Date().toISOString(),
|
|
622
713
|
model: this.llmConfig.model,
|
|
714
|
+
failedModules: this.failedModules.length > 0 ? [...this.failedModules] : undefined,
|
|
623
715
|
});
|
|
624
716
|
this.onProgress('done', 100, 'Incremental update complete');
|
|
625
717
|
return { pagesGenerated, mode: 'incremental', failedModules: [...this.failedModules] };
|
|
@@ -6,6 +6,7 @@
|
|
|
6
6
|
*/
|
|
7
7
|
import { initLbug, executeQuery, closeLbug } from '../../mcp/core/lbug-adapter.js';
|
|
8
8
|
const REPO_ID = '__wiki__';
|
|
9
|
+
const extractLabel = (v) => Array.isArray(v) ? v[0] : typeof v === 'string' ? v : undefined;
|
|
9
10
|
/**
|
|
10
11
|
* Initialize the LadybugDB connection for wiki generation.
|
|
11
12
|
*/
|
|
@@ -25,14 +26,14 @@ export async function getFilesWithExports() {
|
|
|
25
26
|
const rows = await executeQuery(REPO_ID, `
|
|
26
27
|
MATCH (f:File)-[:CodeRelation {type: 'DEFINES'}]->(n)
|
|
27
28
|
WHERE n.isExported = true
|
|
28
|
-
RETURN f.filePath AS filePath, n.name AS name, labels(n)
|
|
29
|
+
RETURN f.filePath AS filePath, n.name AS name, labels(n) AS type
|
|
29
30
|
ORDER BY f.filePath
|
|
30
31
|
`);
|
|
31
32
|
const fileMap = new Map();
|
|
32
33
|
for (const row of rows) {
|
|
33
34
|
const fp = row.filePath || row[0];
|
|
34
35
|
const name = row.name || row[1];
|
|
35
|
-
const type = row.type || row[2];
|
|
36
|
+
const type = extractLabel(row.type || row[2]);
|
|
36
37
|
let entry = fileMap.get(fp);
|
|
37
38
|
if (!entry) {
|
|
38
39
|
entry = { filePath: fp, symbols: [] };
|
|
@@ -152,7 +153,7 @@ export async function getProcessesForFiles(filePaths, limit = 5) {
|
|
|
152
153
|
// Get the full step trace for this process
|
|
153
154
|
const stepRows = await executeQuery(REPO_ID, `
|
|
154
155
|
MATCH (s)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process {id: '${procId.replace(/'/g, "''")}'})
|
|
155
|
-
RETURN s.name AS name, s.filePath AS filePath, labels(s)
|
|
156
|
+
RETURN s.name AS name, s.filePath AS filePath, labels(s) AS type, r.step AS step
|
|
156
157
|
ORDER BY r.step
|
|
157
158
|
`);
|
|
158
159
|
processes.push({
|
|
@@ -164,7 +165,7 @@ export async function getProcessesForFiles(filePaths, limit = 5) {
|
|
|
164
165
|
step: s.step || s[3] || 0,
|
|
165
166
|
name: s.name || s[0],
|
|
166
167
|
filePath: s.filePath || s[1],
|
|
167
|
-
type: s.type || s[2],
|
|
168
|
+
type: extractLabel(s.type || s[2]),
|
|
168
169
|
})),
|
|
169
170
|
});
|
|
170
171
|
}
|
|
@@ -189,7 +190,7 @@ export async function getAllProcesses(limit = 20) {
|
|
|
189
190
|
const stepCount = row.stepCount || row[3] || 0;
|
|
190
191
|
const stepRows = await executeQuery(REPO_ID, `
|
|
191
192
|
MATCH (s)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process {id: '${procId.replace(/'/g, "''")}'})
|
|
192
|
-
RETURN s.name AS name, s.filePath AS filePath, labels(s)
|
|
193
|
+
RETURN s.name AS name, s.filePath AS filePath, labels(s) AS type, r.step AS step
|
|
193
194
|
ORDER BY r.step
|
|
194
195
|
`);
|
|
195
196
|
processes.push({
|
|
@@ -201,7 +202,7 @@ export async function getAllProcesses(limit = 20) {
|
|
|
201
202
|
step: s.step || s[3] || 0,
|
|
202
203
|
name: s.name || s[0],
|
|
203
204
|
filePath: s.filePath || s[1],
|
|
204
|
-
type: s.type || s[2],
|
|
205
|
+
type: extractLabel(s.type || s[2]),
|
|
205
206
|
})),
|
|
206
207
|
});
|
|
207
208
|
}
|
|
@@ -39,5 +39,7 @@ export interface CallLLMOptions {
|
|
|
39
39
|
* Call an OpenAI-compatible LLM API.
|
|
40
40
|
* Uses streaming when onChunk callback is provided for real-time progress.
|
|
41
41
|
* Retries up to 3 times on transient failures (429, 5xx, network errors).
|
|
42
|
+
*
|
|
43
|
+
* Automatically detects Anthropic API URLs and routes to the Messages API.
|
|
42
44
|
*/
|
|
43
45
|
export declare function callLLM(prompt: string, config: LLMConfig, systemPrompt?: string, options?: CallLLMOptions): Promise<LLMResponse>;
|
|
@@ -51,12 +51,164 @@ function isGatewayStreamStartFailure(status, errorText) {
|
|
|
51
51
|
return normalized.includes('empty_stream')
|
|
52
52
|
|| normalized.includes('upstream stream closed before first payload');
|
|
53
53
|
}
|
|
54
|
+
/**
|
|
55
|
+
* Detect whether a base URL points to the Anthropic Messages API.
|
|
56
|
+
*/
|
|
57
|
+
function isAnthropicAPI(baseUrl) {
|
|
58
|
+
return /anthropic\.com/i.test(baseUrl);
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Resolve the API key for Anthropic, preferring Anthropic-specific sources.
|
|
62
|
+
* Priority: ANTHROPIC_API_KEY env > saved config key (if Anthropic-shaped) > config.apiKey
|
|
63
|
+
*/
|
|
64
|
+
async function resolveAnthropicApiKey(configApiKey) {
|
|
65
|
+
// 1. Dedicated Anthropic env var
|
|
66
|
+
if (process.env.ANTHROPIC_API_KEY)
|
|
67
|
+
return process.env.ANTHROPIC_API_KEY;
|
|
68
|
+
// 2. If the resolved key looks like an Anthropic key, use it
|
|
69
|
+
if (configApiKey.startsWith('sk-ant-'))
|
|
70
|
+
return configApiKey;
|
|
71
|
+
// 3. Fall back to saved config key (may differ from resolved key when env vars override)
|
|
72
|
+
const { loadCLIConfig } = await import('../../storage/repo-manager.js');
|
|
73
|
+
const saved = await loadCLIConfig();
|
|
74
|
+
if (saved.apiKey?.startsWith('sk-ant-'))
|
|
75
|
+
return saved.apiKey;
|
|
76
|
+
// 4. Use whatever was resolved
|
|
77
|
+
return configApiKey;
|
|
78
|
+
}
|
|
79
|
+
/**
 * Call the Anthropic Messages API (/v1/messages).
 * Handles both streaming and non-streaming modes.
 *
 * @param {string} prompt - sent as the single "user" turn.
 * @param {{baseUrl: string, apiKey: string, model: string, maxTokens: number, temperature: number}} config
 * @param {string} [systemPrompt] - sent via the top-level "system" field when present.
 * @param {{onChunk?: Function}} [options] - providing onChunk switches the request to streaming.
 * @returns {Promise<{content: string, promptTokens?: number, completionTokens?: number}>}
 * @throws {Error} on non-retryable HTTP errors, empty responses, or after all retries fail.
 */
async function callAnthropicLLM(prompt, config, systemPrompt, options) {
    const apiKey = await resolveAnthropicApiKey(config.apiKey);
    // Strip trailing slashes so we never produce "…//v1/messages".
    const url = `${config.baseUrl.replace(/\/+$/, '')}/v1/messages`;
    // Streaming is opt-in via the onChunk progress callback.
    const useStream = !!options?.onChunk;
    const body = {
        model: config.model,
        max_tokens: config.maxTokens,
        temperature: config.temperature,
        messages: [{ role: 'user', content: prompt }],
    };
    if (systemPrompt) {
        // Anthropic takes the system prompt as a top-level field, not a message.
        body.system = systemPrompt;
    }
    if (useStream) {
        body.stream = true;
    }
    const headers = {
        'Content-Type': 'application/json',
        // Anthropic auth uses x-api-key (not an Authorization bearer header).
        'x-api-key': apiKey,
        'anthropic-version': '2023-06-01',
    };
    const MAX_RETRIES = 3;
    let lastError = null;
    for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
        try {
            const response = await fetch(url, {
                method: 'POST',
                headers,
                body: JSON.stringify(body),
            });
            if (!response.ok) {
                const errorText = await response.text().catch(() => 'unknown error');
                // 429: honor Retry-After (seconds) if given, else exponential backoff.
                if (response.status === 429 && attempt < MAX_RETRIES - 1) {
                    const retryAfter = parseInt(response.headers.get('retry-after') || '0', 10);
                    const delay = retryAfter > 0 ? retryAfter * 1000 : (2 ** attempt) * 3000;
                    await sleep(delay);
                    continue;
                }
                // 5xx: retry with linear backoff.
                if (response.status >= 500 && attempt < MAX_RETRIES - 1) {
                    await sleep((attempt + 1) * 2000);
                    continue;
                }
                // Non-retryable (or retries exhausted): surface a truncated error body.
                throw new Error(`LLM API error (${response.status}): ${errorText.slice(0, 500)}`);
            }
            if (useStream && response.body) {
                // useStream guarantees options.onChunk exists here.
                return await readAnthropicSSEStream(response.body, options.onChunk);
            }
            const json = await response.json();
            // The Messages API returns an array of content blocks; take the first text block.
            const text = json.content?.find((b) => b.type === 'text')?.text;
            if (!text) {
                throw new Error('LLM returned empty response');
            }
            return {
                content: text,
                promptTokens: json.usage?.input_tokens,
                completionTokens: json.usage?.output_tokens,
            };
        }
        catch (err) {
            lastError = err;
            // Retry only likely-transient network failures; rethrow everything else.
            if (attempt < MAX_RETRIES - 1 && (err.code === 'ECONNREFUSED' || err.code === 'ETIMEDOUT' || err.message?.includes('fetch'))) {
                await sleep((attempt + 1) * 3000);
                continue;
            }
            throw err;
        }
    }
    // Only reachable if every attempt ended in a retried failure.
    throw lastError || new Error('LLM call failed after retries');
}
|
|
152
|
+
/**
 * Read an SSE stream from the Anthropic Messages API.
 * Anthropic uses event types: content_block_delta with delta.text.
 *
 * @param {ReadableStream} body - fetch Response body for a streaming /v1/messages call.
 * @param {(len: number) => void} onChunk - progress callback; receives the
 *   cumulative content length after each text delta.
 * @returns {Promise<{content: string, promptTokens?: number, completionTokens?: number}>}
 * @throws {Error} when the stream ends with no text content accumulated.
 */
async function readAnthropicSSEStream(body, onChunk) {
    const decoder = new TextDecoder();
    const reader = body.getReader();
    let content = '';
    let buffer = '';
    let promptTokens;
    let completionTokens;
    // Parse a single SSE line and fold any payload into the accumulators.
    const handleLine = (line) => {
        const trimmed = line.trim();
        if (!trimmed || !trimmed.startsWith('data: '))
            return;
        const data = trimmed.slice(6);
        if (data === '[DONE]')
            return;
        try {
            const parsed = JSON.parse(data);
            if (parsed.type === 'content_block_delta' && parsed.delta?.text) {
                content += parsed.delta.text;
                onChunk(content.length);
            }
            else if (parsed.type === 'message_delta' && parsed.usage) {
                completionTokens = parsed.usage.output_tokens;
            }
            else if (parsed.type === 'message_start' && parsed.message?.usage) {
                promptTokens = parsed.message.usage.input_tokens;
            }
        }
        catch {
            // Skip malformed SSE chunks
        }
    };
    while (true) {
        const { done, value } = await reader.read();
        if (done)
            break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        // Keep the last (possibly partial) line in the buffer for the next read.
        buffer = lines.pop() || '';
        for (const line of lines) {
            handleLine(line);
        }
    }
    // Fix: flush the decoder and process any final line still sitting in the
    // buffer. Previously a trailing "data:" line without a newline (e.g. the
    // closing message_delta usage event) was silently dropped.
    buffer += decoder.decode();
    if (buffer) {
        handleLine(buffer);
    }
    if (!content) {
        throw new Error('LLM returned empty streaming response');
    }
    return { content, promptTokens, completionTokens };
}
|
|
54
200
|
/**
|
|
55
201
|
* Call an OpenAI-compatible LLM API.
|
|
56
202
|
* Uses streaming when onChunk callback is provided for real-time progress.
|
|
57
203
|
* Retries up to 3 times on transient failures (429, 5xx, network errors).
|
|
204
|
+
*
|
|
205
|
+
* Automatically detects Anthropic API URLs and routes to the Messages API.
|
|
58
206
|
*/
|
|
59
207
|
export async function callLLM(prompt, config, systemPrompt, options) {
|
|
208
|
+
// Route to Anthropic Messages API when baseUrl points to anthropic.com
|
|
209
|
+
if (isAnthropicAPI(config.baseUrl)) {
|
|
210
|
+
return callAnthropicLLM(prompt, config, systemPrompt, options);
|
|
211
|
+
}
|
|
60
212
|
const messages = [];
|
|
61
213
|
if (systemPrompt) {
|
|
62
214
|
messages.push({ role: 'system', content: systemPrompt });
|
|
@@ -42,6 +42,12 @@ export const VALID_NODE_LABELS = new Set([
|
|
|
42
42
|
/** Relationship types accepted on CodeRelation edges. */
export const VALID_RELATION_TYPES = new Set([
    'CALLS',
    'IMPORTS',
    'EXTENDS',
    'IMPLEMENTS',
    'HAS_METHOD',
    'HAS_PROPERTY',
    'OVERRIDES',
    'ACCESSES',
]);
/** Regex to detect write operations in user-supplied Cypher queries */
export const CYPHER_WRITE_RE = /\b(CREATE|DELETE|SET|MERGE|REMOVE|DROP|ALTER|COPY|DETACH)\b/i;
/**
 * Extract the primary label from a KùzuDB labels() result.
 * KùzuDB returns labels() as a string[] but does not support array indexing [0]
 * in Cypher, so we return the full array from queries and extract here.
 */
const extractLabel = (v) => {
    if (Array.isArray(v)) {
        return v[0];
    }
    return typeof v === 'string' ? v : undefined;
};
/** Check if a Cypher query contains write operations */
export function isWriteQuery(query) {
    return CYPHER_WRITE_RE.test(query);
}
|
|
@@ -294,21 +300,21 @@ export class LocalBackend {
|
|
|
294
300
|
if (!params.symbol_uid && !symbolName) {
|
|
295
301
|
return { error: 'Either "function" or "symbol_uid" is required.' };
|
|
296
302
|
}
|
|
297
|
-
const rows = await executeParameterized(repo.id, `
|
|
298
|
-
MATCH (n)
|
|
299
|
-
WHERE labels(n)
|
|
300
|
-
AND (($symbolId = '') OR n.id = $symbolId)
|
|
301
|
-
AND (($symbolName = '') OR n.name = $symbolName)
|
|
302
|
-
AND (($filePath = '') OR n.filePath = $filePath)
|
|
303
|
-
OPTIONAL MATCH (owner)-[:CodeRelation {type: 'HAS_METHOD'}]->(n)
|
|
304
|
-
RETURN
|
|
305
|
-
n.id AS symbolId,
|
|
306
|
-
n.name AS symbolName,
|
|
307
|
-
labels(n)
|
|
308
|
-
n.filePath AS filePath,
|
|
309
|
-
n.startLine AS startLine,
|
|
310
|
-
owner.name AS ownerClass
|
|
311
|
-
LIMIT 25
|
|
303
|
+
const rows = await executeParameterized(repo.id, `
|
|
304
|
+
MATCH (n)
|
|
305
|
+
WHERE labels(n) IN ['Function', 'Method']
|
|
306
|
+
AND (($symbolId = '') OR n.id = $symbolId)
|
|
307
|
+
AND (($symbolName = '') OR n.name = $symbolName)
|
|
308
|
+
AND (($filePath = '') OR n.filePath = $filePath)
|
|
309
|
+
OPTIONAL MATCH (owner)-[:CodeRelation {type: 'HAS_METHOD'}]->(n)
|
|
310
|
+
RETURN
|
|
311
|
+
n.id AS symbolId,
|
|
312
|
+
n.name AS symbolName,
|
|
313
|
+
labels(n) AS symbolType,
|
|
314
|
+
n.filePath AS filePath,
|
|
315
|
+
n.startLine AS startLine,
|
|
316
|
+
owner.name AS ownerClass
|
|
317
|
+
LIMIT 25
|
|
312
318
|
`, {
|
|
313
319
|
symbolId: params.symbol_uid || '',
|
|
314
320
|
symbolName,
|
|
@@ -322,7 +328,7 @@ export class LocalBackend {
|
|
|
322
328
|
return {
|
|
323
329
|
symbol_id: row.symbolId || row[0],
|
|
324
330
|
symbol_name: row.symbolName || row[1],
|
|
325
|
-
symbol_type: row.symbolType || row[2],
|
|
331
|
+
symbol_type: extractLabel(row.symbolType || row[2]),
|
|
326
332
|
class_name: ownerClass,
|
|
327
333
|
file_path: row.filePath || row[3],
|
|
328
334
|
start_line: row.startLine || row[4],
|
|
@@ -706,7 +712,7 @@ export class LocalBackend {
|
|
|
706
712
|
const symbols = await executeParameterized(repo.id, `
|
|
707
713
|
MATCH (n)
|
|
708
714
|
WHERE n.filePath = $filePath
|
|
709
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
715
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine
|
|
710
716
|
LIMIT 3
|
|
711
717
|
`, { filePath: fullPath });
|
|
712
718
|
if (symbols.length > 0) {
|
|
@@ -714,7 +720,7 @@ export class LocalBackend {
|
|
|
714
720
|
results.push({
|
|
715
721
|
nodeId: sym.id || sym[0],
|
|
716
722
|
name: sym.name || sym[1],
|
|
717
|
-
type: sym.type || sym[2],
|
|
723
|
+
type: extractLabel(sym.type || sym[2]),
|
|
718
724
|
filePath: sym.filePath || sym[3],
|
|
719
725
|
startLine: sym.startLine || sym[4],
|
|
720
726
|
endLine: sym.endLine || sym[5],
|
|
@@ -961,7 +967,7 @@ export class LocalBackend {
|
|
|
961
967
|
if (uid) {
|
|
962
968
|
symbols = await executeParameterized(repo.id, `
|
|
963
969
|
MATCH (n {id: $uid})
|
|
964
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
970
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
|
|
965
971
|
LIMIT 1
|
|
966
972
|
`, { uid });
|
|
967
973
|
}
|
|
@@ -983,7 +989,7 @@ export class LocalBackend {
|
|
|
983
989
|
}
|
|
984
990
|
symbols = await executeParameterized(repo.id, `
|
|
985
991
|
MATCH (n) ${whereClause}
|
|
986
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
992
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath, n.startLine AS startLine, n.endLine AS endLine${include_content ? ', n.content AS content' : ''}
|
|
987
993
|
LIMIT 10
|
|
988
994
|
`, queryParams);
|
|
989
995
|
}
|
|
@@ -998,7 +1004,7 @@ export class LocalBackend {
|
|
|
998
1004
|
candidates: symbols.map((s) => ({
|
|
999
1005
|
uid: s.id || s[0],
|
|
1000
1006
|
name: s.name || s[1],
|
|
1001
|
-
kind: s.type || s[2],
|
|
1007
|
+
kind: extractLabel(s.type || s[2]),
|
|
1002
1008
|
filePath: s.filePath || s[3],
|
|
1003
1009
|
line: s.startLine || s[4],
|
|
1004
1010
|
})),
|
|
@@ -1011,14 +1017,14 @@ export class LocalBackend {
|
|
|
1011
1017
|
const incomingRows = await executeParameterized(repo.id, `
|
|
1012
1018
|
MATCH (caller)-[r:CodeRelation]->(n {id: $symId})
|
|
1013
1019
|
WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS', 'HAS_METHOD', 'HAS_PROPERTY', 'OVERRIDES', 'ACCESSES']
|
|
1014
|
-
RETURN r.type AS relType, caller.id AS uid, caller.name AS name, caller.filePath AS filePath, labels(caller)
|
|
1020
|
+
RETURN r.type AS relType, caller.id AS uid, caller.name AS name, caller.filePath AS filePath, labels(caller) AS kind
|
|
1015
1021
|
LIMIT 30
|
|
1016
1022
|
`, { symId });
|
|
1017
1023
|
// Categorized outgoing refs
|
|
1018
1024
|
const outgoingRows = await executeParameterized(repo.id, `
|
|
1019
1025
|
MATCH (n {id: $symId})-[r:CodeRelation]->(target)
|
|
1020
1026
|
WHERE r.type IN ['CALLS', 'IMPORTS', 'EXTENDS', 'IMPLEMENTS', 'HAS_METHOD', 'HAS_PROPERTY', 'OVERRIDES', 'ACCESSES']
|
|
1021
|
-
RETURN r.type AS relType, target.id AS uid, target.name AS name, target.filePath AS filePath, labels(target)
|
|
1027
|
+
RETURN r.type AS relType, target.id AS uid, target.name AS name, target.filePath AS filePath, labels(target) AS kind
|
|
1022
1028
|
LIMIT 30
|
|
1023
1029
|
`, { symId });
|
|
1024
1030
|
// Process participation
|
|
@@ -1041,7 +1047,7 @@ export class LocalBackend {
|
|
|
1041
1047
|
uid: row.uid || row[1],
|
|
1042
1048
|
name: row.name || row[2],
|
|
1043
1049
|
filePath: row.filePath || row[3],
|
|
1044
|
-
kind: row.kind || row[4],
|
|
1050
|
+
kind: extractLabel(row.kind || row[4]),
|
|
1045
1051
|
};
|
|
1046
1052
|
if (!cats[relType])
|
|
1047
1053
|
cats[relType] = [];
|
|
@@ -1054,7 +1060,7 @@ export class LocalBackend {
|
|
|
1054
1060
|
symbol: {
|
|
1055
1061
|
uid: sym.id || sym[0],
|
|
1056
1062
|
name: sym.name || sym[1],
|
|
1057
|
-
kind: sym.type || sym[2],
|
|
1063
|
+
kind: extractLabel(sym.type || sym[2]),
|
|
1058
1064
|
filePath: sym.filePath || sym[3],
|
|
1059
1065
|
startLine: sym.startLine || sym[4],
|
|
1060
1066
|
endLine: sym.endLine || sym[5],
|
|
@@ -1101,7 +1107,7 @@ export class LocalBackend {
|
|
|
1101
1107
|
const members = await executeParameterized(repo.id, `
|
|
1102
1108
|
MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
|
|
1103
1109
|
WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
|
|
1104
|
-
RETURN DISTINCT n.name AS name, labels(n)
|
|
1110
|
+
RETURN DISTINCT n.name AS name, labels(n) AS type, n.filePath AS filePath
|
|
1105
1111
|
LIMIT 30
|
|
1106
1112
|
`, { clusterName: name });
|
|
1107
1113
|
return {
|
|
@@ -1114,7 +1120,7 @@ export class LocalBackend {
|
|
|
1114
1120
|
subCommunities: rawClusters.length,
|
|
1115
1121
|
},
|
|
1116
1122
|
members: members.map((m) => ({
|
|
1117
|
-
name: m.name || m[0], type: m.type || m[1], filePath: m.filePath || m[2],
|
|
1123
|
+
name: m.name || m[0], type: extractLabel(m.type || m[1]), filePath: m.filePath || m[2],
|
|
1118
1124
|
})),
|
|
1119
1125
|
};
|
|
1120
1126
|
}
|
|
@@ -1131,7 +1137,7 @@ export class LocalBackend {
|
|
|
1131
1137
|
const procId = proc.id || proc[0];
|
|
1132
1138
|
const steps = await executeParameterized(repo.id, `
|
|
1133
1139
|
MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: $procId})
|
|
1134
|
-
RETURN n.name AS name, labels(n)
|
|
1140
|
+
RETURN n.name AS name, labels(n) AS type, n.filePath AS filePath, r.step AS step
|
|
1135
1141
|
ORDER BY r.step
|
|
1136
1142
|
`, { procId });
|
|
1137
1143
|
return {
|
|
@@ -1140,7 +1146,7 @@ export class LocalBackend {
|
|
|
1140
1146
|
processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
|
|
1141
1147
|
},
|
|
1142
1148
|
steps: steps.map((s) => ({
|
|
1143
|
-
step: s.step || s[3], name: s.name || s[0], type: s.type || s[1], filePath: s.filePath || s[2],
|
|
1149
|
+
step: s.step || s[3], name: s.name || s[0], type: extractLabel(s.type || s[1]), filePath: s.filePath || s[2],
|
|
1144
1150
|
})),
|
|
1145
1151
|
};
|
|
1146
1152
|
}
|
|
@@ -1195,14 +1201,14 @@ export class LocalBackend {
|
|
|
1195
1201
|
try {
|
|
1196
1202
|
const symbols = await executeParameterized(repo.id, `
|
|
1197
1203
|
MATCH (n) WHERE n.filePath CONTAINS $filePath
|
|
1198
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
1204
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath
|
|
1199
1205
|
LIMIT 20
|
|
1200
1206
|
`, { filePath: normalizedFile });
|
|
1201
1207
|
for (const sym of symbols) {
|
|
1202
1208
|
changedSymbols.push({
|
|
1203
1209
|
id: sym.id || sym[0],
|
|
1204
1210
|
name: sym.name || sym[1],
|
|
1205
|
-
type: sym.type || sym[2],
|
|
1211
|
+
type: extractLabel(sym.type || sym[2]),
|
|
1206
1212
|
filePath: sym.filePath || sym[3],
|
|
1207
1213
|
change_type: 'Modified',
|
|
1208
1214
|
});
|
|
@@ -1441,7 +1447,7 @@ export class LocalBackend {
|
|
|
1441
1447
|
const targets = await executeParameterized(repo.id, `
|
|
1442
1448
|
MATCH (n)
|
|
1443
1449
|
WHERE n.name = $targetName
|
|
1444
|
-
RETURN n.id AS id, n.name AS name, labels(n)
|
|
1450
|
+
RETURN n.id AS id, n.name AS name, labels(n) AS type, n.filePath AS filePath
|
|
1445
1451
|
LIMIT 1
|
|
1446
1452
|
`, { targetName: target });
|
|
1447
1453
|
if (targets.length === 0)
|
|
@@ -1457,8 +1463,8 @@ export class LocalBackend {
|
|
|
1457
1463
|
// Batch frontier nodes into a single Cypher query per depth level
|
|
1458
1464
|
const idList = frontier.map(id => `'${id.replace(/'/g, "''")}'`).join(', ');
|
|
1459
1465
|
const query = direction === 'upstream'
|
|
1460
|
-
? `MATCH (caller)-[r:CodeRelation]->(n) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, caller.id AS id, caller.name AS name, labels(caller)
|
|
1461
|
-
: `MATCH (n)-[r:CodeRelation]->(callee) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, callee.id AS id, callee.name AS name, labels(callee)
|
|
1466
|
+
? `MATCH (caller)-[r:CodeRelation]->(n) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, caller.id AS id, caller.name AS name, labels(caller) AS type, caller.filePath AS filePath, r.type AS relType, r.confidence AS confidence`
|
|
1467
|
+
: `MATCH (n)-[r:CodeRelation]->(callee) WHERE n.id IN [${idList}] AND r.type IN [${relTypeFilter}]${confidenceFilter} RETURN n.id AS sourceId, callee.id AS id, callee.name AS name, labels(callee) AS type, callee.filePath AS filePath, r.type AS relType, r.confidence AS confidence`;
|
|
1462
1468
|
try {
|
|
1463
1469
|
const related = await executeQuery(repo.id, query);
|
|
1464
1470
|
for (const rel of related) {
|
|
@@ -1473,7 +1479,7 @@ export class LocalBackend {
|
|
|
1473
1479
|
depth,
|
|
1474
1480
|
id: relId,
|
|
1475
1481
|
name: rel.name || rel[2],
|
|
1476
|
-
type: rel.type || rel[3],
|
|
1482
|
+
type: extractLabel(rel.type || rel[3]),
|
|
1477
1483
|
filePath,
|
|
1478
1484
|
relationType: rel.relType || rel[5],
|
|
1479
1485
|
confidence: rel.confidence || rel[6] || 1.0,
|
|
@@ -1558,7 +1564,7 @@ export class LocalBackend {
|
|
|
1558
1564
|
target: {
|
|
1559
1565
|
id: symId,
|
|
1560
1566
|
name: sym.name || sym[1],
|
|
1561
|
-
type: sym.type || sym[2],
|
|
1567
|
+
type: extractLabel(sym.type || sym[2]),
|
|
1562
1568
|
filePath: sym.filePath || sym[3],
|
|
1563
1569
|
},
|
|
1564
1570
|
direction,
|
|
@@ -1659,7 +1665,7 @@ export class LocalBackend {
|
|
|
1659
1665
|
const members = await executeParameterized(repo.id, `
|
|
1660
1666
|
MATCH (n)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
|
|
1661
1667
|
WHERE c.label = $clusterName OR c.heuristicLabel = $clusterName
|
|
1662
|
-
RETURN DISTINCT n.name AS name, labels(n)
|
|
1668
|
+
RETURN DISTINCT n.name AS name, labels(n) AS type, n.filePath AS filePath
|
|
1663
1669
|
LIMIT 30
|
|
1664
1670
|
`, { clusterName: name });
|
|
1665
1671
|
return {
|
|
@@ -1672,7 +1678,7 @@ export class LocalBackend {
|
|
|
1672
1678
|
subCommunities: rawClusters.length,
|
|
1673
1679
|
},
|
|
1674
1680
|
members: members.map((m) => ({
|
|
1675
|
-
name: m.name || m[0], type: m.type || m[1], filePath: m.filePath || m[2],
|
|
1681
|
+
name: m.name || m[0], type: extractLabel(m.type || m[1]), filePath: m.filePath || m[2],
|
|
1676
1682
|
})),
|
|
1677
1683
|
};
|
|
1678
1684
|
}
|
|
@@ -1695,7 +1701,7 @@ export class LocalBackend {
|
|
|
1695
1701
|
const procId = proc.id || proc[0];
|
|
1696
1702
|
const steps = await executeParameterized(repo.id, `
|
|
1697
1703
|
MATCH (n)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p {id: $procId})
|
|
1698
|
-
RETURN n.name AS name, labels(n)
|
|
1704
|
+
RETURN n.name AS name, labels(n) AS type, n.filePath AS filePath, r.step AS step
|
|
1699
1705
|
ORDER BY r.step
|
|
1700
1706
|
`, { procId });
|
|
1701
1707
|
return {
|
|
@@ -1704,7 +1710,7 @@ export class LocalBackend {
|
|
|
1704
1710
|
processType: proc.processType || proc[3], stepCount: proc.stepCount || proc[4],
|
|
1705
1711
|
},
|
|
1706
1712
|
steps: steps.map((s) => ({
|
|
1707
|
-
step: s.step || s[3], name: s.name || s[0], type: s.type || s[1], filePath: s.filePath || s[2],
|
|
1713
|
+
step: s.step || s[3], name: s.name || s[0], type: extractLabel(s.type || s[1]), filePath: s.filePath || s[2],
|
|
1708
1714
|
})),
|
|
1709
1715
|
};
|
|
1710
1716
|
}
|
package/dist/mcp/resources.js
CHANGED
|
@@ -295,7 +295,7 @@ example_queries:
|
|
|
295
295
|
find_community_members: |
|
|
296
296
|
MATCH (s)-[:CodeRelation {type: 'MEMBER_OF'}]->(c:Community)
|
|
297
297
|
WHERE c.heuristicLabel = "Auth"
|
|
298
|
-
RETURN s.name, labels(s)
|
|
298
|
+
RETURN s.name, labels(s) AS type
|
|
299
299
|
|
|
300
300
|
trace_process: |
|
|
301
301
|
MATCH (s)-[r:CodeRelation {type: 'STEP_IN_PROCESS'}]->(p:Process)
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * Blueprint Ingestion — adds Unreal Blueprint assets to the knowledge graph
 *
 * Reads the asset manifest (produced by `gitnexus unreal-sync`) and creates
 * Blueprint nodes plus edges to C++ classes and functions already in the graph.
 * Runs as a post-pipeline step before LadybugDB loading.
 */
import { KnowledgeGraph } from '../core/graph/types.js';
/** Summary counters returned by ingestBlueprintsIntoGraph. */
export interface BlueprintIngestionResult {
    /** Number of Blueprint nodes created in the graph. */
    nodesAdded: number;
    /** Number of edges (EXTENDS / CALLS / IMPORTS) created in the graph. */
    edgesAdded: number;
}
/**
 * Ingest Blueprint assets from the Unreal asset manifest into the knowledge graph.
 * Creates Blueprint nodes and edges (EXTENDS, CALLS, IMPORTS) linking them to
 * existing C++ symbols in the graph.
 *
 * @param graph - knowledge graph to mutate in place
 * @param storagePath - storage root; the manifest is read from `<storagePath>/unreal/asset-manifest.json`
 * @returns counters for the nodes and edges added (both zero when no manifest exists)
 */
export declare const ingestBlueprintsIntoGraph: (graph: KnowledgeGraph, storagePath: string) => Promise<BlueprintIngestionResult>;
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Blueprint Ingestion — adds Unreal Blueprint assets to the knowledge graph
|
|
3
|
+
*
|
|
4
|
+
* Reads the asset manifest (produced by `gitnexus unreal-sync`) and creates
|
|
5
|
+
* Blueprint nodes plus edges to C++ classes and functions already in the graph.
|
|
6
|
+
* Runs as a post-pipeline step before LadybugDB loading.
|
|
7
|
+
*/
|
|
8
|
+
import fs from 'fs/promises';
|
|
9
|
+
import path from 'path';
|
|
10
|
+
import { generateId } from '../lib/utils.js';
|
|
11
|
+
/** Extract a display name from an Unreal asset path (last segment). */
const extractAssetName = (assetPath) => {
    // "/Game/Characters/BP_Hero"           → "BP_Hero"
    // "/Game/Characters/BP_Hero.BP_Hero_C" → "BP_Hero"
    const segments = assetPath.split('/');
    const last = segments[segments.length - 1];
    // Drop everything from the first dot onward (generated-class suffix).
    return last.split('.')[0];
};
|
|
20
|
+
/** Extract a C++ class name from an Unreal class path.
 * "/Script/Engine.Character" → "Character"
 * "/Script/Engine.ACharacter" → "ACharacter"
 * "ACharacter" → "ACharacter" (already plain)
 */
const extractClassName = (unrealPath) => {
    // Prefer the text after the final dot; otherwise fall back to the
    // text after the final slash (or the whole string when neither exists).
    if (unrealPath.includes('.')) {
        return unrealPath.slice(unrealPath.lastIndexOf('.') + 1);
    }
    const parts = unrealPath.split('/');
    return parts[parts.length - 1];
};
|
|
32
|
+
/**
 * Ingest Blueprint assets from the Unreal asset manifest into the knowledge graph.
 * Creates Blueprint nodes and edges (EXTENDS, CALLS, IMPORTS) linking them to
 * existing C++ symbols in the graph.
 *
 * @param {object} graph - knowledge graph mutated in place (iterNodes/iterRelationships/getNode/addNode/addRelationship).
 * @param {string} storagePath - storage root; manifest is read from `<storagePath>/unreal/asset-manifest.json`.
 * @returns {Promise<{nodesAdded: number, edgesAdded: number}>} counters; both zero
 *   when the manifest is missing, unreadable, or has no assets.
 */
export const ingestBlueprintsIntoGraph = async (graph, storagePath) => {
    const manifestPath = path.join(storagePath, 'unreal', 'asset-manifest.json');
    // NOTE(review): manifest schema assumed from the reads below:
    // { assets: [{ asset_path, generated_class?, native_parents?, native_function_refs?, dependencies? }] }
    // — confirm against the `gitnexus unreal-sync` exporter.
    let manifest;
    try {
        const raw = await fs.readFile(manifestPath, 'utf-8');
        manifest = JSON.parse(raw);
    }
    catch {
        // Missing or unparsable manifest: Blueprint ingestion is optional, so no-op.
        return { nodesAdded: 0, edgesAdded: 0 };
    }
    if (!manifest.assets || manifest.assets.length === 0) {
        return { nodesAdded: 0, edgesAdded: 0 };
    }
    // ── Build lookup indexes from existing graph nodes ──────────────────
    // Class/Struct nodes keyed by name (for parent class matching)
    const classByName = new Map();
    // Method/Function nodes keyed by name (for native_function_refs matching)
    const symbolByName = new Map();
    // Class ID → set of Method IDs (via HAS_METHOD edges)
    const classMethodIds = new Map();
    // Build class→method map from HAS_METHOD edges
    for (const rel of graph.iterRelationships()) {
        if (rel.type === 'HAS_METHOD') {
            let methods = classMethodIds.get(rel.sourceId);
            if (!methods) {
                methods = new Set();
                classMethodIds.set(rel.sourceId, methods);
            }
            methods.add(rel.targetId);
        }
    }
    for (const node of graph.iterNodes()) {
        if (node.label === 'Class' || node.label === 'Struct') {
            const name = node.properties.name;
            let list = classByName.get(name);
            if (!list) {
                list = [];
                classByName.set(name, list);
            }
            list.push(node);
        }
        if (node.label === 'Method' || node.label === 'Function') {
            const name = node.properties.name;
            let list = symbolByName.get(name);
            if (!list) {
                list = [];
                symbolByName.set(name, list);
            }
            list.push(node);
        }
    }
    // Reverse map: method ID → owning class name (for disambiguation)
    const methodOwnerName = new Map();
    for (const [classId, methodIds] of classMethodIds) {
        const classNode = graph.getNode(classId);
        if (!classNode)
            continue;
        for (const methodId of methodIds) {
            methodOwnerName.set(methodId, classNode.properties.name);
        }
    }
    // ── Create Blueprint nodes and edges ────────────────────────────────
    let nodesAdded = 0;
    let edgesAdded = 0;
    // Monotonic suffix keeps generated edge IDs unique even for repeated pairs.
    let edgeCounter = 0;
    // Track created Blueprint IDs for second-pass dependency edges
    const blueprintIdByAssetPath = new Map();
    for (const asset of manifest.assets) {
        const bpId = generateId('Blueprint', asset.asset_path);
        const bpName = extractAssetName(asset.asset_path);
        graph.addNode({
            id: bpId,
            label: 'Blueprint',
            properties: {
                name: bpName,
                filePath: asset.asset_path,
                // Blueprints are binary assets, so there is no meaningful line range.
                startLine: -1,
                endLine: -1,
                description: asset.generated_class || '',
            },
        });
        nodesAdded++;
        blueprintIdByAssetPath.set(asset.asset_path, bpId);
        // ── EXTENDS edge to nearest native parent class ──────────────
        const nativeParents = asset.native_parents || [];
        if (nativeParents.length > 0) {
            // Only the first (nearest) native parent is linked.
            const parentClassName = extractClassName(nativeParents[0]);
            const candidates = classByName.get(parentClassName);
            if (candidates && candidates.length > 0) {
                const target = candidates[0]; // pick first match
                graph.addRelationship({
                    id: generateId('EXTENDS', `${bpId}->${target.id}:${edgeCounter++}`),
                    sourceId: bpId,
                    targetId: target.id,
                    type: 'EXTENDS',
                    confidence: 0.9,
                    reason: 'blueprint-manifest',
                });
                edgesAdded++;
            }
        }
        // ── CALLS edges for native function references ───────────────
        const funcRefs = asset.native_function_refs || [];
        for (const ref of funcRefs) {
            // Refs may be "Class::Function" or a bare function name.
            const colonIdx = ref.lastIndexOf('::');
            let targetClassName;
            let funcName;
            if (colonIdx >= 0) {
                targetClassName = ref.slice(0, colonIdx);
                funcName = ref.slice(colonIdx + 2);
            }
            else {
                funcName = ref;
            }
            const candidates = symbolByName.get(funcName);
            if (!candidates || candidates.length === 0)
                continue;
            // If we have a class name, prefer methods owned by that class
            let matched;
            if (targetClassName) {
                matched = candidates.find(c => {
                    const owner = methodOwnerName.get(c.id);
                    return owner === targetClassName;
                });
            }
            // Fallback: first candidate
            if (!matched)
                matched = candidates[0];
            graph.addRelationship({
                id: generateId('CALLS', `${bpId}->${matched.id}:${edgeCounter++}`),
                sourceId: bpId,
                targetId: matched.id,
                type: 'CALLS',
                confidence: 0.8,
                reason: 'blueprint-manifest',
            });
            edgesAdded++;
        }
    }
    // ── Second pass: Blueprint-to-Blueprint IMPORTS edges ───────────────
    // Runs after all Blueprint nodes exist so forward references resolve.
    for (const asset of manifest.assets) {
        const deps = asset.dependencies || [];
        if (deps.length === 0)
            continue;
        const sourceBpId = blueprintIdByAssetPath.get(asset.asset_path);
        if (!sourceBpId)
            continue;
        for (const dep of deps) {
            const targetBpId = blueprintIdByAssetPath.get(dep);
            // Skip deps outside the manifest and self-references.
            if (!targetBpId || targetBpId === sourceBpId)
                continue;
            graph.addRelationship({
                id: generateId('IMPORTS', `${sourceBpId}->${targetBpId}:${edgeCounter++}`),
                sourceId: sourceBpId,
                targetId: targetBpId,
                type: 'IMPORTS',
                confidence: 0.7,
                reason: 'blueprint-manifest',
            });
            edgesAdded++;
        }
    }
    return { nodesAdded, edgesAdded };
};
|
package/dist/unreal/types.d.ts
CHANGED
|
@@ -52,12 +52,39 @@ export interface UnrealConfirmedReference {
|
|
|
52
52
|
chain_anchor_id: string;
|
|
53
53
|
source: 'editor_confirmed';
|
|
54
54
|
}
|
|
55
|
+
/** A single pin on a Blueprint graph node (execution or data). */
export interface UnrealChainNodePin {
    /** Pin display name. */
    name: string;
    /** Whether the pin receives or emits. */
    direction: 'input' | 'output';
    /** Pin category string — presumably the editor's pin-category name; confirm against exporter. */
    type: string;
    sub_type?: string;
    /** Literal default when the pin is unconnected. */
    default_value?: string;
    /** Identifiers of pins/nodes this pin is wired to — assumed node/pin IDs; TODO confirm. */
    connected_to?: string[];
    /** Human-readable titles parallel to connected_to. */
    connected_to_title?: string[];
}
/** Pins of a chain node, split by kind. */
export interface UnrealChainNodePins {
    /** Execution-flow pins. */
    exec_pins: UnrealChainNodePin[];
    /** Data-value pins. */
    data_pins: UnrealChainNodePin[];
}
/** Kind-specific metadata for a chain node; fields are populated per node kind. */
export interface UnrealChainNodeDetails {
    /** True for pure (no exec pin) function calls. */
    is_pure?: boolean;
    target_class?: string;
    function_name?: string;
    variable_name?: string;
    /** Distinguishes variable getter vs setter nodes. */
    node_role?: 'variable_get' | 'variable_set';
    /** Branch flavor for control-flow nodes. */
    branch_type?: 'if_then_else' | 'switch';
}
/** One node in a traversed Blueprint execution chain. */
export interface UnrealChainNode {
    node_id: string;
    /** Name of the Blueprint graph containing the node. */
    graph_name?: string;
    node_kind: string;
    node_title?: string;
    /** Traversal depth from the chain anchor. */
    depth: number;
    is_enabled?: boolean;
    /** Author comment attached to the node, if any. */
    comment?: string;
    /** Pin on the predecessor we arrived through during traversal. */
    traversed_from_pin?: string;
    /** Node we arrived from during traversal. */
    traversed_from_node?: string;
    pins?: UnrealChainNodePins;
    details?: UnrealChainNodeDetails;
}
|
|
62
89
|
export interface SyncUnrealAssetManifestResult {
|
|
63
90
|
status: 'success' | 'error';
|
package/package.json
CHANGED
|
@@ -108,6 +108,20 @@ function Resolve-UnrealEditorCmd {
|
|
|
108
108
|
} catch {
|
|
109
109
|
}
|
|
110
110
|
|
|
111
|
+
# Try LauncherInstalled.dat (Epic Games Launcher writes this for all engine installs)
|
|
112
|
+
try {
|
|
113
|
+
$launcherDat = Join-Path $env:LOCALAPPDATA 'EpicGames\UnrealEngineLauncher\LauncherInstalled.dat'
|
|
114
|
+
if (Test-Path -LiteralPath $launcherDat -PathType Leaf) {
|
|
115
|
+
$launcher = Get-Content -LiteralPath $launcherDat -Raw | ConvertFrom-Json
|
|
116
|
+
foreach ($entry in $launcher.InstallationList) {
|
|
117
|
+
if ($entry.AppName -eq "UE_$EngineAssociation" -or $entry.AppName -eq $EngineAssociation) {
|
|
118
|
+
Add-EditorCmdCandidates -Candidates $candidates -RootOrExe $entry.InstallLocation
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
} catch {
|
|
123
|
+
}
|
|
124
|
+
|
|
111
125
|
Add-EditorCmdCandidates -Candidates $candidates -RootOrExe "C:\Program Files\Epic Games\UE_$EngineAssociation"
|
|
112
126
|
}
|
|
113
127
|
|