@kodus/kodus-graph 0.2.8 → 0.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +252 -0
- package/dist/analysis/blast-radius.d.ts +2 -0
- package/dist/analysis/blast-radius.js +55 -0
- package/dist/analysis/communities.d.ts +28 -0
- package/dist/analysis/communities.js +100 -0
- package/dist/analysis/context-builder.d.ts +34 -0
- package/dist/analysis/context-builder.js +92 -0
- package/dist/analysis/diff.d.ts +41 -0
- package/dist/analysis/diff.js +155 -0
- package/dist/analysis/enrich.d.ts +5 -0
- package/dist/analysis/enrich.js +126 -0
- package/dist/analysis/flows.d.ts +27 -0
- package/dist/analysis/flows.js +86 -0
- package/dist/analysis/inheritance.d.ts +3 -0
- package/dist/analysis/inheritance.js +31 -0
- package/dist/analysis/prompt-formatter.d.ts +2 -0
- package/dist/analysis/prompt-formatter.js +173 -0
- package/dist/analysis/risk-score.d.ts +4 -0
- package/dist/analysis/risk-score.js +51 -0
- package/dist/analysis/search.d.ts +11 -0
- package/dist/analysis/search.js +64 -0
- package/dist/analysis/test-gaps.d.ts +2 -0
- package/dist/analysis/test-gaps.js +14 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +210 -0
- package/dist/commands/analyze.d.ts +9 -0
- package/dist/commands/analyze.js +116 -0
- package/dist/commands/communities.d.ts +8 -0
- package/dist/commands/communities.js +9 -0
- package/dist/commands/context.d.ts +12 -0
- package/dist/commands/context.js +130 -0
- package/dist/commands/diff.d.ts +9 -0
- package/dist/commands/diff.js +89 -0
- package/dist/commands/flows.d.ts +8 -0
- package/dist/commands/flows.js +9 -0
- package/dist/commands/parse.d.ts +11 -0
- package/dist/commands/parse.js +101 -0
- package/dist/commands/search.d.ts +12 -0
- package/dist/commands/search.js +27 -0
- package/dist/commands/update.d.ts +7 -0
- package/dist/commands/update.js +154 -0
- package/dist/graph/builder.d.ts +6 -0
- package/dist/graph/builder.js +248 -0
- package/dist/graph/edges.d.ts +23 -0
- package/dist/graph/edges.js +159 -0
- package/dist/graph/json-writer.d.ts +9 -0
- package/dist/graph/json-writer.js +38 -0
- package/dist/graph/loader.d.ts +13 -0
- package/dist/graph/loader.js +101 -0
- package/dist/graph/merger.d.ts +7 -0
- package/dist/graph/merger.js +18 -0
- package/dist/graph/types.d.ts +252 -0
- package/dist/graph/types.js +1 -0
- package/dist/parser/batch.d.ts +5 -0
- package/dist/parser/batch.js +93 -0
- package/dist/parser/discovery.d.ts +7 -0
- package/dist/parser/discovery.js +61 -0
- package/dist/parser/extractor.d.ts +4 -0
- package/dist/parser/extractor.js +33 -0
- package/dist/parser/extractors/generic.d.ts +8 -0
- package/dist/parser/extractors/generic.js +471 -0
- package/dist/parser/extractors/python.d.ts +8 -0
- package/dist/parser/extractors/python.js +133 -0
- package/dist/parser/extractors/ruby.d.ts +8 -0
- package/dist/parser/extractors/ruby.js +153 -0
- package/dist/parser/extractors/typescript.d.ts +10 -0
- package/dist/parser/extractors/typescript.js +365 -0
- package/dist/parser/languages.d.ts +32 -0
- package/dist/parser/languages.js +304 -0
- package/dist/resolver/call-resolver.d.ts +36 -0
- package/dist/resolver/call-resolver.js +178 -0
- package/dist/resolver/external-detector.d.ts +11 -0
- package/dist/resolver/external-detector.js +820 -0
- package/dist/resolver/fs-cache.d.ts +8 -0
- package/dist/resolver/fs-cache.js +36 -0
- package/dist/resolver/import-map.d.ts +12 -0
- package/dist/resolver/import-map.js +21 -0
- package/dist/resolver/import-resolver.d.ts +19 -0
- package/dist/resolver/import-resolver.js +310 -0
- package/dist/resolver/languages/csharp.d.ts +3 -0
- package/dist/resolver/languages/csharp.js +94 -0
- package/dist/resolver/languages/go.d.ts +3 -0
- package/dist/resolver/languages/go.js +197 -0
- package/dist/resolver/languages/java.d.ts +1 -0
- package/dist/resolver/languages/java.js +193 -0
- package/dist/resolver/languages/php.d.ts +3 -0
- package/dist/resolver/languages/php.js +75 -0
- package/dist/resolver/languages/python.d.ts +11 -0
- package/dist/resolver/languages/python.js +127 -0
- package/dist/resolver/languages/ruby.d.ts +24 -0
- package/dist/resolver/languages/ruby.js +110 -0
- package/dist/resolver/languages/rust.d.ts +1 -0
- package/dist/resolver/languages/rust.js +197 -0
- package/dist/resolver/languages/typescript.d.ts +35 -0
- package/dist/resolver/languages/typescript.js +416 -0
- package/dist/resolver/re-export-resolver.d.ts +24 -0
- package/dist/resolver/re-export-resolver.js +57 -0
- package/dist/resolver/symbol-table.d.ts +17 -0
- package/dist/resolver/symbol-table.js +60 -0
- package/dist/shared/extract-calls.d.ts +26 -0
- package/dist/shared/extract-calls.js +57 -0
- package/dist/shared/file-hash.d.ts +3 -0
- package/dist/shared/file-hash.js +10 -0
- package/dist/shared/filters.d.ts +3 -0
- package/dist/shared/filters.js +240 -0
- package/dist/shared/logger.d.ts +6 -0
- package/dist/shared/logger.js +17 -0
- package/dist/shared/qualified-name.d.ts +1 -0
- package/dist/shared/qualified-name.js +9 -0
- package/dist/shared/safe-path.d.ts +6 -0
- package/dist/shared/safe-path.js +29 -0
- package/dist/shared/schemas.d.ts +43 -0
- package/dist/shared/schemas.js +30 -0
- package/dist/shared/temp.d.ts +11 -0
- package/{src/shared/temp.ts → dist/shared/temp.js} +4 -5
- package/package.json +20 -6
- package/src/analysis/blast-radius.ts +0 -54
- package/src/analysis/communities.ts +0 -135
- package/src/analysis/context-builder.ts +0 -130
- package/src/analysis/diff.ts +0 -169
- package/src/analysis/enrich.ts +0 -110
- package/src/analysis/flows.ts +0 -112
- package/src/analysis/inheritance.ts +0 -34
- package/src/analysis/prompt-formatter.ts +0 -175
- package/src/analysis/risk-score.ts +0 -62
- package/src/analysis/search.ts +0 -76
- package/src/analysis/test-gaps.ts +0 -21
- package/src/cli.ts +0 -210
- package/src/commands/analyze.ts +0 -128
- package/src/commands/communities.ts +0 -19
- package/src/commands/context.ts +0 -182
- package/src/commands/diff.ts +0 -96
- package/src/commands/flows.ts +0 -19
- package/src/commands/parse.ts +0 -124
- package/src/commands/search.ts +0 -41
- package/src/commands/update.ts +0 -166
- package/src/graph/builder.ts +0 -209
- package/src/graph/edges.ts +0 -101
- package/src/graph/json-writer.ts +0 -43
- package/src/graph/loader.ts +0 -113
- package/src/graph/merger.ts +0 -25
- package/src/graph/types.ts +0 -283
- package/src/parser/batch.ts +0 -82
- package/src/parser/discovery.ts +0 -75
- package/src/parser/extractor.ts +0 -37
- package/src/parser/extractors/generic.ts +0 -132
- package/src/parser/extractors/python.ts +0 -133
- package/src/parser/extractors/ruby.ts +0 -147
- package/src/parser/extractors/typescript.ts +0 -350
- package/src/parser/languages.ts +0 -122
- package/src/resolver/call-resolver.ts +0 -244
- package/src/resolver/import-map.ts +0 -27
- package/src/resolver/import-resolver.ts +0 -72
- package/src/resolver/languages/csharp.ts +0 -7
- package/src/resolver/languages/go.ts +0 -7
- package/src/resolver/languages/java.ts +0 -7
- package/src/resolver/languages/php.ts +0 -7
- package/src/resolver/languages/python.ts +0 -35
- package/src/resolver/languages/ruby.ts +0 -21
- package/src/resolver/languages/rust.ts +0 -7
- package/src/resolver/languages/typescript.ts +0 -168
- package/src/resolver/re-export-resolver.ts +0 -66
- package/src/resolver/symbol-table.ts +0 -67
- package/src/shared/extract-calls.ts +0 -75
- package/src/shared/file-hash.ts +0 -12
- package/src/shared/filters.ts +0 -243
- package/src/shared/logger.ts +0 -17
- package/src/shared/qualified-name.ts +0 -5
- package/src/shared/safe-path.ts +0 -31
- package/src/shared/schemas.ts +0 -32
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { execSync } from 'child_process';
|
|
2
|
+
import { existsSync, writeFileSync } from 'fs';
|
|
3
|
+
import { relative, resolve } from 'path';
|
|
4
|
+
import { performance } from 'perf_hooks';
|
|
5
|
+
import { computeStructuralDiff } from '../analysis/diff';
|
|
6
|
+
import { buildGraphData } from '../graph/builder';
|
|
7
|
+
import { loadGraph } from '../graph/loader';
|
|
8
|
+
import { parseBatch } from '../parser/batch';
|
|
9
|
+
import { discoverFiles } from '../parser/discovery';
|
|
10
|
+
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
11
|
+
import { createImportMap } from '../resolver/import-map';
|
|
12
|
+
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
13
|
+
import { createSymbolTable } from '../resolver/symbol-table';
|
|
14
|
+
import { computeFileHash } from '../shared/file-hash';
|
|
15
|
+
import { log } from '../shared/logger';
|
|
16
|
+
/**
 * Re-parses the files changed since a git base ref (or an explicit file
 * list), computes a structural diff against a previously saved graph, and
 * writes the result to `opts.out` as pretty-printed JSON.
 *
 * @param {object} opts
 * @param {string} opts.repoDir - Repository root directory.
 * @param {string} [opts.base] - Git ref to diff against; when set, changed
 *   files come from `git diff --name-only <base>`.
 * @param {string[]} [opts.files] - Explicit changed-file list, used when
 *   `opts.base` is not provided.
 * @param {string} opts.graph - Path to the previously generated graph JSON.
 * @param {string} opts.out - Output path for the diff result JSON.
 * @returns {Promise<void>}
 */
export async function executeDiff(opts) {
    const t0 = performance.now();
    const repoDir = resolve(opts.repoDir);
    // Resolve changed files
    let changedFiles;
    if (opts.base) {
        try {
            // NOTE(review): opts.base is interpolated into a shell command; a
            // hostile ref string could inject commands. Consider execFileSync
            // with an argument array instead — confirm opts.base provenance.
            const output = execSync(`git diff --name-only ${opts.base}`, { cwd: repoDir, encoding: 'utf-8' });
            changedFiles = output.trim().split('\n').filter(Boolean);
        }
        catch (err) {
            log.error('failed to run git diff', { base: opts.base, error: String(err) });
            process.exit(1);
        }
    }
    else {
        // Fall back to an empty list so `.length` below cannot throw when
        // neither a git base nor an explicit file list was supplied.
        changedFiles = opts.files ?? [];
    }
    process.stderr.write(`[1/4] ${changedFiles.length} changed files\n`);
    // Load old graph
    const graphPath = resolve(opts.graph);
    if (!existsSync(graphPath)) {
        log.error('graph file not found', { path: graphPath });
        process.exit(1);
    }
    const oldGraph = loadGraph(graphPath);
    process.stderr.write(`[2/4] Loaded previous graph (${oldGraph.nodes.length} nodes)\n`);
    // Re-parse changed files
    const absFiles = discoverFiles(repoDir, changedFiles);
    const rawGraph = await parseBatch(absFiles, repoDir);
    const tsconfigAliases = loadTsconfigAliases(repoDir);
    const symbolTable = createSymbolTable();
    const importMap = createImportMap();
    const importEdges = [];
    // Register every parsed symbol so call/import resolution can find it.
    for (const f of rawGraph.functions) {
        symbolTable.add(f.file, f.name, f.qualified);
    }
    for (const c of rawGraph.classes) {
        symbolTable.add(c.file, c.name, c.qualified);
    }
    for (const i of rawGraph.interfaces) {
        symbolTable.add(i.file, i.name, i.qualified);
    }
    // Resolve each import to an in-repo relative path when possible; fall
    // back to the raw module specifier for external/unresolved imports.
    for (const imp of rawGraph.imports) {
        const langKey = imp.lang;
        const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
        const resolvedRel = resolved ? relative(repoDir, resolved) : null;
        importEdges.push({
            source: imp.file,
            target: resolvedRel || imp.module,
            resolved: !!resolvedRel,
            line: imp.line,
        });
        const target = resolvedRel || imp.module;
        for (const name of imp.names) {
            importMap.add(imp.file, name, target);
        }
    }
    const { callEdges } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
    const fileHashes = new Map();
    for (const f of absFiles) {
        try {
            fileHashes.set(relative(repoDir, f), computeFileHash(f));
        }
        catch (err) {
            // Best-effort: keep going on unreadable files, but surface the
            // failure instead of swallowing it (consistent with executeParse).
            log.warn('Failed to compute file hash', { file: f, error: String(err) });
        }
    }
    const newGraphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes, symbolTable, importMap);
    process.stderr.write(`[3/4] Re-parsed ${absFiles.length} files (${newGraphData.nodes.length} nodes)\n`);
    // Compute diff — normalize absolute changed paths to repo-relative first.
    const relChangedFiles = changedFiles.map((f) => (f.startsWith('/') ? relative(repoDir, f) : f));
    const result = computeStructuralDiff(oldGraph, newGraphData.nodes, newGraphData.edges, relChangedFiles);
    process.stderr.write(`[4/4] Diff: +${result.summary.added} -${result.summary.removed} ~${result.summary.modified} nodes (${Math.round(performance.now() - t0)}ms)\n`);
    writeFileSync(opts.out, JSON.stringify(result, null, 2));
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { writeFileSync } from 'fs';
|
|
2
|
+
import { detectFlows } from '../analysis/flows';
|
|
3
|
+
import { loadGraph } from '../graph/loader';
|
|
4
|
+
/**
 * Loads a previously generated graph, runs flow detection on it, writes the
 * full result to `opts.out` as JSON, and prints a one-line summary to stderr.
 *
 * @param {object} opts
 * @param {string} opts.graph - Path to the graph JSON to load.
 * @param {number} [opts.maxDepth] - Depth limit forwarded to detectFlows.
 * @param {string} [opts.type] - Flow type filter forwarded to detectFlows.
 * @param {string} opts.out - Output path for the flow result JSON.
 */
export function executeFlows(opts) {
    const loadedGraph = loadGraph(opts.graph);
    const detectionOptions = { maxDepth: opts.maxDepth, type: opts.type };
    const result = detectFlows(loadedGraph, detectionOptions);
    writeFileSync(opts.out, JSON.stringify(result, null, 2));
    const { summary } = result;
    process.stderr.write(`Flows: ${summary.total_flows} detected (test:${summary.by_type.test} http:${summary.by_type.http}), avg depth ${summary.avg_depth}\n`);
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/** Options accepted by the `parse` command. */
export interface ParseOptions {
    /** Repository root directory to parse. */
    repoDir: string;
    /** Explicit list of files to parse; ignored when `all` is true. */
    files?: string[];
    /** When true, discover every eligible file in the repo instead of using `files`. */
    all: boolean;
    /** Output path for the generated graph JSON. */
    out: string;
    /** Include patterns forwarded to file discovery (semantics defined by discoverFiles). */
    include?: string[];
    /** Exclude patterns forwarded to file discovery (semantics defined by discoverFiles). */
    exclude?: string[];
    /** When true, forwarded to the batch parser to skip test files. */
    skipTests?: boolean;
    /** Memory budget hint (MB) forwarded to the batch parser. */
    maxMemoryMB?: number;
}
/** Runs the full parse pipeline (discover → parse → resolve → build → write). */
export declare function executeParse(opts: ParseOptions): Promise<void>;
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
import { relative, resolve } from 'path';
|
|
2
|
+
import { performance } from 'perf_hooks';
|
|
3
|
+
import { buildGraphData } from '../graph/builder';
|
|
4
|
+
import { writeGraphJSON } from '../graph/json-writer';
|
|
5
|
+
import { parseBatch } from '../parser/batch';
|
|
6
|
+
import { discoverFiles } from '../parser/discovery';
|
|
7
|
+
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
8
|
+
import { createImportMap } from '../resolver/import-map';
|
|
9
|
+
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
10
|
+
import { buildReExportMap } from '../resolver/re-export-resolver';
|
|
11
|
+
import { createSymbolTable } from '../resolver/symbol-table';
|
|
12
|
+
import { computeFileHash } from '../shared/file-hash';
|
|
13
|
+
import { log } from '../shared/logger';
|
|
14
|
+
/**
 * Full parse pipeline: discovers files, parses and extracts symbols, resolves
 * imports (including barrel re-exports) and calls, builds the graph, and
 * writes it via writeGraphJSON to `opts.out`. Progress for each of the five
 * phases is reported on stderr.
 *
 * @param {object} opts - See ParseOptions in the matching .d.ts: repoDir,
 *   files, all, out, include, exclude, skipTests, maxMemoryMB.
 * @returns {Promise<void>}
 */
export async function executeParse(opts) {
    const t0 = performance.now();
    const repoDir = resolve(opts.repoDir);
    // Phase 1: Discover files
    // When --all is set the explicit file list is ignored and discovery scans
    // the whole repo subject to include/exclude.
    const files = discoverFiles(repoDir, opts.all ? undefined : opts.files, opts.include, opts.exclude);
    process.stderr.write(`[1/5] Discovered ${files.length} files\n`);
    // Phase 2: Parse + extract
    let rawGraph = await parseBatch(files, repoDir, { skipTests: opts.skipTests, maxMemoryMB: opts.maxMemoryMB });
    process.stderr.write(`[2/5] Parsed ${rawGraph.functions.length} functions, ${rawGraph.classes.length} classes, ${rawGraph.rawCalls.length} call sites\n`);
    // Phase 3: Resolve imports
    const tsconfigAliases = loadTsconfigAliases(repoDir);
    // `let` (not const) so these can be nulled after phase 5 to release memory.
    let symbolTable = createSymbolTable();
    let importMap = createImportMap();
    let importEdges = [];
    // Register every parsed symbol so import/call resolution can find it.
    for (const f of rawGraph.functions) {
        symbolTable.add(f.file, f.name, f.qualified);
    }
    for (const c of rawGraph.classes) {
        symbolTable.add(c.file, c.name, c.qualified);
    }
    for (const i of rawGraph.interfaces) {
        symbolTable.add(i.file, i.name, i.qualified);
    }
    // Pre-resolve re-exports so barrel imports follow through to actual definitions
    const barrelMap = buildReExportMap(rawGraph.reExports, repoDir, tsconfigAliases);
    for (const imp of rawGraph.imports) {
        const langKey = imp.lang;
        const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
        // Unresolved (external) imports keep the raw module specifier as target.
        const resolvedRel = resolved ? relative(repoDir, resolved) : null;
        importEdges.push({
            source: imp.file,
            target: resolvedRel || imp.module,
            resolved: !!resolvedRel,
            line: imp.line,
        });
        const target = resolvedRel || imp.module;
        for (const name of imp.names) {
            // If target is a barrel file, follow re-exports to find the actual definition
            let finalTarget = target;
            if (resolvedRel) {
                const reExportedFiles = barrelMap.get(resolvedRel);
                if (reExportedFiles) {
                    // First re-exported file that actually defines `name` wins.
                    for (const reFile of reExportedFiles) {
                        if (symbolTable.lookupExact(reFile, name)) {
                            finalTarget = reFile;
                            break;
                        }
                    }
                }
            }
            importMap.add(imp.file, name, finalTarget);
        }
    }
    process.stderr.write(`[3/5] Resolved ${importEdges.filter((e) => e.resolved).length}/${importEdges.length} imports\n`);
    // Phase 4: Resolve calls
    let { callEdges, stats } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
    process.stderr.write(`[4/5] Resolved ${callEdges.length} calls (DI:${stats.di} same:${stats.same} import:${stats.import} unique:${stats.unique} ambiguous:${stats.ambiguous} noise:${stats.noise})\n`);
    // Phase 5: Build output
    const fileHashes = new Map();
    for (const f of files) {
        try {
            fileHashes.set(relative(repoDir, f), computeFileHash(f));
        }
        catch (err) {
            // Best-effort: a file that cannot be hashed is logged and skipped.
            log.warn('Failed to compute file hash', { file: f, error: String(err) });
        }
    }
    // Captured before rawGraph is nulled below, for use in metadata.
    const parseErrors = rawGraph.parseErrors;
    const extractErrors = rawGraph.extractErrors;
    const graphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes, symbolTable, importMap);
    process.stderr.write(`[5/5] Built graph: ${graphData.nodes.length} nodes, ${graphData.edges.length} edges\n`);
    // Release intermediaries — no longer needed after buildGraphData
    rawGraph = null;
    symbolTable = null;
    importMap = null;
    callEdges = null;
    importEdges = null;
    const metadata = {
        repo_dir: repoDir,
        files_parsed: files.length,
        total_nodes: graphData.nodes.length,
        total_edges: graphData.edges.length,
        duration_ms: Math.round(performance.now() - t0),
        parse_errors: parseErrors,
        extract_errors: extractErrors,
    };
    writeGraphJSON(opts.out, metadata, graphData.nodes, graphData.edges);
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** Options accepted by the `search` command. */
interface SearchCommandOptions {
    /** Path to the graph JSON to load. */
    graph: string;
    /** Query pattern for the generic node search mode. */
    query?: string;
    /** Node-kind filter for the generic search mode. */
    kind?: string;
    /** File filter for the generic search mode. */
    file?: string;
    /** When set, list callers of this symbol (takes precedence over all other modes). */
    callersOf?: string;
    /** When set (and callersOf is not), list callees of this symbol. */
    calleesOf?: string;
    /** Result limit applied in the generic search mode. */
    limit: number;
    /** Output file path; results go to stdout when omitted. */
    out?: string;
}
/** Runs one search mode against the graph and emits results as JSON. */
export declare function executeSearch(opts: SearchCommandOptions): void;
export {};
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { writeFileSync } from 'fs';
|
|
2
|
+
import { findCallees, findCallers, searchNodes } from '../analysis/search';
|
|
3
|
+
import { loadGraph } from '../graph/loader';
|
|
4
|
+
/**
 * Runs one of three query modes against a loaded graph — callers-of,
 * callees-of, or a generic node search — and emits the matches as
 * pretty-printed JSON to `opts.out` (file) or stdout.
 *
 * @param {object} opts - See SearchCommandOptions in the matching .d.ts.
 */
export function executeSearch(opts) {
    const { callersOf, calleesOf, query, kind, file, limit, out } = opts;
    const graph = loadGraph(opts.graph);
    let matches;
    let describedQuery;
    if (callersOf) {
        // Caller lookup takes precedence over every other mode.
        matches = findCallers(graph, callersOf);
        describedQuery = { callers_of: callersOf, kind: null, file: null };
    }
    else if (calleesOf) {
        matches = findCallees(graph, calleesOf);
        describedQuery = { callees_of: calleesOf, kind: null, file: null };
    }
    else {
        matches = searchNodes(graph, { query, kind, file, limit });
        describedQuery = { pattern: query || null, kind: kind || null, file: file || null };
    }
    const serialized = JSON.stringify({ results: matches, total: matches.length, query: describedQuery }, null, 2);
    if (out) {
        writeFileSync(out, serialized);
    }
    else {
        process.stdout.write(`${serialized}\n`);
    }
}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, writeFileSync } from 'fs';
|
|
2
|
+
import { dirname, relative, resolve } from 'path';
|
|
3
|
+
import { performance } from 'perf_hooks';
|
|
4
|
+
import { buildGraphData } from '../graph/builder';
|
|
5
|
+
import { loadGraph } from '../graph/loader';
|
|
6
|
+
import { parseBatch } from '../parser/batch';
|
|
7
|
+
import { discoverFiles } from '../parser/discovery';
|
|
8
|
+
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
9
|
+
import { createImportMap } from '../resolver/import-map';
|
|
10
|
+
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
11
|
+
import { createSymbolTable } from '../resolver/symbol-table';
|
|
12
|
+
import { computeFileHash } from '../shared/file-hash';
|
|
13
|
+
import { log } from '../shared/logger';
|
|
14
|
+
// Default location of the graph file, relative to the repo root.
const DEFAULT_GRAPH_PATH = '.kodus-graph/graph.json';
/**
 * Incrementally updates a previously generated graph: classifies files as
 * added/modified/deleted/unchanged by comparing stored file hashes, re-parses
 * only the changed files, and merges the new fragment with the untouched
 * portion of the old graph before writing the result to `outPath`.
 *
 * @param {object} opts
 * @param {string} opts.repoDir - Repository root directory.
 * @param {string} [opts.graph] - Path (relative to repoDir) of the existing
 *   graph JSON; defaults to DEFAULT_GRAPH_PATH.
 * @param {string} [opts.out] - Output path; defaults to the input graph path.
 * @returns {Promise<void>}
 */
export async function executeUpdate(opts) {
    const t0 = performance.now();
    const repoDir = resolve(opts.repoDir);
    const graphPath = resolve(repoDir, opts.graph || DEFAULT_GRAPH_PATH);
    const outPath = resolve(repoDir, opts.out || opts.graph || DEFAULT_GRAPH_PATH);
    if (!existsSync(graphPath)) {
        log.error('graph file not found — run "kodus-graph parse" first', { path: graphPath });
        process.exit(1);
    }
    const oldGraph = loadGraph(graphPath);
    process.stderr.write(`[1/5] Loaded previous graph (${oldGraph.nodes.length} nodes)\n`);
    // Build file hash index from old graph
    // (first node per file wins; all nodes of a file share the same hash).
    const oldHashes = new Map();
    for (const node of oldGraph.nodes) {
        if (node.file_hash && !oldHashes.has(node.file_path)) {
            oldHashes.set(node.file_path, node.file_hash);
        }
    }
    // Discover current files
    const allFiles = discoverFiles(repoDir);
    const allRel = allFiles.map((f) => relative(repoDir, f));
    const currentFiles = new Set(allRel);
    const oldFiles = new Set(oldHashes.keys());
    // Classify files
    const added = [];
    const modified = [];
    const deleted = [];
    const unchanged = [];
    for (const file of currentFiles) {
        const absPath = resolve(repoDir, file);
        if (!oldHashes.has(file)) {
            added.push(file);
        }
        else {
            const currentHash = computeFileHash(absPath);
            if (currentHash !== oldHashes.get(file)) {
                modified.push(file);
            }
            else {
                unchanged.push(file);
            }
        }
    }
    for (const file of oldFiles) {
        if (!currentFiles.has(file)) {
            deleted.push(file);
        }
    }
    const toReparse = [...added, ...modified];
    process.stderr.write(`[2/5] Files: ${added.length} added, ${modified.length} modified, ${deleted.length} deleted, ${unchanged.length} unchanged\n`);
    if (toReparse.length === 0 && deleted.length === 0) {
        // Fast path: nothing changed — rewrite the old graph with refreshed metadata.
        process.stderr.write('[3/5] No changes detected, graph is up to date\n');
        const output = {
            metadata: {
                ...oldGraph.metadata,
                duration_ms: Math.round(performance.now() - t0),
                files_unchanged: unchanged.length,
                incremental: true,
            },
            nodes: oldGraph.nodes,
            edges: oldGraph.edges,
        };
        ensureDir(outPath);
        writeFileSync(outPath, JSON.stringify(output, null, 2));
        return;
    }
    // Re-parse changed files
    const absToReparse = toReparse.map((f) => resolve(repoDir, f));
    const rawGraph = await parseBatch(absToReparse, repoDir);
    process.stderr.write(`[3/5] Re-parsed ${toReparse.length} files\n`);
    // Resolve imports and calls for new files
    const tsconfigAliases = loadTsconfigAliases(repoDir);
    const symbolTable = createSymbolTable();
    const importMap = createImportMap();
    const importEdges = [];
    // Register every re-parsed symbol so import/call resolution can find it.
    for (const f of rawGraph.functions) {
        symbolTable.add(f.file, f.name, f.qualified);
    }
    for (const c of rawGraph.classes) {
        symbolTable.add(c.file, c.name, c.qualified);
    }
    for (const i of rawGraph.interfaces) {
        symbolTable.add(i.file, i.name, i.qualified);
    }
    for (const imp of rawGraph.imports) {
        const langKey = imp.lang;
        const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
        // Unresolved (external) imports keep the raw module specifier as target.
        const resolvedRel = resolved ? relative(repoDir, resolved) : null;
        importEdges.push({
            source: imp.file,
            target: resolvedRel || imp.module,
            resolved: !!resolvedRel,
            line: imp.line,
        });
        const target = resolvedRel || imp.module;
        for (const name of imp.names) {
            importMap.add(imp.file, name, target);
        }
    }
    const { callEdges } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
    const fileHashes = new Map();
    for (const f of absToReparse) {
        try {
            fileHashes.set(relative(repoDir, f), computeFileHash(f));
        }
        catch (err) {
            // Best-effort: keep going on unreadable files, but surface the
            // failure instead of swallowing it (consistent with executeParse).
            log.warn('Failed to compute file hash', { file: f, error: String(err) });
        }
    }
    const newGraphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes, symbolTable, importMap);
    process.stderr.write(`[4/5] Built new graph fragment (${newGraphData.nodes.length} nodes)\n`);
    // Merge: keep old nodes/edges NOT in changed/deleted files, add new ones
    // NOTE(review): edges are filtered only by their source file_path — old
    // edges whose *target* lives in a deleted file survive the merge. Verify
    // downstream consumers tolerate dangling edge targets.
    const changedOrDeleted = new Set([...toReparse, ...deleted]);
    const mergedNodes = oldGraph.nodes.filter((n) => !changedOrDeleted.has(n.file_path));
    const mergedEdges = oldGraph.edges.filter((e) => !changedOrDeleted.has(e.file_path));
    mergedNodes.push(...newGraphData.nodes);
    mergedEdges.push(...newGraphData.edges);
    process.stderr.write(`[5/5] Merged: ${mergedNodes.length} nodes, ${mergedEdges.length} edges\n`);
    const output = {
        metadata: {
            repo_dir: repoDir,
            files_parsed: toReparse.length,
            files_unchanged: unchanged.length,
            total_nodes: mergedNodes.length,
            total_edges: mergedEdges.length,
            duration_ms: Math.round(performance.now() - t0),
            parse_errors: rawGraph.parseErrors,
            extract_errors: rawGraph.extractErrors,
            incremental: true,
        },
        nodes: mergedNodes,
        edges: mergedEdges,
    };
    ensureDir(outPath);
    writeFileSync(outPath, JSON.stringify(output, null, 2));
}
|
|
149
|
+
/**
 * Ensures the parent directory of `filePath` exists, creating it and any
 * missing ancestors if necessary.
 *
 * @param {string} filePath - Path to a file whose containing directory
 *   should exist after this call.
 */
function ensureDir(filePath) {
    // mkdirSync with `recursive: true` is a no-op when the directory already
    // exists, so the previous existsSync pre-check (a TOCTOU race between the
    // check and the create) is unnecessary and has been removed.
    mkdirSync(dirname(filePath), { recursive: true });
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { GraphData, ImportEdge, RawCallEdge, RawGraph } from './types';
|
|
2
|
+
/**
 * Assembles the final graph (nodes + edges) from raw parse output, resolved
 * call edges, and import edges.
 *
 * @param raw - Raw extraction output (functions, classes, interfaces, …).
 * @param callEdges - Resolved call-site edges.
 * @param importEdges - Resolved import edges.
 * @param _repoDir - Repository root (underscore-prefixed: unused by the implementation).
 * @param fileHashes - Map of repo-relative file path → content hash, stamped onto nodes.
 * @param symbolTable - Optional global symbol lookup (structural subset of the real table).
 * @param importMap - Optional per-file import lookup (structural subset of the real map).
 * @returns The assembled GraphData.
 */
export declare function buildGraphData(raw: RawGraph, callEdges: RawCallEdge[], importEdges: ImportEdge[], _repoDir: string, fileHashes: Map<string, string>, symbolTable?: {
    lookupGlobal(name: string): string[];
}, importMap?: {
    lookup(file: string, name: string): string | null;
}): GraphData;
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
import { deriveEdges } from './edges';
|
|
2
|
+
export function buildGraphData(raw, callEdges, importEdges, _repoDir, fileHashes, symbolTable, importMap) {
|
|
3
|
+
const nodes = [];
|
|
4
|
+
const edges = [];
|
|
5
|
+
// Functions -> nodes
|
|
6
|
+
for (const f of raw.functions) {
|
|
7
|
+
nodes.push({
|
|
8
|
+
kind: f.kind,
|
|
9
|
+
ast_kind: f.ast_kind,
|
|
10
|
+
name: f.name,
|
|
11
|
+
qualified_name: f.qualified,
|
|
12
|
+
file_path: f.file,
|
|
13
|
+
line_start: f.line_start,
|
|
14
|
+
line_end: f.line_end,
|
|
15
|
+
language: detectLang(f.file),
|
|
16
|
+
parent_name: f.className || undefined,
|
|
17
|
+
params: f.params || undefined,
|
|
18
|
+
return_type: f.returnType || undefined,
|
|
19
|
+
is_test: false,
|
|
20
|
+
file_hash: fileHashes.get(f.file) || '',
|
|
21
|
+
content_hash: f.content_hash,
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
// Classes -> nodes
|
|
25
|
+
for (const c of raw.classes) {
|
|
26
|
+
nodes.push({
|
|
27
|
+
kind: 'Class',
|
|
28
|
+
ast_kind: c.ast_kind,
|
|
29
|
+
name: c.name,
|
|
30
|
+
qualified_name: c.qualified,
|
|
31
|
+
file_path: c.file,
|
|
32
|
+
line_start: c.line_start,
|
|
33
|
+
line_end: c.line_end,
|
|
34
|
+
language: detectLang(c.file),
|
|
35
|
+
is_test: false,
|
|
36
|
+
file_hash: fileHashes.get(c.file) || '',
|
|
37
|
+
content_hash: c.content_hash,
|
|
38
|
+
});
|
|
39
|
+
}
|
|
40
|
+
// Interfaces -> nodes
|
|
41
|
+
for (const i of raw.interfaces) {
|
|
42
|
+
nodes.push({
|
|
43
|
+
kind: 'Interface',
|
|
44
|
+
ast_kind: i.ast_kind,
|
|
45
|
+
name: i.name,
|
|
46
|
+
qualified_name: i.qualified,
|
|
47
|
+
file_path: i.file,
|
|
48
|
+
line_start: i.line_start,
|
|
49
|
+
line_end: i.line_end,
|
|
50
|
+
language: detectLang(i.file),
|
|
51
|
+
is_test: false,
|
|
52
|
+
file_hash: fileHashes.get(i.file) || '',
|
|
53
|
+
content_hash: i.content_hash,
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
// Enums -> nodes
|
|
57
|
+
for (const e of raw.enums) {
|
|
58
|
+
nodes.push({
|
|
59
|
+
kind: 'Enum',
|
|
60
|
+
ast_kind: e.ast_kind,
|
|
61
|
+
name: e.name,
|
|
62
|
+
qualified_name: e.qualified,
|
|
63
|
+
file_path: e.file,
|
|
64
|
+
line_start: e.line_start,
|
|
65
|
+
line_end: e.line_end,
|
|
66
|
+
language: detectLang(e.file),
|
|
67
|
+
is_test: false,
|
|
68
|
+
file_hash: fileHashes.get(e.file) || '',
|
|
69
|
+
content_hash: e.content_hash,
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
// Tests -> nodes
|
|
73
|
+
for (const t of raw.tests) {
|
|
74
|
+
nodes.push({
|
|
75
|
+
kind: 'Test',
|
|
76
|
+
ast_kind: t.ast_kind,
|
|
77
|
+
name: t.name,
|
|
78
|
+
qualified_name: t.qualified,
|
|
79
|
+
file_path: t.file,
|
|
80
|
+
line_start: t.line_start,
|
|
81
|
+
line_end: t.line_end,
|
|
82
|
+
language: detectLang(t.file),
|
|
83
|
+
is_test: true,
|
|
84
|
+
file_hash: fileHashes.get(t.file) || '',
|
|
85
|
+
content_hash: t.content_hash,
|
|
86
|
+
});
|
|
87
|
+
}
|
|
88
|
+
// Build a set of all parsed file paths for validation (filter external targets)
|
|
89
|
+
const parsedFiles = new Set();
|
|
90
|
+
for (const f of raw.functions) {
|
|
91
|
+
parsedFiles.add(f.file);
|
|
92
|
+
}
|
|
93
|
+
for (const c of raw.classes) {
|
|
94
|
+
parsedFiles.add(c.file);
|
|
95
|
+
}
|
|
96
|
+
for (const i of raw.interfaces) {
|
|
97
|
+
parsedFiles.add(i.file);
|
|
98
|
+
}
|
|
99
|
+
for (const e of raw.enums) {
|
|
100
|
+
parsedFiles.add(e.file);
|
|
101
|
+
}
|
|
102
|
+
for (const t of raw.tests) {
|
|
103
|
+
parsedFiles.add(t.file);
|
|
104
|
+
}
|
|
105
|
+
// Build file→functions index to resolve caller from line number
|
|
106
|
+
const functionsByFile = new Map();
|
|
107
|
+
for (const node of nodes) {
|
|
108
|
+
if (node.kind === 'Class' || node.kind === 'Interface' || node.kind === 'Enum') {
|
|
109
|
+
continue;
|
|
110
|
+
}
|
|
111
|
+
const entry = { qualified_name: node.qualified_name, line_start: node.line_start, line_end: node.line_end };
|
|
112
|
+
const list = functionsByFile.get(node.file_path);
|
|
113
|
+
if (list) {
|
|
114
|
+
list.push(entry);
|
|
115
|
+
}
|
|
116
|
+
else {
|
|
117
|
+
functionsByFile.set(node.file_path, [entry]);
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
// Sort descending by line_start so inner/nested functions match first
|
|
121
|
+
for (const list of functionsByFile.values()) {
|
|
122
|
+
list.sort((a, b) => b.line_start - a.line_start);
|
|
123
|
+
}
|
|
124
|
+
// CALLS edges — resolve caller function from call line number
|
|
125
|
+
for (const ce of callEdges) {
|
|
126
|
+
// Skip calls to external packages (target file not in repo)
|
|
127
|
+
const targetFile = ce.target.split('::')[0];
|
|
128
|
+
if (targetFile && !parsedFiles.has(targetFile)) {
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
const sourceFile = ce.source.includes('::') ? ce.source.split('::')[0] : ce.source;
|
|
132
|
+
let sourceQualified;
|
|
133
|
+
if (ce.source.includes('::')) {
|
|
134
|
+
sourceQualified = ce.source;
|
|
135
|
+
}
|
|
136
|
+
else {
|
|
137
|
+
// Find the innermost function containing this call line
|
|
138
|
+
const fns = functionsByFile.get(ce.source);
|
|
139
|
+
let resolved;
|
|
140
|
+
if (fns) {
|
|
141
|
+
for (const fn of fns) {
|
|
142
|
+
if (ce.line >= fn.line_start && ce.line <= fn.line_end) {
|
|
143
|
+
resolved = fn.qualified_name;
|
|
144
|
+
break;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
sourceQualified = resolved || `${ce.source}::unknown`;
|
|
149
|
+
}
|
|
150
|
+
edges.push({
|
|
151
|
+
kind: 'CALLS',
|
|
152
|
+
source_qualified: sourceQualified,
|
|
153
|
+
target_qualified: ce.target,
|
|
154
|
+
file_path: sourceFile,
|
|
155
|
+
line: ce.line,
|
|
156
|
+
confidence: ce.confidence,
|
|
157
|
+
});
|
|
158
|
+
}
|
|
159
|
+
// IMPORTS edges — only emit resolved imports (skip external/unresolved packages)
|
|
160
|
+
for (const ie of importEdges) {
|
|
161
|
+
if (!ie.resolved) {
|
|
162
|
+
continue;
|
|
163
|
+
}
|
|
164
|
+
edges.push({
|
|
165
|
+
kind: 'IMPORTS',
|
|
166
|
+
source_qualified: ie.source,
|
|
167
|
+
target_qualified: ie.target,
|
|
168
|
+
file_path: ie.source,
|
|
169
|
+
line: ie.line,
|
|
170
|
+
});
|
|
171
|
+
}
|
|
172
|
+
// Derived edges
|
|
173
|
+
const derived = deriveEdges(raw, importEdges, symbolTable, importMap);
|
|
174
|
+
// Release raw graph arrays — no longer needed after deriveEdges
|
|
175
|
+
raw.functions = [];
|
|
176
|
+
raw.classes = [];
|
|
177
|
+
raw.interfaces = [];
|
|
178
|
+
raw.enums = [];
|
|
179
|
+
raw.tests = [];
|
|
180
|
+
raw.rawCalls = [];
|
|
181
|
+
for (const e of derived.inherits) {
|
|
182
|
+
edges.push({
|
|
183
|
+
kind: 'INHERITS',
|
|
184
|
+
source_qualified: e.source,
|
|
185
|
+
target_qualified: e.target,
|
|
186
|
+
file_path: e.file || '',
|
|
187
|
+
line: 0,
|
|
188
|
+
});
|
|
189
|
+
}
|
|
190
|
+
for (const e of derived.implements) {
|
|
191
|
+
edges.push({
|
|
192
|
+
kind: 'IMPLEMENTS',
|
|
193
|
+
source_qualified: e.source,
|
|
194
|
+
target_qualified: e.target,
|
|
195
|
+
file_path: e.file || '',
|
|
196
|
+
line: 0,
|
|
197
|
+
});
|
|
198
|
+
}
|
|
199
|
+
for (const e of derived.testedBy) {
|
|
200
|
+
edges.push({
|
|
201
|
+
kind: 'TESTED_BY',
|
|
202
|
+
source_qualified: e.source,
|
|
203
|
+
target_qualified: e.target,
|
|
204
|
+
file_path: e.target || '',
|
|
205
|
+
line: 0,
|
|
206
|
+
});
|
|
207
|
+
}
|
|
208
|
+
for (const e of derived.contains) {
|
|
209
|
+
edges.push({
|
|
210
|
+
kind: 'CONTAINS',
|
|
211
|
+
source_qualified: e.source,
|
|
212
|
+
target_qualified: e.target,
|
|
213
|
+
file_path: e.source,
|
|
214
|
+
line: 0,
|
|
215
|
+
});
|
|
216
|
+
}
|
|
217
|
+
return { nodes, edges };
|
|
218
|
+
}
|
|
219
|
+
// File-extension → language identifier table.
// Kept at module scope so the Map is built once, not per call.
// NOTE: adds .mts/.cts (ES/CommonJS TypeScript modules) for parity with the
// already-supported .mjs/.cjs JavaScript forms.
const EXT_TO_LANG = new Map([
    ['.ts', 'typescript'],
    ['.tsx', 'typescript'],
    ['.mts', 'typescript'],
    ['.cts', 'typescript'],
    ['.js', 'javascript'],
    ['.jsx', 'javascript'],
    ['.mjs', 'javascript'],
    ['.cjs', 'javascript'],
    ['.py', 'python'],
    ['.rb', 'ruby'],
    ['.go', 'go'],
    ['.java', 'java'],
    ['.rs', 'rust'],
    ['.cs', 'csharp'],
    ['.php', 'php'],
]);
/**
 * Detect the programming language of a file from its path's extension.
 *
 * @param {string} file - File path (any form; only the trailing extension is inspected).
 * @returns {string} Language identifier (e.g. 'typescript', 'python') or 'unknown'
 *                   when the extension is unrecognized or the path has no extension.
 */
function detectLang(file) {
    const dot = file.lastIndexOf('.');
    if (dot === -1) {
        // No extension at all (e.g. 'Makefile') — nothing to match on.
        return 'unknown';
    }
    return EXT_TO_LANG.get(file.slice(dot)) ?? 'unknown';
}
|