@kodus/kodus-graph 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +62 -0
- package/src/analysis/blast-radius.ts +54 -0
- package/src/analysis/communities.ts +135 -0
- package/src/analysis/diff.ts +120 -0
- package/src/analysis/flows.ts +112 -0
- package/src/analysis/review-context.ts +141 -0
- package/src/analysis/risk-score.ts +62 -0
- package/src/analysis/search.ts +76 -0
- package/src/analysis/test-gaps.ts +21 -0
- package/src/cli.ts +192 -0
- package/src/commands/analyze.ts +66 -0
- package/src/commands/communities.ts +19 -0
- package/src/commands/context.ts +69 -0
- package/src/commands/diff.ts +96 -0
- package/src/commands/flows.ts +19 -0
- package/src/commands/parse.ts +100 -0
- package/src/commands/search.ts +41 -0
- package/src/commands/update.ts +166 -0
- package/src/graph/builder.ts +170 -0
- package/src/graph/edges.ts +101 -0
- package/src/graph/loader.ts +100 -0
- package/src/graph/merger.ts +25 -0
- package/src/graph/types.ts +218 -0
- package/src/parser/batch.ts +74 -0
- package/src/parser/discovery.ts +42 -0
- package/src/parser/extractor.ts +37 -0
- package/src/parser/extractors/generic.ts +87 -0
- package/src/parser/extractors/python.ts +127 -0
- package/src/parser/extractors/ruby.ts +142 -0
- package/src/parser/extractors/typescript.ts +329 -0
- package/src/parser/languages.ts +122 -0
- package/src/resolver/call-resolver.ts +179 -0
- package/src/resolver/import-map.ts +27 -0
- package/src/resolver/import-resolver.ts +72 -0
- package/src/resolver/languages/csharp.ts +7 -0
- package/src/resolver/languages/go.ts +7 -0
- package/src/resolver/languages/java.ts +7 -0
- package/src/resolver/languages/php.ts +7 -0
- package/src/resolver/languages/python.ts +35 -0
- package/src/resolver/languages/ruby.ts +21 -0
- package/src/resolver/languages/rust.ts +7 -0
- package/src/resolver/languages/typescript.ts +168 -0
- package/src/resolver/symbol-table.ts +53 -0
- package/src/shared/file-hash.ts +7 -0
- package/src/shared/filters.ts +243 -0
- package/src/shared/logger.ts +14 -0
- package/src/shared/qualified-name.ts +5 -0
- package/src/shared/safe-path.ts +31 -0
- package/src/shared/schemas.ts +31 -0
- package/src/shared/temp.ts +17 -0
package/package.json
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@kodus/kodus-graph",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Code graph builder for Kodus code review — parses source code into structural graphs with nodes, edges, and analysis",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"kodus-graph": "./src/cli.ts"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"src/**/*.ts",
|
|
11
|
+
"README.md",
|
|
12
|
+
"LICENSE"
|
|
13
|
+
],
|
|
14
|
+
"scripts": {
|
|
15
|
+
"dev": "bun run src/cli.ts",
|
|
16
|
+
"test": "bun test",
|
|
17
|
+
"lint": "biome check src/ tests/",
|
|
18
|
+
"lint:fix": "biome check --write src/ tests/",
|
|
19
|
+
"format": "biome format --write src/ tests/",
|
|
20
|
+
"typecheck": "tsc --noEmit",
|
|
21
|
+
"check": "bun run typecheck && bun run lint && bun test",
|
|
22
|
+
"build": "bun build src/cli.ts --compile --outfile kodus-graph",
|
|
23
|
+
"build:all": "bun run build:darwin-arm64 && bun run build:darwin-x64 && bun run build:linux-x64 && bun run build:linux-arm64",
|
|
24
|
+
"build:darwin-arm64": "bun build src/cli.ts --compile --target=bun-darwin-arm64 --outfile dist/kodus-graph-darwin-arm64",
|
|
25
|
+
"build:darwin-x64": "bun build src/cli.ts --compile --target=bun-darwin-x64 --outfile dist/kodus-graph-darwin-x64",
|
|
26
|
+
"build:linux-x64": "bun build src/cli.ts --compile --target=bun-linux-x64 --outfile dist/kodus-graph-linux-x64",
|
|
27
|
+
"build:linux-arm64": "bun build src/cli.ts --compile --target=bun-linux-arm64 --outfile dist/kodus-graph-linux-arm64"
|
|
28
|
+
},
|
|
29
|
+
"keywords": [
|
|
30
|
+
"code-graph",
|
|
31
|
+
"ast",
|
|
32
|
+
"code-review",
|
|
33
|
+
"static-analysis",
|
|
34
|
+
"kodus"
|
|
35
|
+
],
|
|
36
|
+
"author": "Kodus AI",
|
|
37
|
+
"license": "MIT",
|
|
38
|
+
"repository": {
|
|
39
|
+
"type": "git",
|
|
40
|
+
"url": "https://github.com/kodustech/kodus-graph.git"
|
|
41
|
+
},
|
|
42
|
+
"engines": {
|
|
43
|
+
"bun": ">=1.0.0"
|
|
44
|
+
},
|
|
45
|
+
"dependencies": {
|
|
46
|
+
"@ast-grep/lang-csharp": "^0.0.6",
|
|
47
|
+
"@ast-grep/lang-go": "^0.0.6",
|
|
48
|
+
"@ast-grep/lang-java": "^0.0.7",
|
|
49
|
+
"@ast-grep/lang-php": "^0.0.7",
|
|
50
|
+
"@ast-grep/lang-python": "^0.0.6",
|
|
51
|
+
"@ast-grep/lang-ruby": "^0.0.7",
|
|
52
|
+
"@ast-grep/lang-rust": "^0.0.7",
|
|
53
|
+
"@ast-grep/napi": "^0.42.0",
|
|
54
|
+
"commander": "^12.0.0",
|
|
55
|
+
"zod": "^4.3.6"
|
|
56
|
+
},
|
|
57
|
+
"devDependencies": {
|
|
58
|
+
"@biomejs/biome": "^2.4.10",
|
|
59
|
+
"@types/bun": "latest",
|
|
60
|
+
"typescript": "^5.5.0"
|
|
61
|
+
}
|
|
62
|
+
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import type { BlastRadiusResult, GraphData } from '../graph/types';
|
|
2
|
+
|
|
3
|
+
export function computeBlastRadius(graph: GraphData, changedFiles: string[], maxDepth: number = 2): BlastRadiusResult {
|
|
4
|
+
// Build adjacency list from CALLS edges (callers of changed nodes)
|
|
5
|
+
const adj = new Map<string, Set<string>>();
|
|
6
|
+
for (const edge of graph.edges) {
|
|
7
|
+
if (edge.kind !== 'CALLS' && edge.kind !== 'IMPORTS') continue;
|
|
8
|
+
// Reverse direction: target -> source (who calls/imports this?)
|
|
9
|
+
if (!adj.has(edge.target_qualified)) adj.set(edge.target_qualified, new Set());
|
|
10
|
+
adj.get(edge.target_qualified)!.add(edge.source_qualified);
|
|
11
|
+
// Forward direction too for IMPORTS
|
|
12
|
+
if (edge.kind === 'IMPORTS') {
|
|
13
|
+
if (!adj.has(edge.source_qualified)) adj.set(edge.source_qualified, new Set());
|
|
14
|
+
adj.get(edge.source_qualified)!.add(edge.target_qualified);
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
// Seed: all nodes in changed files
|
|
19
|
+
const changedSet = new Set(changedFiles);
|
|
20
|
+
const seeds = graph.nodes.filter((n) => changedSet.has(n.file_path)).map((n) => n.qualified_name);
|
|
21
|
+
|
|
22
|
+
// BFS
|
|
23
|
+
const visited = new Set<string>(seeds);
|
|
24
|
+
const byDepth: Record<string, string[]> = {};
|
|
25
|
+
let frontier = seeds;
|
|
26
|
+
|
|
27
|
+
for (let depth = 1; depth <= maxDepth; depth++) {
|
|
28
|
+
const next: string[] = [];
|
|
29
|
+
for (const node of frontier) {
|
|
30
|
+
for (const neighbor of adj.get(node) || []) {
|
|
31
|
+
if (!visited.has(neighbor)) {
|
|
32
|
+
visited.add(neighbor);
|
|
33
|
+
next.push(neighbor);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
if (next.length > 0) byDepth[String(depth)] = next;
|
|
38
|
+
frontier = next;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
// Count unique files
|
|
42
|
+
const nodeIndex = new Map(graph.nodes.map((n) => [n.qualified_name, n]));
|
|
43
|
+
const impactedFiles = new Set<string>();
|
|
44
|
+
for (const q of visited) {
|
|
45
|
+
const node = nodeIndex.get(q);
|
|
46
|
+
if (node) impactedFiles.add(node.file_path);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
return {
|
|
50
|
+
total_functions: visited.size,
|
|
51
|
+
total_files: impactedFiles.size,
|
|
52
|
+
by_depth: byDepth,
|
|
53
|
+
};
|
|
54
|
+
}
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
+
|
|
3
|
+
/** Tuning knobs for {@link detectCommunities}. */
export interface CommunityOptions {
  /** Number of leading path segments that define a community (directory depth). */
  depth: number;
  /** Communities with fewer nodes than this are dropped from the result. */
  minSize: number;
}

/** A directory-based cluster of graph nodes. */
export interface Community {
  /** Community key: the first `depth` path segments of the member files. */
  name: string;
  /** Sorted list of member file paths. */
  files: string[];
  /** Number of graph nodes belonging to the community. */
  node_count: number;
  /** Internal CALLS/IMPORTS edges over possible directed node pairs, rounded to 2 decimals. */
  cohesion: number;
  /** Most frequent node language within the community. */
  language: string;
}

/** Cross-community edge volume between two communities. */
export interface CouplingPair {
  source: string;
  target: string;
  /** Number of CALLS/IMPORTS edges crossing between the pair. */
  edges: number;
  /** Share of the pair's outgoing edges that cross: >0.3 HIGH, >0.1 MEDIUM, else LOW. */
  strength: 'HIGH' | 'MEDIUM' | 'LOW';
}

/** Output of {@link detectCommunities}. */
export interface CommunitiesResult {
  /** Communities sorted by node_count, descending. */
  communities: Community[];
  /** Coupling pairs sorted by edge count, descending. */
  coupling: CouplingPair[];
  summary: { total_communities: number; avg_cohesion: number; high_coupling_pairs: number };
}
|
|
28
|
+
|
|
29
|
+
function getCommunityKey(filePath: string, depth: number): string {
|
|
30
|
+
const parts = filePath.split('/');
|
|
31
|
+
return parts.slice(0, depth).join('/');
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export function detectCommunities(graph: IndexedGraph, opts: CommunityOptions): CommunitiesResult {
|
|
35
|
+
const { depth, minSize } = opts;
|
|
36
|
+
|
|
37
|
+
// Group nodes by directory
|
|
38
|
+
const groups = new Map<string, Set<string>>(); // community -> files
|
|
39
|
+
const nodeComm = new Map<string, string>(); // qualified_name -> community
|
|
40
|
+
|
|
41
|
+
for (const node of graph.nodes) {
|
|
42
|
+
const key = getCommunityKey(node.file_path, depth);
|
|
43
|
+
if (!groups.has(key)) groups.set(key, new Set());
|
|
44
|
+
groups.get(key)!.add(node.file_path);
|
|
45
|
+
nodeComm.set(node.qualified_name, key);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Count internal and cross edges per community pair
|
|
49
|
+
const internalEdges = new Map<string, number>();
|
|
50
|
+
const crossEdges = new Map<string, number>(); // "a|b" -> count
|
|
51
|
+
|
|
52
|
+
for (const edge of graph.edges) {
|
|
53
|
+
if (edge.kind !== 'CALLS' && edge.kind !== 'IMPORTS') continue;
|
|
54
|
+
const srcComm = nodeComm.get(edge.source_qualified);
|
|
55
|
+
const tgtComm = nodeComm.get(edge.target_qualified);
|
|
56
|
+
if (!srcComm || !tgtComm) continue;
|
|
57
|
+
|
|
58
|
+
if (srcComm === tgtComm) {
|
|
59
|
+
internalEdges.set(srcComm, (internalEdges.get(srcComm) || 0) + 1);
|
|
60
|
+
} else {
|
|
61
|
+
const pairKey = [srcComm, tgtComm].sort().join('|');
|
|
62
|
+
crossEdges.set(pairKey, (crossEdges.get(pairKey) || 0) + 1);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
// Build communities
|
|
67
|
+
const communities: Community[] = [];
|
|
68
|
+
for (const [name, files] of groups) {
|
|
69
|
+
const nodeCount = graph.nodes.filter((n) => getCommunityKey(n.file_path, depth) === name).length;
|
|
70
|
+
if (nodeCount < minSize) continue;
|
|
71
|
+
|
|
72
|
+
const internal = internalEdges.get(name) || 0;
|
|
73
|
+
const maxPossible = nodeCount * (nodeCount - 1);
|
|
74
|
+
const cohesion = maxPossible > 0 ? Math.round((internal / maxPossible) * 100) / 100 : 0;
|
|
75
|
+
|
|
76
|
+
const langs = new Map<string, number>();
|
|
77
|
+
for (const n of graph.nodes) {
|
|
78
|
+
if (getCommunityKey(n.file_path, depth) === name) {
|
|
79
|
+
langs.set(n.language, (langs.get(n.language) || 0) + 1);
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
let dominant = 'unknown';
|
|
83
|
+
let maxCount = 0;
|
|
84
|
+
for (const [lang, count] of langs) {
|
|
85
|
+
if (count > maxCount) {
|
|
86
|
+
dominant = lang;
|
|
87
|
+
maxCount = count;
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
communities.push({
|
|
92
|
+
name,
|
|
93
|
+
files: [...files].sort(),
|
|
94
|
+
node_count: nodeCount,
|
|
95
|
+
cohesion,
|
|
96
|
+
language: dominant,
|
|
97
|
+
});
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
communities.sort((a, b) => b.node_count - a.node_count);
|
|
101
|
+
|
|
102
|
+
// Build coupling pairs
|
|
103
|
+
const communityNames = new Set(communities.map((c) => c.name));
|
|
104
|
+
const coupling: CouplingPair[] = [];
|
|
105
|
+
for (const [pairKey, count] of crossEdges) {
|
|
106
|
+
const [src, tgt] = pairKey.split('|');
|
|
107
|
+
if (!communityNames.has(src) || !communityNames.has(tgt)) continue;
|
|
108
|
+
|
|
109
|
+
const srcTotal = graph.edges.filter((e) => {
|
|
110
|
+
const c = nodeComm.get(e.source_qualified);
|
|
111
|
+
return c === src || c === tgt;
|
|
112
|
+
}).length;
|
|
113
|
+
const ratio = srcTotal > 0 ? count / srcTotal : 0;
|
|
114
|
+
const strength = ratio > 0.3 ? 'HIGH' : ratio > 0.1 ? 'MEDIUM' : 'LOW';
|
|
115
|
+
|
|
116
|
+
coupling.push({ source: src, target: tgt, edges: count, strength });
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
coupling.sort((a, b) => b.edges - a.edges);
|
|
120
|
+
|
|
121
|
+
const avgCohesion =
|
|
122
|
+
communities.length > 0
|
|
123
|
+
? Math.round((communities.reduce((s, c) => s + c.cohesion, 0) / communities.length) * 100) / 100
|
|
124
|
+
: 0;
|
|
125
|
+
|
|
126
|
+
return {
|
|
127
|
+
communities,
|
|
128
|
+
coupling,
|
|
129
|
+
summary: {
|
|
130
|
+
total_communities: communities.length,
|
|
131
|
+
avg_cohesion: avgCohesion,
|
|
132
|
+
high_coupling_pairs: coupling.filter((c) => c.strength === 'HIGH').length,
|
|
133
|
+
},
|
|
134
|
+
};
|
|
135
|
+
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
+
import type { GraphEdge, GraphNode } from '../graph/types';
|
|
3
|
+
|
|
4
|
+
/** Location/identity snapshot of a node that was added or removed. */
export interface NodeChange {
  qualified_name: string;
  kind: string;
  file_path: string;
  line_start: number;
  line_end: number;
}

/** A node present in both graph versions whose signature or span changed. */
export interface ModifiedNode {
  qualified_name: string;
  /** Which aspects changed: 'line_range', 'params' and/or 'return_type'. */
  changes: string[];
}

/** Output of {@link computeStructuralDiff}. */
export interface DiffResult {
  changed_files: string[];
  summary: { added: number; removed: number; modified: number };
  nodes: { added: NodeChange[]; removed: NodeChange[]; modified: ModifiedNode[] };
  /** Edges are compared by identity (kind + source + target) within changed files. */
  edges: {
    added: Pick<GraphEdge, 'kind' | 'source_qualified' | 'target_qualified'>[];
    removed: Pick<GraphEdge, 'kind' | 'source_qualified' | 'target_qualified'>[];
  };
  /** Per changed file: count of distinct outside dependents and a risk bucket (>=10 HIGH, >=3 MEDIUM). */
  risk_by_file: Record<string, { dependents: number; risk: 'HIGH' | 'MEDIUM' | 'LOW' }>;
}
|
|
27
|
+
|
|
28
|
+
export function computeStructuralDiff(
|
|
29
|
+
oldGraph: IndexedGraph,
|
|
30
|
+
newNodes: GraphNode[],
|
|
31
|
+
newEdges: GraphEdge[],
|
|
32
|
+
changedFiles: string[],
|
|
33
|
+
): DiffResult {
|
|
34
|
+
const changedSet = new Set(changedFiles);
|
|
35
|
+
|
|
36
|
+
// Old nodes in changed files
|
|
37
|
+
const oldNodesInChanged = new Map<string, GraphNode>();
|
|
38
|
+
for (const n of oldGraph.nodes) {
|
|
39
|
+
if (changedSet.has(n.file_path)) oldNodesInChanged.set(n.qualified_name, n);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// New nodes in changed files
|
|
43
|
+
const newNodesMap = new Map<string, GraphNode>();
|
|
44
|
+
for (const n of newNodes) {
|
|
45
|
+
if (changedSet.has(n.file_path)) newNodesMap.set(n.qualified_name, n);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Classify nodes
|
|
49
|
+
const added: NodeChange[] = [];
|
|
50
|
+
const removed: NodeChange[] = [];
|
|
51
|
+
const modified: ModifiedNode[] = [];
|
|
52
|
+
|
|
53
|
+
for (const [qn, n] of newNodesMap) {
|
|
54
|
+
if (!oldNodesInChanged.has(qn)) {
|
|
55
|
+
added.push({
|
|
56
|
+
qualified_name: qn,
|
|
57
|
+
kind: n.kind,
|
|
58
|
+
file_path: n.file_path,
|
|
59
|
+
line_start: n.line_start,
|
|
60
|
+
line_end: n.line_end,
|
|
61
|
+
});
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
for (const [qn, n] of oldNodesInChanged) {
|
|
66
|
+
if (!newNodesMap.has(qn)) {
|
|
67
|
+
removed.push({
|
|
68
|
+
qualified_name: qn,
|
|
69
|
+
kind: n.kind,
|
|
70
|
+
file_path: n.file_path,
|
|
71
|
+
line_start: n.line_start,
|
|
72
|
+
line_end: n.line_end,
|
|
73
|
+
});
|
|
74
|
+
} else {
|
|
75
|
+
const newN = newNodesMap.get(qn)!;
|
|
76
|
+
const changes: string[] = [];
|
|
77
|
+
if (n.line_start !== newN.line_start || n.line_end !== newN.line_end) changes.push('line_range');
|
|
78
|
+
if ((n.params || '') !== (newN.params || '')) changes.push('params');
|
|
79
|
+
if ((n.return_type || '') !== (newN.return_type || '')) changes.push('return_type');
|
|
80
|
+
if (changes.length > 0) modified.push({ qualified_name: qn, changes });
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Classify edges
|
|
85
|
+
const oldEdgesInChanged = oldGraph.edges.filter((e) => changedSet.has(e.file_path));
|
|
86
|
+
const oldEdgeKeys = new Set(oldEdgesInChanged.map((e) => `${e.kind}|${e.source_qualified}|${e.target_qualified}`));
|
|
87
|
+
const newEdgesInChanged = newEdges.filter((e) => changedSet.has(e.file_path));
|
|
88
|
+
const newEdgeKeys = new Set(newEdgesInChanged.map((e) => `${e.kind}|${e.source_qualified}|${e.target_qualified}`));
|
|
89
|
+
|
|
90
|
+
const addedEdges = newEdgesInChanged
|
|
91
|
+
.filter((e) => !oldEdgeKeys.has(`${e.kind}|${e.source_qualified}|${e.target_qualified}`))
|
|
92
|
+
.map((e) => ({ kind: e.kind, source_qualified: e.source_qualified, target_qualified: e.target_qualified }));
|
|
93
|
+
|
|
94
|
+
const removedEdges = oldEdgesInChanged
|
|
95
|
+
.filter((e) => !newEdgeKeys.has(`${e.kind}|${e.source_qualified}|${e.target_qualified}`))
|
|
96
|
+
.map((e) => ({ kind: e.kind, source_qualified: e.source_qualified, target_qualified: e.target_qualified }));
|
|
97
|
+
|
|
98
|
+
// Risk by file: count unique dependents via reverse adjacency
|
|
99
|
+
const riskByFile: Record<string, { dependents: number; risk: 'HIGH' | 'MEDIUM' | 'LOW' }> = {};
|
|
100
|
+
for (const file of changedFiles) {
|
|
101
|
+
const nodesInFile = oldGraph.byFile.get(file) || [];
|
|
102
|
+
const dependents = new Set<string>();
|
|
103
|
+
for (const n of nodesInFile) {
|
|
104
|
+
for (const edge of oldGraph.reverseAdjacency.get(n.qualified_name) || []) {
|
|
105
|
+
if (!changedSet.has(edge.file_path)) dependents.add(edge.source_qualified);
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
const count = dependents.size;
|
|
109
|
+
const risk = count >= 10 ? 'HIGH' : count >= 3 ? 'MEDIUM' : 'LOW';
|
|
110
|
+
riskByFile[file] = { dependents: count, risk };
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return {
|
|
114
|
+
changed_files: changedFiles,
|
|
115
|
+
summary: { added: added.length, removed: removed.length, modified: modified.length },
|
|
116
|
+
nodes: { added, removed, modified },
|
|
117
|
+
edges: { added: addedEdges, removed: removedEdges },
|
|
118
|
+
risk_by_file: riskByFile,
|
|
119
|
+
};
|
|
120
|
+
}
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
+
|
|
3
|
+
/** Options for {@link detectFlows}. */
export interface FlowOptions {
  /** Maximum BFS depth when walking CALLS edges from an entry point. */
  maxDepth: number;
  /** Which entry-point kinds to trace ('all' traces both). */
  type: 'test' | 'http' | 'all';
}

/** One traced execution flow starting at a test or HTTP handler. */
export interface Flow {
  entry_point: string;
  type: 'test' | 'http';
  /** Number of BFS levels actually reached (<= maxDepth). */
  depth: number;
  /** Distinct functions reached, entry point included. */
  node_count: number;
  /** Distinct files touched by the flow. */
  file_count: number;
  /** node_count * file_count — used to rank flows. */
  criticality: number;
  /** Qualified names in BFS discovery order, entry point first. */
  path: string[];
}

/** Output of {@link detectFlows}; flows sorted by criticality, descending. */
export interface FlowsResult {
  flows: Flow[];
  summary: {
    total_flows: number;
    by_type: { test: number; http: number };
    avg_depth: number;
    max_criticality: number;
  };
}
|
|
27
|
+
|
|
28
|
+
const HTTP_METHOD_NAMES = new Set(['get', 'post', 'put', 'delete', 'patch', 'handle', 'handler']);
|
|
29
|
+
|
|
30
|
+
function isHttpHandler(_qualifiedName: string, name: string, parentName?: string): boolean {
|
|
31
|
+
if (HTTP_METHOD_NAMES.has(name.toLowerCase())) return true;
|
|
32
|
+
if (parentName?.toLowerCase().endsWith('controller')) return true;
|
|
33
|
+
return false;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export function detectFlows(graph: IndexedGraph, opts: FlowOptions): FlowsResult {
|
|
37
|
+
const { maxDepth, type } = opts;
|
|
38
|
+
|
|
39
|
+
// Find entry points
|
|
40
|
+
const entryPoints: { qualified: string; type: 'test' | 'http' }[] = [];
|
|
41
|
+
|
|
42
|
+
for (const node of graph.nodes) {
|
|
43
|
+
if (type !== 'http' && node.kind === 'Test') {
|
|
44
|
+
entryPoints.push({ qualified: node.qualified_name, type: 'test' });
|
|
45
|
+
}
|
|
46
|
+
if (type !== 'test' && (node.kind === 'Method' || node.kind === 'Function')) {
|
|
47
|
+
if (isHttpHandler(node.qualified_name, node.name, node.parent_name)) {
|
|
48
|
+
entryPoints.push({ qualified: node.qualified_name, type: 'http' });
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// BFS for each entry point
|
|
54
|
+
const flows: Flow[] = [];
|
|
55
|
+
|
|
56
|
+
for (const ep of entryPoints) {
|
|
57
|
+
const path: string[] = [ep.qualified];
|
|
58
|
+
const visited = new Set<string>([ep.qualified]);
|
|
59
|
+
const files = new Set<string>();
|
|
60
|
+
|
|
61
|
+
const startNode = graph.byQualified.get(ep.qualified);
|
|
62
|
+
if (startNode) files.add(startNode.file_path);
|
|
63
|
+
|
|
64
|
+
let frontier = [ep.qualified];
|
|
65
|
+
let depth = 0;
|
|
66
|
+
|
|
67
|
+
while (frontier.length > 0 && depth < maxDepth) {
|
|
68
|
+
const next: string[] = [];
|
|
69
|
+
for (const q of frontier) {
|
|
70
|
+
for (const edge of graph.adjacency.get(q) || []) {
|
|
71
|
+
if (edge.kind !== 'CALLS') continue;
|
|
72
|
+
if (visited.has(edge.target_qualified)) continue;
|
|
73
|
+
visited.add(edge.target_qualified);
|
|
74
|
+
next.push(edge.target_qualified);
|
|
75
|
+
path.push(edge.target_qualified);
|
|
76
|
+
const targetNode = graph.byQualified.get(edge.target_qualified);
|
|
77
|
+
if (targetNode) files.add(targetNode.file_path);
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
if (next.length === 0) break;
|
|
81
|
+
frontier = next;
|
|
82
|
+
depth++;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
flows.push({
|
|
86
|
+
entry_point: ep.qualified,
|
|
87
|
+
type: ep.type,
|
|
88
|
+
depth,
|
|
89
|
+
node_count: visited.size,
|
|
90
|
+
file_count: files.size,
|
|
91
|
+
criticality: visited.size * files.size,
|
|
92
|
+
path,
|
|
93
|
+
});
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
flows.sort((a, b) => b.criticality - a.criticality);
|
|
97
|
+
|
|
98
|
+
const testFlows = flows.filter((f) => f.type === 'test').length;
|
|
99
|
+
const httpFlows = flows.filter((f) => f.type === 'http').length;
|
|
100
|
+
const avgDepth = flows.length > 0 ? Math.round((flows.reduce((s, f) => s + f.depth, 0) / flows.length) * 10) / 10 : 0;
|
|
101
|
+
const maxCriticality = flows.length > 0 ? flows[0].criticality : 0;
|
|
102
|
+
|
|
103
|
+
return {
|
|
104
|
+
flows,
|
|
105
|
+
summary: {
|
|
106
|
+
total_flows: flows.length,
|
|
107
|
+
by_type: { test: testFlows, http: httpFlows },
|
|
108
|
+
avg_depth: avgDepth,
|
|
109
|
+
max_criticality: maxCriticality,
|
|
110
|
+
},
|
|
111
|
+
};
|
|
112
|
+
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import type { ContextOutput, GraphData } from '../graph/types';
|
|
2
|
+
import { computeBlastRadius } from './blast-radius';
|
|
3
|
+
import { computeRiskScore } from './risk-score';
|
|
4
|
+
import { findTestGaps } from './test-gaps';
|
|
5
|
+
|
|
6
|
+
/**
 * Builds the human-readable review context for a change set: for every
 * changed function it lists callers, callees, and test coverage, then appends
 * blast radius, an overall risk score, and untested-function gaps.
 *
 * @param graph        Full parsed graph (nodes + edges).
 * @param changedFiles File paths touched by the change under review.
 * @returns The rendered text plus machine-readable metadata counters.
 */
export function buildReviewContext(graph: GraphData, changedFiles: string[]): ContextOutput {
  const changedSet = new Set(changedFiles);
  const lines: string[] = [];

  // Build caller/callee index from CALLS edges.
  // Edges below 0.5 confidence are skipped entirely (see the loop below).
  const callersOf = new Map<string, Array<{ name: string; file: string; line: number; confidence: number }>>();
  const calleesOf = new Map<
    string,
    Array<{ name: string; target: string; file: string; line: number; confidence: number }>
  >();

  // Index nodes by qualified name for O(1) lookups while walking edges.
  const nodeIndex = new Map(graph.nodes.map((n) => [n.qualified_name, n]));

  for (const edge of graph.edges) {
    if (edge.kind !== 'CALLS' || (edge.confidence ?? 0) < 0.5) continue;

    // callers: who calls edge.target
    if (!callersOf.has(edge.target_qualified)) callersOf.set(edge.target_qualified, []);
    const sourceNode = nodeIndex.get(edge.source_qualified);
    callersOf.get(edge.target_qualified)!.push({
      // Fall back to the last '::' segment when the source node is unknown.
      name: sourceNode?.name || edge.source_qualified.split('::').pop() || 'unknown',
      file: sourceNode?.file_path || edge.file_path,
      line: edge.line,
      confidence: edge.confidence || 0,
    });

    // callees: what does source call
    if (!calleesOf.has(edge.source_qualified)) calleesOf.set(edge.source_qualified, []);
    const targetNode = nodeIndex.get(edge.target_qualified);
    calleesOf.get(edge.source_qualified)!.push({
      name: targetNode?.name || edge.target_qualified.split('::').pop() || 'unknown',
      target: edge.target_qualified,
      file: targetNode?.file_path || '',
      line: edge.line,
      confidence: edge.confidence || 0,
    });
  }

  // TESTED_BY index.
  // NOTE(review): source_qualified of TESTED_BY edges is later compared
  // against node file_paths — presumably those edges carry file paths as
  // their source; confirm against the edge builder.
  const testedFiles = new Set(graph.edges.filter((e) => e.kind === 'TESTED_BY').map((e) => e.source_qualified));

  lines.push('Changed functions (AST analysis):\n');

  // Functions in changed files: skip tests and type-level declarations,
  // ordered by file then line for a stable report.
  const changedFunctions = graph.nodes
    .filter(
      (n) =>
        changedSet.has(n.file_path) && !n.is_test && n.kind !== 'Class' && n.kind !== 'Interface' && n.kind !== 'Enum',
    )
    .sort((a, b) => a.file_path.localeCompare(b.file_path) || a.line_start - b.line_start);

  let callerCount = 0;
  let calleeCount = 0;

  for (const func of changedFunctions) {
    // Constructors are excluded from the report (but remain in changedFunctions;
    // the counts below re-filter them out).
    if (func.kind === 'Constructor') continue;

    // Header line: signature (params/return when known) and location.
    const shortName = func.name.includes('.') ? func.name.split('.').pop()! : func.name;
    const sig = func.params && func.params !== '()' ? `${shortName}${func.params}` : shortName;
    const ret = func.return_type ? ` -> ${func.return_type}` : '';
    lines.push(`${sig}${ret} (${func.file_path}:${func.line_start})`);

    // Callers: first 5; lower-confidence edges are annotated with a percentage.
    const callers = callersOf.get(func.qualified_name) || [];
    callerCount += callers.length;
    for (const caller of callers.slice(0, 5)) {
      const conf = caller.confidence >= 0.85 ? '' : ` [${Math.round(caller.confidence * 100)}%]`;
      lines.push(` ← called by ${caller.name} (${caller.file}:${caller.line})${conf}`);
    }
    if (callers.length > 5) lines.push(` ← ... and ${callers.length - 5} more callers`);

    // Callees: first 5 call sites, deduplicated by target.
    // NOTE(review): dedup happens after slice(0, 5), so duplicate call sites
    // consume display slots — fewer than 5 unique callees may be shown.
    const callees = calleesOf.get(func.qualified_name) || [];
    calleeCount += callees.length;
    const seenCallees = new Set<string>();
    for (const callee of callees.slice(0, 5)) {
      if (seenCallees.has(callee.target)) continue;
      seenCallees.add(callee.target);
      const calleeNode = nodeIndex.get(callee.target);
      if (calleeNode) {
        const calleeSig =
          calleeNode.params && calleeNode.params !== '()' ? `${callee.name}${calleeNode.params}` : callee.name;
        const calleeRet = calleeNode.return_type ? ` -> ${calleeNode.return_type}` : '';
        lines.push(` → calls ${calleeSig}${calleeRet} (${calleeNode.file_path}:${calleeNode.line_start})`);
      } else {
        // Target not in the graph — likely an external/library call.
        lines.push(` → calls ${callee.name} (${callee.file || 'external'})`);
      }
    }

    // Test coverage (file-level, via the TESTED_BY index above).
    if (testedFiles.has(func.file_path)) {
      lines.push(` ✅ has test coverage`);
    } else {
      lines.push(` ⚠ NO TEST COVERAGE`);
    }

    lines.push('');
  }

  // Blast radius: only reported when files beyond the change set are impacted.
  const blastRadius = computeBlastRadius(graph, changedFiles);
  if (blastRadius.total_files > changedFiles.length) {
    lines.push(
      `Blast radius: ${changedFunctions.filter((f) => f.kind !== 'Constructor').length} changed functions impact ${blastRadius.total_files - changedFiles.length} other files`,
    );
  }

  // Risk score (always reported).
  const riskScore = computeRiskScore(graph, changedFiles, blastRadius);
  lines.push(`\nRisk: ${riskScore.level} (${riskScore.score})`);

  // Test gaps: list up to 10 untested changed functions.
  const testGaps = findTestGaps(graph, changedFiles);
  const untestedCount = testGaps.length;
  if (untestedCount > 0) {
    lines.push(`\n⚠ ${untestedCount} changed function(s) without test coverage:`);
    for (const gap of testGaps.slice(0, 10)) {
      const shortName = gap.function.split('::').pop() || gap.function;
      lines.push(` ${shortName} (${gap.file_path}:${gap.line_start})`);
    }
  }

  return {
    text: lines.join('\n'),
    metadata: {
      changed_functions: changedFunctions.filter((f) => f.kind !== 'Constructor').length,
      caller_count: callerCount,
      callee_count: calleeCount,
      untested_count: untestedCount,
      blast_radius: { functions: blastRadius.total_functions, files: blastRadius.total_files },
      risk_level: riskScore.level,
      risk_score: riskScore.score,
    },
  };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import type { BlastRadiusResult, GraphData, RiskScoreResult } from '../graph/types';
|
|
2
|
+
|
|
3
|
+
export function computeRiskScore(
|
|
4
|
+
graph: GraphData,
|
|
5
|
+
changedFiles: string[],
|
|
6
|
+
blastRadius: BlastRadiusResult,
|
|
7
|
+
): RiskScoreResult {
|
|
8
|
+
const changedSet = new Set(changedFiles);
|
|
9
|
+
const changedNodes = graph.nodes.filter((n) => changedSet.has(n.file_path) && !n.is_test);
|
|
10
|
+
|
|
11
|
+
// Factor 1: Blast radius (0.35)
|
|
12
|
+
const brValue = Math.min(blastRadius.total_functions / 20, 1); // cap at 20
|
|
13
|
+
|
|
14
|
+
// Factor 2: Test gaps (0.30)
|
|
15
|
+
const testedFiles = new Set(graph.edges.filter((e) => e.kind === 'TESTED_BY').map((e) => e.source_qualified));
|
|
16
|
+
const changedFunctions = changedNodes.filter((n) => n.kind === 'Function' || n.kind === 'Method');
|
|
17
|
+
const untestedCount = changedFunctions.filter((n) => !testedFiles.has(n.file_path)).length;
|
|
18
|
+
const tgValue = changedFunctions.length > 0 ? untestedCount / changedFunctions.length : 0;
|
|
19
|
+
|
|
20
|
+
// Factor 3: Complexity (0.20)
|
|
21
|
+
const avgSize =
|
|
22
|
+
changedNodes.length > 0
|
|
23
|
+
? changedNodes.reduce((s, n) => s + (n.line_end - n.line_start), 0) / changedNodes.length
|
|
24
|
+
: 0;
|
|
25
|
+
const cxValue = Math.min(avgSize / 50, 1); // cap at 50 lines
|
|
26
|
+
|
|
27
|
+
// Factor 4: Inheritance (0.15)
|
|
28
|
+
const hasInheritance = graph.edges.some(
|
|
29
|
+
(e) => (e.kind === 'INHERITS' || e.kind === 'IMPLEMENTS') && changedSet.has(e.file_path),
|
|
30
|
+
);
|
|
31
|
+
const ihValue = hasInheritance ? 1 : 0;
|
|
32
|
+
|
|
33
|
+
const score = brValue * 0.35 + tgValue * 0.3 + cxValue * 0.2 + ihValue * 0.15;
|
|
34
|
+
const level = score >= 0.6 ? 'HIGH' : score >= 0.3 ? 'MEDIUM' : 'LOW';
|
|
35
|
+
|
|
36
|
+
return {
|
|
37
|
+
level,
|
|
38
|
+
score: Math.round(score * 100) / 100,
|
|
39
|
+
factors: {
|
|
40
|
+
blast_radius: {
|
|
41
|
+
weight: 0.35,
|
|
42
|
+
value: Math.round(brValue * 100) / 100,
|
|
43
|
+
detail: `${blastRadius.total_functions} functions, ${blastRadius.total_files} files`,
|
|
44
|
+
},
|
|
45
|
+
test_gaps: {
|
|
46
|
+
weight: 0.3,
|
|
47
|
+
value: Math.round(tgValue * 100) / 100,
|
|
48
|
+
detail: `${untestedCount}/${changedFunctions.length} untested`,
|
|
49
|
+
},
|
|
50
|
+
complexity: {
|
|
51
|
+
weight: 0.2,
|
|
52
|
+
value: Math.round(cxValue * 100) / 100,
|
|
53
|
+
detail: `avg ${Math.round(avgSize)} lines`,
|
|
54
|
+
},
|
|
55
|
+
inheritance: {
|
|
56
|
+
weight: 0.15,
|
|
57
|
+
value: ihValue,
|
|
58
|
+
detail: hasInheritance ? 'has inheritance' : 'no inheritance',
|
|
59
|
+
},
|
|
60
|
+
},
|
|
61
|
+
};
|
|
62
|
+
}
|