@kodus/kodus-graph 0.2.7 → 0.2.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +252 -0
- package/dist/analysis/blast-radius.d.ts +2 -0
- package/dist/analysis/blast-radius.js +57 -0
- package/dist/analysis/communities.d.ts +28 -0
- package/dist/analysis/communities.js +100 -0
- package/dist/analysis/context-builder.d.ts +34 -0
- package/dist/analysis/context-builder.js +83 -0
- package/dist/analysis/diff.d.ts +35 -0
- package/dist/analysis/diff.js +140 -0
- package/dist/analysis/enrich.d.ts +5 -0
- package/dist/analysis/enrich.js +98 -0
- package/dist/analysis/flows.d.ts +27 -0
- package/dist/analysis/flows.js +86 -0
- package/dist/analysis/inheritance.d.ts +3 -0
- package/dist/analysis/inheritance.js +31 -0
- package/dist/analysis/prompt-formatter.d.ts +2 -0
- package/dist/analysis/prompt-formatter.js +166 -0
- package/dist/analysis/risk-score.d.ts +4 -0
- package/dist/analysis/risk-score.js +51 -0
- package/dist/analysis/search.d.ts +11 -0
- package/dist/analysis/search.js +64 -0
- package/dist/analysis/test-gaps.d.ts +2 -0
- package/dist/analysis/test-gaps.js +14 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +208 -0
- package/dist/commands/analyze.d.ts +9 -0
- package/dist/commands/analyze.js +114 -0
- package/dist/commands/communities.d.ts +8 -0
- package/dist/commands/communities.js +9 -0
- package/dist/commands/context.d.ts +12 -0
- package/dist/commands/context.js +130 -0
- package/dist/commands/diff.d.ts +9 -0
- package/dist/commands/diff.js +89 -0
- package/dist/commands/flows.d.ts +8 -0
- package/dist/commands/flows.js +9 -0
- package/dist/commands/parse.d.ts +10 -0
- package/dist/commands/parse.js +101 -0
- package/dist/commands/search.d.ts +12 -0
- package/dist/commands/search.js +27 -0
- package/dist/commands/update.d.ts +7 -0
- package/dist/commands/update.js +154 -0
- package/dist/graph/builder.d.ts +2 -0
- package/dist/graph/builder.js +216 -0
- package/dist/graph/edges.d.ts +19 -0
- package/dist/graph/edges.js +105 -0
- package/dist/graph/json-writer.d.ts +9 -0
- package/dist/graph/json-writer.js +38 -0
- package/dist/graph/loader.d.ts +13 -0
- package/dist/graph/loader.js +101 -0
- package/dist/graph/merger.d.ts +7 -0
- package/dist/graph/merger.js +18 -0
- package/dist/graph/types.d.ts +249 -0
- package/dist/graph/types.js +1 -0
- package/dist/parser/batch.d.ts +4 -0
- package/dist/parser/batch.js +78 -0
- package/dist/parser/discovery.d.ts +7 -0
- package/dist/parser/discovery.js +61 -0
- package/dist/parser/extractor.d.ts +4 -0
- package/dist/parser/extractor.js +33 -0
- package/dist/parser/extractors/generic.d.ts +8 -0
- package/dist/parser/extractors/generic.js +471 -0
- package/dist/parser/extractors/python.d.ts +8 -0
- package/dist/parser/extractors/python.js +133 -0
- package/dist/parser/extractors/ruby.d.ts +8 -0
- package/dist/parser/extractors/ruby.js +153 -0
- package/dist/parser/extractors/typescript.d.ts +10 -0
- package/dist/parser/extractors/typescript.js +365 -0
- package/dist/parser/languages.d.ts +32 -0
- package/dist/parser/languages.js +303 -0
- package/dist/resolver/call-resolver.d.ts +36 -0
- package/dist/resolver/call-resolver.js +178 -0
- package/dist/resolver/import-map.d.ts +12 -0
- package/dist/resolver/import-map.js +21 -0
- package/dist/resolver/import-resolver.d.ts +19 -0
- package/dist/resolver/import-resolver.js +212 -0
- package/dist/resolver/languages/csharp.d.ts +1 -0
- package/dist/resolver/languages/csharp.js +31 -0
- package/dist/resolver/languages/go.d.ts +3 -0
- package/dist/resolver/languages/go.js +196 -0
- package/dist/resolver/languages/java.d.ts +1 -0
- package/dist/resolver/languages/java.js +108 -0
- package/dist/resolver/languages/php.d.ts +3 -0
- package/dist/resolver/languages/php.js +54 -0
- package/dist/resolver/languages/python.d.ts +11 -0
- package/dist/resolver/languages/python.js +51 -0
- package/dist/resolver/languages/ruby.d.ts +9 -0
- package/dist/resolver/languages/ruby.js +59 -0
- package/dist/resolver/languages/rust.d.ts +1 -0
- package/dist/resolver/languages/rust.js +196 -0
- package/dist/resolver/languages/typescript.d.ts +27 -0
- package/dist/resolver/languages/typescript.js +240 -0
- package/dist/resolver/re-export-resolver.d.ts +24 -0
- package/dist/resolver/re-export-resolver.js +57 -0
- package/dist/resolver/symbol-table.d.ts +17 -0
- package/dist/resolver/symbol-table.js +60 -0
- package/dist/shared/extract-calls.d.ts +26 -0
- package/dist/shared/extract-calls.js +57 -0
- package/dist/shared/file-hash.d.ts +3 -0
- package/dist/shared/file-hash.js +10 -0
- package/dist/shared/filters.d.ts +3 -0
- package/dist/shared/filters.js +240 -0
- package/dist/shared/logger.d.ts +6 -0
- package/dist/shared/logger.js +17 -0
- package/dist/shared/qualified-name.d.ts +1 -0
- package/dist/shared/qualified-name.js +9 -0
- package/dist/shared/safe-path.d.ts +6 -0
- package/dist/shared/safe-path.js +29 -0
- package/dist/shared/schemas.d.ts +43 -0
- package/dist/shared/schemas.js +30 -0
- package/dist/shared/temp.d.ts +11 -0
- package/{src/shared/temp.ts → dist/shared/temp.js} +4 -5
- package/package.json +20 -6
- package/src/analysis/blast-radius.ts +0 -54
- package/src/analysis/communities.ts +0 -135
- package/src/analysis/context-builder.ts +0 -130
- package/src/analysis/diff.ts +0 -131
- package/src/analysis/enrich.ts +0 -110
- package/src/analysis/flows.ts +0 -112
- package/src/analysis/inheritance.ts +0 -34
- package/src/analysis/prompt-formatter.ts +0 -175
- package/src/analysis/risk-score.ts +0 -62
- package/src/analysis/search.ts +0 -76
- package/src/analysis/test-gaps.ts +0 -21
- package/src/cli.ts +0 -207
- package/src/commands/analyze.ts +0 -128
- package/src/commands/communities.ts +0 -19
- package/src/commands/context.ts +0 -139
- package/src/commands/diff.ts +0 -96
- package/src/commands/flows.ts +0 -19
- package/src/commands/parse.ts +0 -124
- package/src/commands/search.ts +0 -41
- package/src/commands/update.ts +0 -166
- package/src/graph/builder.ts +0 -209
- package/src/graph/edges.ts +0 -101
- package/src/graph/json-writer.ts +0 -43
- package/src/graph/loader.ts +0 -113
- package/src/graph/merger.ts +0 -25
- package/src/graph/types.ts +0 -283
- package/src/parser/batch.ts +0 -82
- package/src/parser/discovery.ts +0 -75
- package/src/parser/extractor.ts +0 -37
- package/src/parser/extractors/generic.ts +0 -132
- package/src/parser/extractors/python.ts +0 -133
- package/src/parser/extractors/ruby.ts +0 -147
- package/src/parser/extractors/typescript.ts +0 -350
- package/src/parser/languages.ts +0 -122
- package/src/resolver/call-resolver.ts +0 -244
- package/src/resolver/import-map.ts +0 -27
- package/src/resolver/import-resolver.ts +0 -72
- package/src/resolver/languages/csharp.ts +0 -7
- package/src/resolver/languages/go.ts +0 -7
- package/src/resolver/languages/java.ts +0 -7
- package/src/resolver/languages/php.ts +0 -7
- package/src/resolver/languages/python.ts +0 -35
- package/src/resolver/languages/ruby.ts +0 -21
- package/src/resolver/languages/rust.ts +0 -7
- package/src/resolver/languages/typescript.ts +0 -168
- package/src/resolver/re-export-resolver.ts +0 -66
- package/src/resolver/symbol-table.ts +0 -67
- package/src/shared/extract-calls.ts +0 -75
- package/src/shared/file-hash.ts +0 -12
- package/src/shared/filters.ts +0 -243
- package/src/shared/logger.ts +0 -14
- package/src/shared/qualified-name.ts +0 -5
- package/src/shared/safe-path.ts +0 -31
- package/src/shared/schemas.ts +0 -32
|
@@ -1,54 +0,0 @@
|
|
|
1
|
-
import type { BlastRadiusResult, GraphData } from '../graph/types';
|
|
2
|
-
|
|
3
|
-
export function computeBlastRadius(graph: GraphData, changedFiles: string[], maxDepth: number = 2): BlastRadiusResult {
|
|
4
|
-
// Build adjacency list from CALLS edges (callers of changed nodes)
|
|
5
|
-
const adj = new Map<string, Set<string>>();
|
|
6
|
-
for (const edge of graph.edges) {
|
|
7
|
-
if (edge.kind !== 'CALLS' && edge.kind !== 'IMPORTS') continue;
|
|
8
|
-
// Reverse direction: target -> source (who calls/imports this?)
|
|
9
|
-
if (!adj.has(edge.target_qualified)) adj.set(edge.target_qualified, new Set());
|
|
10
|
-
adj.get(edge.target_qualified)!.add(edge.source_qualified);
|
|
11
|
-
// Forward direction too for IMPORTS
|
|
12
|
-
if (edge.kind === 'IMPORTS') {
|
|
13
|
-
if (!adj.has(edge.source_qualified)) adj.set(edge.source_qualified, new Set());
|
|
14
|
-
adj.get(edge.source_qualified)!.add(edge.target_qualified);
|
|
15
|
-
}
|
|
16
|
-
}
|
|
17
|
-
|
|
18
|
-
// Seed: all nodes in changed files
|
|
19
|
-
const changedSet = new Set(changedFiles);
|
|
20
|
-
const seeds = graph.nodes.filter((n) => changedSet.has(n.file_path)).map((n) => n.qualified_name);
|
|
21
|
-
|
|
22
|
-
// BFS
|
|
23
|
-
const visited = new Set<string>(seeds);
|
|
24
|
-
const byDepth: Record<string, string[]> = {};
|
|
25
|
-
let frontier = seeds;
|
|
26
|
-
|
|
27
|
-
for (let depth = 1; depth <= maxDepth; depth++) {
|
|
28
|
-
const next: string[] = [];
|
|
29
|
-
for (const node of frontier) {
|
|
30
|
-
for (const neighbor of adj.get(node) || []) {
|
|
31
|
-
if (!visited.has(neighbor)) {
|
|
32
|
-
visited.add(neighbor);
|
|
33
|
-
next.push(neighbor);
|
|
34
|
-
}
|
|
35
|
-
}
|
|
36
|
-
}
|
|
37
|
-
if (next.length > 0) byDepth[String(depth)] = next;
|
|
38
|
-
frontier = next;
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
// Count unique files
|
|
42
|
-
const nodeIndex = new Map(graph.nodes.map((n) => [n.qualified_name, n]));
|
|
43
|
-
const impactedFiles = new Set<string>();
|
|
44
|
-
for (const q of visited) {
|
|
45
|
-
const node = nodeIndex.get(q);
|
|
46
|
-
if (node) impactedFiles.add(node.file_path);
|
|
47
|
-
}
|
|
48
|
-
|
|
49
|
-
return {
|
|
50
|
-
total_functions: visited.size,
|
|
51
|
-
total_files: impactedFiles.size,
|
|
52
|
-
by_depth: byDepth,
|
|
53
|
-
};
|
|
54
|
-
}
|
|
@@ -1,135 +0,0 @@
|
|
|
1
|
-
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
-
|
|
3
|
-
/** Options controlling directory-based community detection. */
export interface CommunityOptions {
  /** Number of leading path segments that form a community key. */
  depth: number;
  /** Communities with fewer nodes than this are omitted from the result. */
  minSize: number;
}

/** A group of files sharing the same leading directory prefix. */
export interface Community {
  /** Community key: the joined leading path segments. */
  name: string;
  /** Sorted list of member file paths. */
  files: string[];
  /** Number of graph nodes defined in the member files. */
  node_count: number;
  /** Internal CALLS/IMPORTS edges divided by max possible directed pairs, rounded to 2 decimals. */
  cohesion: number;
  /** Most frequent node language in the community ('unknown' when none). */
  language: string;
}

/** Cross-community edge volume between two communities. */
export interface CouplingPair {
  source: string;
  target: string;
  /** Number of CALLS/IMPORTS edges crossing between the pair. */
  edges: number;
  /** Bucketed ratio of cross edges to the pair's total outgoing edges. */
  strength: 'HIGH' | 'MEDIUM' | 'LOW';
}

/** Result of detectCommunities: clusters, their coupling, and roll-up stats. */
export interface CommunitiesResult {
  communities: Community[];
  coupling: CouplingPair[];
  summary: { total_communities: number; avg_cohesion: number; high_coupling_pairs: number };
}
|
|
28
|
-
|
|
29
|
-
function getCommunityKey(filePath: string, depth: number): string {
|
|
30
|
-
const parts = filePath.split('/');
|
|
31
|
-
return parts.slice(0, depth).join('/');
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
export function detectCommunities(graph: IndexedGraph, opts: CommunityOptions): CommunitiesResult {
|
|
35
|
-
const { depth, minSize } = opts;
|
|
36
|
-
|
|
37
|
-
// Group nodes by directory
|
|
38
|
-
const groups = new Map<string, Set<string>>(); // community -> files
|
|
39
|
-
const nodeComm = new Map<string, string>(); // qualified_name -> community
|
|
40
|
-
|
|
41
|
-
for (const node of graph.nodes) {
|
|
42
|
-
const key = getCommunityKey(node.file_path, depth);
|
|
43
|
-
if (!groups.has(key)) groups.set(key, new Set());
|
|
44
|
-
groups.get(key)!.add(node.file_path);
|
|
45
|
-
nodeComm.set(node.qualified_name, key);
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
// Count internal and cross edges per community pair
|
|
49
|
-
const internalEdges = new Map<string, number>();
|
|
50
|
-
const crossEdges = new Map<string, number>(); // "a|b" -> count
|
|
51
|
-
|
|
52
|
-
for (const edge of graph.edges) {
|
|
53
|
-
if (edge.kind !== 'CALLS' && edge.kind !== 'IMPORTS') continue;
|
|
54
|
-
const srcComm = nodeComm.get(edge.source_qualified);
|
|
55
|
-
const tgtComm = nodeComm.get(edge.target_qualified);
|
|
56
|
-
if (!srcComm || !tgtComm) continue;
|
|
57
|
-
|
|
58
|
-
if (srcComm === tgtComm) {
|
|
59
|
-
internalEdges.set(srcComm, (internalEdges.get(srcComm) || 0) + 1);
|
|
60
|
-
} else {
|
|
61
|
-
const pairKey = [srcComm, tgtComm].sort().join('|');
|
|
62
|
-
crossEdges.set(pairKey, (crossEdges.get(pairKey) || 0) + 1);
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
|
|
66
|
-
// Build communities
|
|
67
|
-
const communities: Community[] = [];
|
|
68
|
-
for (const [name, files] of groups) {
|
|
69
|
-
const nodeCount = graph.nodes.filter((n) => getCommunityKey(n.file_path, depth) === name).length;
|
|
70
|
-
if (nodeCount < minSize) continue;
|
|
71
|
-
|
|
72
|
-
const internal = internalEdges.get(name) || 0;
|
|
73
|
-
const maxPossible = nodeCount * (nodeCount - 1);
|
|
74
|
-
const cohesion = maxPossible > 0 ? Math.round((internal / maxPossible) * 100) / 100 : 0;
|
|
75
|
-
|
|
76
|
-
const langs = new Map<string, number>();
|
|
77
|
-
for (const n of graph.nodes) {
|
|
78
|
-
if (getCommunityKey(n.file_path, depth) === name) {
|
|
79
|
-
langs.set(n.language, (langs.get(n.language) || 0) + 1);
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
let dominant = 'unknown';
|
|
83
|
-
let maxCount = 0;
|
|
84
|
-
for (const [lang, count] of langs) {
|
|
85
|
-
if (count > maxCount) {
|
|
86
|
-
dominant = lang;
|
|
87
|
-
maxCount = count;
|
|
88
|
-
}
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
communities.push({
|
|
92
|
-
name,
|
|
93
|
-
files: [...files].sort(),
|
|
94
|
-
node_count: nodeCount,
|
|
95
|
-
cohesion,
|
|
96
|
-
language: dominant,
|
|
97
|
-
});
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
communities.sort((a, b) => b.node_count - a.node_count);
|
|
101
|
-
|
|
102
|
-
// Build coupling pairs
|
|
103
|
-
const communityNames = new Set(communities.map((c) => c.name));
|
|
104
|
-
const coupling: CouplingPair[] = [];
|
|
105
|
-
for (const [pairKey, count] of crossEdges) {
|
|
106
|
-
const [src, tgt] = pairKey.split('|');
|
|
107
|
-
if (!communityNames.has(src) || !communityNames.has(tgt)) continue;
|
|
108
|
-
|
|
109
|
-
const srcTotal = graph.edges.filter((e) => {
|
|
110
|
-
const c = nodeComm.get(e.source_qualified);
|
|
111
|
-
return c === src || c === tgt;
|
|
112
|
-
}).length;
|
|
113
|
-
const ratio = srcTotal > 0 ? count / srcTotal : 0;
|
|
114
|
-
const strength = ratio > 0.3 ? 'HIGH' : ratio > 0.1 ? 'MEDIUM' : 'LOW';
|
|
115
|
-
|
|
116
|
-
coupling.push({ source: src, target: tgt, edges: count, strength });
|
|
117
|
-
}
|
|
118
|
-
|
|
119
|
-
coupling.sort((a, b) => b.edges - a.edges);
|
|
120
|
-
|
|
121
|
-
const avgCohesion =
|
|
122
|
-
communities.length > 0
|
|
123
|
-
? Math.round((communities.reduce((s, c) => s + c.cohesion, 0) / communities.length) * 100) / 100
|
|
124
|
-
: 0;
|
|
125
|
-
|
|
126
|
-
return {
|
|
127
|
-
communities,
|
|
128
|
-
coupling,
|
|
129
|
-
summary: {
|
|
130
|
-
total_communities: communities.length,
|
|
131
|
-
avg_cohesion: avgCohesion,
|
|
132
|
-
high_coupling_pairs: coupling.filter((c) => c.strength === 'HIGH').length,
|
|
133
|
-
},
|
|
134
|
-
};
|
|
135
|
-
}
|
|
@@ -1,130 +0,0 @@
|
|
|
1
|
-
import { performance } from 'perf_hooks';
|
|
2
|
-
import { type IndexedGraph, indexGraph } from '../graph/loader';
|
|
3
|
-
import type {
|
|
4
|
-
AffectedFlow,
|
|
5
|
-
ContextAnalysisMetadata,
|
|
6
|
-
GraphData,
|
|
7
|
-
GraphEdge,
|
|
8
|
-
GraphNode,
|
|
9
|
-
ParseMetadata,
|
|
10
|
-
} from '../graph/types';
|
|
11
|
-
import { computeBlastRadius } from './blast-radius';
|
|
12
|
-
import { computeStructuralDiff, type DiffResult } from './diff';
|
|
13
|
-
import { enrichChangedFunctions } from './enrich';
|
|
14
|
-
import { detectFlows } from './flows';
|
|
15
|
-
import { extractInheritance } from './inheritance';
|
|
16
|
-
import { computeRiskScore } from './risk-score';
|
|
17
|
-
import { findTestGaps } from './test-gaps';
|
|
18
|
-
|
|
19
|
-
/**
 * Full payload produced by buildContextV2: the merged graph itself plus the
 * derived analyses for a change set.
 */
export interface ContextV2Output {
  graph: {
    nodes: GraphNode[];
    edges: GraphEdge[];
    metadata: ParseMetadata;
  };
  analysis: {
    /** Changed functions enriched with callers/callees/flows/test info. */
    changed_functions: ReturnType<typeof enrichChangedFunctions>;
    /** Added/removed/modified nodes and edges within the changed files. */
    structural_diff: DiffResult;
    /** Reachability of the change set through CALLS/IMPORTS edges. */
    blast_radius: ReturnType<typeof computeBlastRadius>;
    /** Flows whose path touches a changed, non-test function. */
    affected_flows: AffectedFlow[];
    inheritance: ReturnType<typeof extractInheritance>;
    test_gaps: ReturnType<typeof findTestGaps>;
    risk: ReturnType<typeof computeRiskScore>;
    /** Counters and timing for this analysis run. */
    metadata: ContextAnalysisMetadata;
  };
}

/** Inputs for buildContextV2. */
interface BuildContextV2Options {
  /** The up-to-date merged graph (new state). */
  mergedGraph: GraphData;
  /** Previous graph state, or null when no baseline exists. */
  oldGraph: GraphData | null;
  /** File paths that changed in this revision. */
  changedFiles: string[];
  /** Minimum edge confidence for caller enrichment (passed to enrichChangedFunctions). */
  minConfidence: number;
  /** Maximum BFS depth for the blast-radius traversal. */
  maxDepth: number;
}
|
|
44
|
-
|
|
45
|
-
/**
 * Orchestrates the full "context v2" analysis for a change set: indexes the
 * merged graph, runs the independent analyses (structural diff, blast
 * radius, flows, test gaps, risk, inheritance), filters the flows touching
 * changed code, enriches the changed functions, and assembles the payload.
 *
 * @param opts - Graphs, changed files and tuning knobs (see BuildContextV2Options).
 * @returns The merged graph plus all derived analyses and run metadata.
 */
export function buildContextV2(opts: BuildContextV2Options): ContextV2Output {
  const t0 = performance.now();
  const { mergedGraph, oldGraph, changedFiles, minConfidence, maxDepth } = opts;

  // Phase 1: Index both graph states (empty baseline when no old graph exists).
  const indexed = indexGraph(mergedGraph);
  const oldIndexed: IndexedGraph = oldGraph ? indexGraph(oldGraph) : indexGraph({ nodes: [], edges: [] });

  // Phase 2: Independent analyses — none of these depend on each other's output
  // (blast radius feeds risk scoring below, computed in sequence here).
  const changedSet = new Set(changedFiles);
  const newNodesInChanged = mergedGraph.nodes.filter((n) => changedSet.has(n.file_path));
  const newEdgesInChanged = mergedGraph.edges.filter((e) => changedSet.has(e.file_path));

  const structuralDiff = computeStructuralDiff(oldIndexed, newNodesInChanged, newEdgesInChanged, changedFiles);
  const blastRadius = computeBlastRadius(mergedGraph, changedFiles, maxDepth);
  const allFlows = detectFlows(indexed, { maxDepth: 10, type: 'all' });
  const testGaps = findTestGaps(mergedGraph, changedFiles);
  const risk = computeRiskScore(mergedGraph, changedFiles, blastRadius);
  const inheritance = extractInheritance(indexed, changedFiles);

  // Phase 3: Keep only flows whose path touches a changed, non-test function.
  const changedFuncSet = new Set(
    mergedGraph.nodes.filter((n) => changedSet.has(n.file_path) && !n.is_test).map((n) => n.qualified_name),
  );

  const affectedFlows: AffectedFlow[] = [];
  for (const flow of allFlows.flows) {
    const touches = flow.path.filter((qn) => changedFuncSet.has(qn));
    if (touches.length > 0) {
      affectedFlows.push({
        entry_point: flow.entry_point,
        type: flow.type,
        touches_changed: touches,
        depth: flow.depth,
        path: flow.path,
      });
    }
  }

  // Phase 4: Enrich changed functions with callers/callees/flows/diff info.
  const enriched = enrichChangedFunctions(indexed, changedFiles, structuralDiff, allFlows.flows, minConfidence);

  // Phase 5: Assemble counters and the final payload.
  const totalCallers = enriched.reduce((s, f) => s + f.callers.length, 0);
  const totalCallees = enriched.reduce((s, f) => s + f.callees.length, 0);

  const metadata: ContextAnalysisMetadata = {
    changed_functions_count: enriched.length,
    total_callers: totalCallers,
    total_callees: totalCallees,
    untested_count: testGaps.length,
    affected_flows_count: affectedFlows.length,
    duration_ms: Math.round(performance.now() - t0),
    min_confidence: minConfidence,
  };

  // Reuse the indexed parse metadata when present; otherwise synthesize a
  // placeholder. NOTE(review): the truthiness check means repo_dir === ''
  // also triggers the placeholder path — confirm that is intended.
  const graphMetadata: ParseMetadata = indexed.metadata.repo_dir
    ? indexed.metadata
    : {
        repo_dir: '',
        files_parsed: changedFiles.length,
        total_nodes: mergedGraph.nodes.length,
        total_edges: mergedGraph.edges.length,
        duration_ms: 0,
        parse_errors: 0,
        extract_errors: 0,
      };

  return {
    graph: {
      nodes: mergedGraph.nodes,
      edges: mergedGraph.edges,
      metadata: graphMetadata,
    },
    analysis: {
      changed_functions: enriched,
      structural_diff: structuralDiff,
      blast_radius: blastRadius,
      affected_flows: affectedFlows,
      inheritance,
      test_gaps: testGaps,
      risk,
      metadata,
    },
  };
}
|
package/src/analysis/diff.ts
DELETED
|
@@ -1,131 +0,0 @@
|
|
|
1
|
-
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
-
import type { GraphEdge, GraphNode } from '../graph/types';
|
|
3
|
-
|
|
4
|
-
/** Location and identity of a node that was added or removed. */
export interface NodeChange {
  qualified_name: string;
  kind: string;
  file_path: string;
  line_start: number;
  line_end: number;
}

/** A surviving node whose definition changed, with the change kinds seen. */
export interface ModifiedNode {
  qualified_name: string;
  /** Any of: 'body', 'line_range', 'params', 'return_type'. */
  changes: string[];
}

/** Structural diff of the graph, restricted to the changed files. */
export interface DiffResult {
  changed_files: string[];
  summary: { added: number; removed: number; modified: number };
  nodes: { added: NodeChange[]; removed: NodeChange[]; modified: ModifiedNode[] };
  edges: {
    added: Pick<GraphEdge, 'kind' | 'source_qualified' | 'target_qualified'>[];
    removed: Pick<GraphEdge, 'kind' | 'source_qualified' | 'target_qualified'>[];
  };
  /** Per changed file: distinct outside dependents plus a bucketed risk level. */
  risk_by_file: Record<string, { dependents: number; risk: 'HIGH' | 'MEDIUM' | 'LOW' }>;
}
|
|
27
|
-
|
|
28
|
-
export function computeStructuralDiff(
|
|
29
|
-
oldGraph: IndexedGraph,
|
|
30
|
-
newNodes: GraphNode[],
|
|
31
|
-
newEdges: GraphEdge[],
|
|
32
|
-
changedFiles: string[],
|
|
33
|
-
): DiffResult {
|
|
34
|
-
const changedSet = new Set(changedFiles);
|
|
35
|
-
|
|
36
|
-
// Old nodes in changed files
|
|
37
|
-
const oldNodesInChanged = new Map<string, GraphNode>();
|
|
38
|
-
for (const n of oldGraph.nodes) {
|
|
39
|
-
if (changedSet.has(n.file_path)) oldNodesInChanged.set(n.qualified_name, n);
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
// New nodes in changed files
|
|
43
|
-
const newNodesMap = new Map<string, GraphNode>();
|
|
44
|
-
for (const n of newNodes) {
|
|
45
|
-
if (changedSet.has(n.file_path)) newNodesMap.set(n.qualified_name, n);
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
// Classify nodes
|
|
49
|
-
const added: NodeChange[] = [];
|
|
50
|
-
const removed: NodeChange[] = [];
|
|
51
|
-
const modified: ModifiedNode[] = [];
|
|
52
|
-
|
|
53
|
-
for (const [qn, n] of newNodesMap) {
|
|
54
|
-
if (!oldNodesInChanged.has(qn)) {
|
|
55
|
-
added.push({
|
|
56
|
-
qualified_name: qn,
|
|
57
|
-
kind: n.kind,
|
|
58
|
-
file_path: n.file_path,
|
|
59
|
-
line_start: n.line_start,
|
|
60
|
-
line_end: n.line_end,
|
|
61
|
-
});
|
|
62
|
-
}
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
for (const [qn, n] of oldNodesInChanged) {
|
|
66
|
-
if (!newNodesMap.has(qn)) {
|
|
67
|
-
removed.push({
|
|
68
|
-
qualified_name: qn,
|
|
69
|
-
kind: n.kind,
|
|
70
|
-
file_path: n.file_path,
|
|
71
|
-
line_start: n.line_start,
|
|
72
|
-
line_end: n.line_end,
|
|
73
|
-
});
|
|
74
|
-
} else {
|
|
75
|
-
const newN = newNodesMap.get(qn)!;
|
|
76
|
-
const changes: string[] = [];
|
|
77
|
-
// Detect real content changes vs. pure displacement.
|
|
78
|
-
// content_hash = SHA256 of the node's source text (position-independent).
|
|
79
|
-
if (n.content_hash && newN.content_hash) {
|
|
80
|
-
// Definitive: hash comparison catches ALL content changes,
|
|
81
|
-
// even same-line-count edits (e.g. `return 1` → `return 2`).
|
|
82
|
-
if (n.content_hash !== newN.content_hash) changes.push('body');
|
|
83
|
-
} else if (n.line_start !== newN.line_start || n.line_end !== newN.line_end) {
|
|
84
|
-
// Fallback (legacy data without content_hash): size heuristic.
|
|
85
|
-
const oldSize = n.line_end - n.line_start;
|
|
86
|
-
const newSize = newN.line_end - newN.line_start;
|
|
87
|
-
if (oldSize !== newSize) changes.push('line_range');
|
|
88
|
-
}
|
|
89
|
-
if ((n.params || '') !== (newN.params || '')) changes.push('params');
|
|
90
|
-
if ((n.return_type || '') !== (newN.return_type || '')) changes.push('return_type');
|
|
91
|
-
if (changes.length > 0) modified.push({ qualified_name: qn, changes });
|
|
92
|
-
}
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
// Classify edges
|
|
96
|
-
const oldEdgesInChanged = oldGraph.edges.filter((e) => changedSet.has(e.file_path));
|
|
97
|
-
const oldEdgeKeys = new Set(oldEdgesInChanged.map((e) => `${e.kind}|${e.source_qualified}|${e.target_qualified}`));
|
|
98
|
-
const newEdgesInChanged = newEdges.filter((e) => changedSet.has(e.file_path));
|
|
99
|
-
const newEdgeKeys = new Set(newEdgesInChanged.map((e) => `${e.kind}|${e.source_qualified}|${e.target_qualified}`));
|
|
100
|
-
|
|
101
|
-
const addedEdges = newEdgesInChanged
|
|
102
|
-
.filter((e) => !oldEdgeKeys.has(`${e.kind}|${e.source_qualified}|${e.target_qualified}`))
|
|
103
|
-
.map((e) => ({ kind: e.kind, source_qualified: e.source_qualified, target_qualified: e.target_qualified }));
|
|
104
|
-
|
|
105
|
-
const removedEdges = oldEdgesInChanged
|
|
106
|
-
.filter((e) => !newEdgeKeys.has(`${e.kind}|${e.source_qualified}|${e.target_qualified}`))
|
|
107
|
-
.map((e) => ({ kind: e.kind, source_qualified: e.source_qualified, target_qualified: e.target_qualified }));
|
|
108
|
-
|
|
109
|
-
// Risk by file: count unique dependents via reverse adjacency
|
|
110
|
-
const riskByFile: Record<string, { dependents: number; risk: 'HIGH' | 'MEDIUM' | 'LOW' }> = {};
|
|
111
|
-
for (const file of changedFiles) {
|
|
112
|
-
const nodesInFile = oldGraph.byFile.get(file) || [];
|
|
113
|
-
const dependents = new Set<string>();
|
|
114
|
-
for (const n of nodesInFile) {
|
|
115
|
-
for (const edge of oldGraph.reverseAdjacency.get(n.qualified_name) || []) {
|
|
116
|
-
if (!changedSet.has(edge.file_path)) dependents.add(edge.source_qualified);
|
|
117
|
-
}
|
|
118
|
-
}
|
|
119
|
-
const count = dependents.size;
|
|
120
|
-
const risk = count >= 10 ? 'HIGH' : count >= 3 ? 'MEDIUM' : 'LOW';
|
|
121
|
-
riskByFile[file] = { dependents: count, risk };
|
|
122
|
-
}
|
|
123
|
-
|
|
124
|
-
return {
|
|
125
|
-
changed_files: changedFiles,
|
|
126
|
-
summary: { added: added.length, removed: removed.length, modified: modified.length },
|
|
127
|
-
nodes: { added, removed, modified },
|
|
128
|
-
edges: { added: addedEdges, removed: removedEdges },
|
|
129
|
-
risk_by_file: riskByFile,
|
|
130
|
-
};
|
|
131
|
-
}
|
package/src/analysis/enrich.ts
DELETED
|
@@ -1,110 +0,0 @@
|
|
|
1
|
-
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
-
import type { CalleeRef, CallerRef, EnrichedFunction } from '../graph/types';
|
|
3
|
-
import type { DiffResult } from './diff';
|
|
4
|
-
import type { Flow } from './flows';
|
|
5
|
-
|
|
6
|
-
/**
 * Produces one EnrichedFunction per non-test function/method in the changed
 * files, attaching its filtered callers, deduplicated callees, rendered
 * signature, diff classification (new vs. modified), test coverage flag,
 * and the entry points of flows that pass through it.
 *
 * @param graph - Indexed code graph.
 * @param changedFiles - File paths whose functions should be enriched.
 * @param diff - Structural diff used to classify added/modified functions.
 * @param allFlows - All detected flows; paths are indexed by function.
 * @param minConfidence - Callers with edge confidence below this are dropped.
 * @returns Enriched functions sorted by file path, then start line.
 */
export function enrichChangedFunctions(
  graph: IndexedGraph,
  changedFiles: string[],
  diff: DiffResult,
  allFlows: Flow[],
  minConfidence: number,
): EnrichedFunction[] {
  const changedSet = new Set(changedFiles);

  // Pre-index diff results for O(1) added/modified lookups.
  const addedSet = new Set(diff.nodes.added.map((n) => n.qualified_name));
  const modifiedMap = new Map(diff.nodes.modified.map((m) => [m.qualified_name, m.changes]));

  // Pre-index TESTED_BY edge sources.
  // NOTE(review): this set holds source_qualified values but is later probed
  // with node.file_path — this only works if TESTED_BY edges carry a file
  // path in source_qualified; verify against the edge builder.
  const testedFiles = new Set(graph.edges.filter((e) => e.kind === 'TESTED_BY').map((e) => e.source_qualified));

  // Pre-index flows: function qualified name -> entry points of flows whose
  // path includes it (deduplicated, in flow order).
  const flowsByFunction = new Map<string, string[]>();
  for (const flow of allFlows) {
    for (const qn of flow.path) {
      const list = flowsByFunction.get(qn);
      if (list) {
        if (!list.includes(flow.entry_point)) list.push(flow.entry_point);
      } else {
        flowsByFunction.set(qn, [flow.entry_point]);
      }
    }
  }

  // Keep only callable definitions: skip tests, constructors and type-level
  // declarations (Class/Interface/Enum).
  const changedFunctions = graph.nodes.filter(
    (n) =>
      changedSet.has(n.file_path) &&
      !n.is_test &&
      n.kind !== 'Constructor' &&
      n.kind !== 'Class' &&
      n.kind !== 'Interface' &&
      n.kind !== 'Enum',
  );

  return changedFunctions
    .sort((a, b) => a.file_path.localeCompare(b.file_path) || a.line_start - b.line_start)
    .map((node) => {
      // Callers: incoming CALLS edges above the confidence threshold.
      const callers: CallerRef[] = [];
      for (const edge of graph.reverseAdjacency.get(node.qualified_name) || []) {
        if (edge.kind !== 'CALLS') continue;
        // null/undefined confidence = high confidence (edge came from DB or parser without scoring)
        if ((edge.confidence ?? 1.0) < minConfidence) continue;
        const sourceNode = graph.byQualified.get(edge.source_qualified);
        callers.push({
          qualified_name: edge.source_qualified,
          // Fall back to the last '::' segment when the caller node is unknown.
          name: sourceNode?.name || edge.source_qualified.split('::').pop() || 'unknown',
          file_path: sourceNode?.file_path || edge.file_path,
          line: edge.line,
          confidence: edge.confidence ?? 1.0,
        });
      }

      // Callees: outgoing CALLS edges, deduplicated by target.
      const callees: CalleeRef[] = [];
      const seenCallees = new Set<string>();
      for (const edge of graph.adjacency.get(node.qualified_name) || []) {
        if (edge.kind !== 'CALLS') continue;
        if (seenCallees.has(edge.target_qualified)) continue;
        seenCallees.add(edge.target_qualified);
        const targetNode = graph.byQualified.get(edge.target_qualified);
        const name = targetNode?.name || edge.target_qualified.split('::').pop() || 'unknown';
        // Empty-parens param lists are elided from the rendered signature.
        const params = targetNode?.params && targetNode.params !== '()' ? targetNode.params : '';
        const ret = targetNode?.return_type ? ` -> ${targetNode.return_type}` : '';
        callees.push({
          qualified_name: edge.target_qualified,
          name,
          file_path: targetNode?.file_path || '',
          signature: `${name}${params}${ret}`,
        });
      }

      // Render this function's own signature (short name, params, return type).
      const shortName = node.name.includes('.') ? node.name.split('.').pop()! : node.name;
      const params = node.params && node.params !== '()' ? node.params : '';
      const ret = node.return_type ? ` -> ${node.return_type}` : '';
      const signature = `${shortName}${params}${ret}`;

      // Diff classification: brand-new functions carry no change list.
      const isNew = addedSet.has(node.qualified_name);
      const diffChanges = isNew ? [] : modifiedMap.get(node.qualified_name) || [];

      return {
        qualified_name: node.qualified_name,
        name: node.name,
        kind: node.kind,
        signature,
        file_path: node.file_path,
        line_start: node.line_start,
        line_end: node.line_end,
        callers,
        callees,
        has_test_coverage: testedFiles.has(node.file_path),
        diff_changes: diffChanges,
        is_new: isNew,
        in_flows: flowsByFunction.get(node.qualified_name) || [],
      };
    });
}
|
package/src/analysis/flows.ts
DELETED
|
@@ -1,112 +0,0 @@
|
|
|
1
|
-
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
-
|
|
3
|
-
/** Options for flow detection. */
export interface FlowOptions {
  /** Maximum BFS depth to follow CALLS edges from each entry point. */
  maxDepth: number;
  /** Which entry-point kinds to consider. */
  type: 'test' | 'http' | 'all';
}

/** A call chain rooted at a test or HTTP-handler entry point. */
export interface Flow {
  entry_point: string;
  type: 'test' | 'http';
  /** Number of BFS levels actually expanded. */
  depth: number;
  /** Distinct functions reached, entry point included. */
  node_count: number;
  /** Distinct files touched by the flow. */
  file_count: number;
  /** node_count * file_count — used to rank flows. */
  criticality: number;
  /** All reached qualified names in BFS discovery order. */
  path: string[];
}

/** All detected flows plus aggregate statistics. */
export interface FlowsResult {
  flows: Flow[];
  summary: {
    total_flows: number;
    by_type: { test: number; http: number };
    avg_depth: number;
    max_criticality: number;
  };
}
|
|
27
|
-
|
|
28
|
-
const HTTP_METHOD_NAMES = new Set(['get', 'post', 'put', 'delete', 'patch', 'handle', 'handler']);
|
|
29
|
-
|
|
30
|
-
function isHttpHandler(_qualifiedName: string, name: string, parentName?: string): boolean {
|
|
31
|
-
if (HTTP_METHOD_NAMES.has(name.toLowerCase())) return true;
|
|
32
|
-
if (parentName?.toLowerCase().endsWith('controller')) return true;
|
|
33
|
-
return false;
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
export function detectFlows(graph: IndexedGraph, opts: FlowOptions): FlowsResult {
|
|
37
|
-
const { maxDepth, type } = opts;
|
|
38
|
-
|
|
39
|
-
// Find entry points
|
|
40
|
-
const entryPoints: { qualified: string; type: 'test' | 'http' }[] = [];
|
|
41
|
-
|
|
42
|
-
for (const node of graph.nodes) {
|
|
43
|
-
if (type !== 'http' && node.kind === 'Test') {
|
|
44
|
-
entryPoints.push({ qualified: node.qualified_name, type: 'test' });
|
|
45
|
-
}
|
|
46
|
-
if (type !== 'test' && (node.kind === 'Method' || node.kind === 'Function')) {
|
|
47
|
-
if (isHttpHandler(node.qualified_name, node.name, node.parent_name)) {
|
|
48
|
-
entryPoints.push({ qualified: node.qualified_name, type: 'http' });
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
// BFS for each entry point
|
|
54
|
-
const flows: Flow[] = [];
|
|
55
|
-
|
|
56
|
-
for (const ep of entryPoints) {
|
|
57
|
-
const path: string[] = [ep.qualified];
|
|
58
|
-
const visited = new Set<string>([ep.qualified]);
|
|
59
|
-
const files = new Set<string>();
|
|
60
|
-
|
|
61
|
-
const startNode = graph.byQualified.get(ep.qualified);
|
|
62
|
-
if (startNode) files.add(startNode.file_path);
|
|
63
|
-
|
|
64
|
-
let frontier = [ep.qualified];
|
|
65
|
-
let depth = 0;
|
|
66
|
-
|
|
67
|
-
while (frontier.length > 0 && depth < maxDepth) {
|
|
68
|
-
const next: string[] = [];
|
|
69
|
-
for (const q of frontier) {
|
|
70
|
-
for (const edge of graph.adjacency.get(q) || []) {
|
|
71
|
-
if (edge.kind !== 'CALLS') continue;
|
|
72
|
-
if (visited.has(edge.target_qualified)) continue;
|
|
73
|
-
visited.add(edge.target_qualified);
|
|
74
|
-
next.push(edge.target_qualified);
|
|
75
|
-
path.push(edge.target_qualified);
|
|
76
|
-
const targetNode = graph.byQualified.get(edge.target_qualified);
|
|
77
|
-
if (targetNode) files.add(targetNode.file_path);
|
|
78
|
-
}
|
|
79
|
-
}
|
|
80
|
-
if (next.length === 0) break;
|
|
81
|
-
frontier = next;
|
|
82
|
-
depth++;
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
flows.push({
|
|
86
|
-
entry_point: ep.qualified,
|
|
87
|
-
type: ep.type,
|
|
88
|
-
depth,
|
|
89
|
-
node_count: visited.size,
|
|
90
|
-
file_count: files.size,
|
|
91
|
-
criticality: visited.size * files.size,
|
|
92
|
-
path,
|
|
93
|
-
});
|
|
94
|
-
}
|
|
95
|
-
|
|
96
|
-
flows.sort((a, b) => b.criticality - a.criticality);
|
|
97
|
-
|
|
98
|
-
const testFlows = flows.filter((f) => f.type === 'test').length;
|
|
99
|
-
const httpFlows = flows.filter((f) => f.type === 'http').length;
|
|
100
|
-
const avgDepth = flows.length > 0 ? Math.round((flows.reduce((s, f) => s + f.depth, 0) / flows.length) * 10) / 10 : 0;
|
|
101
|
-
const maxCriticality = flows.length > 0 ? flows[0].criticality : 0;
|
|
102
|
-
|
|
103
|
-
return {
|
|
104
|
-
flows,
|
|
105
|
-
summary: {
|
|
106
|
-
total_flows: flows.length,
|
|
107
|
-
by_type: { test: testFlows, http: httpFlows },
|
|
108
|
-
avg_depth: avgDepth,
|
|
109
|
-
max_criticality: maxCriticality,
|
|
110
|
-
},
|
|
111
|
-
};
|
|
112
|
-
}
|